1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
36 #include "dependency.h"
/* Per-iterator information for a single FORALL index variable; nodes are
   chained via NEXT.  NOTE(review): the remaining members are not visible
   in this view of the file.  */
39 typedef struct iter_info
45 struct iter_info *next;

/* Information about one nesting level of a FORALL construct; PREV_NEST
   links back to the enclosing FORALL level.  */
49 typedef struct forall_info
56 struct forall_info *prev_nest;

/* Forward declaration of the WHERE-translation worker defined later in
   this file (needed because FORALL and WHERE translation are mutually
   recursive).  */
60 static void gfc_trans_where_2 (gfc_code *, tree, bool,
61 forall_info *, stmtblock_t *);
63 /* Translate a F95 label number to a LABEL_EXPR. */
66 gfc_trans_label_here (gfc_code * code)
/* Emit the GENERIC label declaration for the statement label attached to
   this code node (code->here).  */
68 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 /* Given a variable expression which has been ASSIGNed to, find the decl
73 containing the auxiliary variables. For variables in common blocks this
77 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only symbols carrying the ASSIGN attribute may be used as assigned-GOTO
   label variables.  */
79 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
80 gfc_conv_expr (se, expr);
81 /* Deals with variable in common block. Get the field declaration. */
82 if (TREE_CODE (se->expr) == COMPONENT_REF)
83 se->expr = TREE_OPERAND (se->expr, 1);
84 /* Deals with dummy argument. Get the parameter declaration. */
85 else if (TREE_CODE (se->expr) == INDIRECT_REF)
86 se->expr = TREE_OPERAND (se->expr, 0);
89 /* Translate a label assignment statement. */
92 gfc_trans_label_assign (gfc_code * code)
101 /* Start a new block. */
102 gfc_init_se (&se, NULL);
103 gfc_start_block (&se.pre);
/* Locate the auxiliary decls (string length and address slots) attached
   to the ASSIGN variable.  */
104 gfc_conv_label_variable (&se, code->expr1);
106 len = GFC_DECL_STRING_LEN (se.expr);
107 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
109 label_tree = gfc_get_label_decl (code->label1);
111 if (code->label1->defined == ST_LABEL_TARGET)
/* The label is a branch target: store its address and use -1 as the
   length to mark "not a FORMAT label" (checked in gfc_trans_goto).  */
113 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
114 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT statement: store the format string
   itself (NUL-terminated) together with its length.  */
118 gfc_expr *format = code->label1->format;
120 label_len = format->value.character.length;
121 len_tree = build_int_cst (NULL_TREE, label_len);
122 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
123 format->value.character.string);
124 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* Commit both auxiliary variables.  */
127 gfc_add_modify (&se.pre, len, len_tree);
128 gfc_add_modify (&se.pre, addr, label_tree);
130 return gfc_finish_block (&se.pre);
133 /* Translate a GOTO statement. */
136 gfc_trans_goto (gfc_code * code)
138 locus loc = code->loc;
/* A plain (unassigned) GOTO: jump straight to the named label.  */
144 if (code->label1 != NULL)
145 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Otherwise this is an assigned GOTO through a label variable.  */
148 gfc_init_se (&se, NULL);
149 gfc_start_block (&se.pre);
150 gfc_conv_label_variable (&se, code->expr1);
/* The length slot is -1 iff a target label (not a FORMAT label) was
   ASSIGNed; diagnose misuse at run time.  */
151 tmp = GFC_DECL_STRING_LEN (se.expr);
152 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
153 build_int_cst (TREE_TYPE (tmp), -1));
154 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
155 "Assigned label is not a target label");
157 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
159 /* We're going to ignore a label list. It does not really change the
160 statement's semantics (because it is just a further restriction on
161 what's legal code); before, we were comparing label addresses here, but
162 that's a very fragile business and may break with optimization. So
/* Emit a computed jump through the stored label address.  */
165 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
167 gfc_add_expr_to_block (&se.pre, target);
168 return gfc_finish_block (&se.pre);
172 /* Translate an ENTRY statement. Just adds a label for this entry point. */
174 gfc_trans_entry (gfc_code * code)
176 return build1_v (LABEL_EXPR, code->ext.entry->label);
180 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
181 elemental subroutines. Make temporaries for output arguments if any such
182 dependencies are found. Output arguments are chosen because internal_unpack
183 can be used, as is, to copy the result back to the variable. */
185 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
186 gfc_symbol * sym, gfc_actual_arglist * arg,
187 gfc_dep_check check_variable)
189 gfc_actual_arglist *arg0;
191 gfc_formal_arglist *formal;
192 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call is being scalarized.  */
204 if (loopse->ss == NULL)
209 formal = sym->formal;
211 /* Loop over all the arguments testing for dependencies. */
212 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
218 /* Obtain the info structure for the current argument. */
220 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
224 info = &ss->data.info;
228 /* If there is a dependency, create a temporary and use it
229 instead of the variable. */
230 fsym = formal ? formal->sym : NULL;
/* Only non-INTENT(IN) variable arguments can be clobbered and thus
   need dependency analysis against the other actual arguments.  */
231 if (e->expr_type == EXPR_VARIABLE
233 && fsym->attr.intent != INTENT_IN
234 && gfc_check_fncall_dependency (e, fsym->attr.intent,
235 sym, arg0, check_variable)
237 tree initial, temptype;
238 stmtblock_t temp_post;
240 /* Make a local loopinfo for the temporary creation, so that
241 none of the other ss->info's have to be renormalized. */
242 gfc_init_loopinfo (&tmp_loop);
243 tmp_loop.dimen = info->dimen;
244 for (n = 0; n < info->dimen; n++)
246 tmp_loop.to[n] = loopse->loop->to[n];
247 tmp_loop.from[n] = loopse->loop->from[n];
248 tmp_loop.order[n] = loopse->loop->order[n];
251 /* Obtain the argument descriptor for unpacking. */
252 gfc_init_se (&parmse, NULL);
253 parmse.want_pointer = 1;
255 /* The scalarizer introduces some specific peculiarities when
256 handling elemental subroutines; the stride can be needed up to
257 the dim_array - 1, rather than dim_loop - 1 to calculate
258 offsets outside the loop. For this reason, we make sure that
259 the descriptor has the dimensionality of the array by converting
260 trailing elements into ranges with end = start. */
261 for (ref = e->ref; ref; ref = ref->next)
262 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
267 bool seen_range = false;
268 for (n = 0; n < ref->u.ar.dimen; n++)
270 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
274 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Degenerate range covering a single element: end = start.  */
277 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
278 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
286 initialize the array temporary with a copy of the values. */
287 if (fsym->attr.intent == INTENT_INOUT
288 || (fsym->ts.type ==BT_DERIVED
289 && fsym->attr.intent == INTENT_OUT))
290 initial = parmse.expr;
294 /* Find the type of the temporary to create; we don't use the type
295 of e itself as this breaks for subcomponent-references in e (where
296 the type of e is that of the final reference, but parmse.expr's
297 type corresponds to the full derived-type). */
298 /* TODO: Fix this somehow so we don't need a temporary of the whole
299 array but instead only the components referenced. */
300 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
301 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
302 temptype = TREE_TYPE (temptype);
303 temptype = gfc_get_element_type (temptype);
305 /* Generate the temporary. Cleaning up the temporary should be the
306 very last thing done, so we add the code to a new block and add it
307 to se->post as last instructions. */
308 size = gfc_create_var (gfc_array_index_type, NULL);
309 data = gfc_create_var (pvoid_type_node, NULL);
310 gfc_init_block (&temp_post);
311 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
312 &tmp_loop, info, temptype,
316 gfc_add_modify (&se->pre, size, tmp);
317 tmp = fold_convert (pvoid_type_node, info->data);
318 gfc_add_modify (&se->pre, data, tmp);
320 /* Calculate the offset for the temporary. */
/* offset = -sum_n (from[n] * stride[n]), so that indexing the
   temporary with the loop variables starts at element zero.  */
321 offset = gfc_index_zero_node;
322 for (n = 0; n < info->dimen; n++)
324 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
326 tmp = fold_build2_loc (input_location, MULT_EXPR,
327 gfc_array_index_type,
328 loopse->loop->from[n], tmp);
329 offset = fold_build2_loc (input_location, MINUS_EXPR,
330 gfc_array_index_type, offset, tmp);
332 info->offset = gfc_create_var (gfc_array_index_type, NULL);
333 gfc_add_modify (&se->pre, info->offset, offset);
335 /* Copy the result back using unpack. */
336 tmp = build_call_expr_loc (input_location,
337 gfor_fndecl_in_unpack, 2, parmse.expr, data);
338 gfc_add_expr_to_block (&se->post, tmp);
340 /* parmse.pre is already added above. */
341 gfc_add_block_to_block (&se->post, &parmse.post);
342 gfc_add_block_to_block (&se->post, &temp_post);
348 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
351 gfc_trans_call (gfc_code * code, bool dependency_check,
352 tree mask, tree count1, bool invert)
356 int has_alternate_specifier;
357 gfc_dep_check check_variable;
358 tree index = NULL_TREE;
359 tree maskexpr = NULL_TREE;
362 /* A CALL starts a new block because the actual arguments may have to
363 be evaluated first. */
364 gfc_init_se (&se, NULL);
365 gfc_start_block (&se.pre);
367 gcc_assert (code->resolved_sym);
/* Walk the arguments; a non-terminator chain means this is an
   elemental call with at least one array argument.  */
369 ss = gfc_ss_terminator;
370 if (code->resolved_sym->attr.elemental)
371 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
373 /* Is not an elemental subroutine call with array valued arguments. */
374 if (ss == gfc_ss_terminator)
377 /* Translate the call. */
378 has_alternate_specifier
379 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
382 /* A subroutine without side-effect, by definition, does nothing! */
383 TREE_SIDE_EFFECTS (se.expr) = 1;
385 /* Chain the pieces together and return the block. */
386 if (has_alternate_specifier)
388 gfc_code *select_code;
/* An alternate return: the frontend guarantees the following
   statement is the SELECT that dispatches on the return value.  */
390 select_code = code->next;
391 gcc_assert(select_code->op == EXEC_SELECT);
392 sym = select_code->expr1->symtree->n.sym;
393 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
394 if (sym->backend_decl == NULL)
395 sym->backend_decl = gfc_get_symbol_decl (sym);
396 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
399 gfc_add_expr_to_block (&se.pre, se.expr);
401 gfc_add_block_to_block (&se.pre, &se.post);
406 /* An elemental subroutine call with array valued arguments has
414 /* gfc_walk_elemental_function_args renders the ss chain in the
415 reverse order to the actual argument order. */
416 ss = gfc_reverse_ss (ss);
418 /* Initialize the loop. */
419 gfc_init_se (&loopse, NULL);
420 gfc_init_loopinfo (&loop);
421 gfc_add_ss_to_loop (&loop, ss);
423 gfc_conv_ss_startstride (&loop);
424 /* TODO: gfc_conv_loop_setup generates a temporary for vector
425 subscripts. This could be prevented in the elemental case
426 as temporaries are handled separatedly
427 (below in gfc_conv_elemental_dependencies). */
428 gfc_conv_loop_setup (&loop, &code->expr1->where);
429 gfc_mark_ss_chain_used (ss, 1);
431 /* Convert the arguments, checking for dependencies. */
432 gfc_copy_loopinfo_to_se (&loopse, &loop);
435 /* For operator assignment, do dependency checking. */
436 if (dependency_check)
437 check_variable = ELEM_CHECK_VARIABLE;
439 check_variable = ELEM_DONT_CHECK_VARIABLE;
/* Create temporaries for any arguments with call-level dependencies.  */
441 gfc_init_se (&depse, NULL);
442 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
443 code->ext.actual, check_variable);
445 gfc_add_block_to_block (&loop.pre, &depse.pre);
446 gfc_add_block_to_block (&loop.post, &depse.post);
448 /* Generate the loop body. */
449 gfc_start_scalarized_body (&loop, &body);
450 gfc_init_block (&block);
454 /* Form the mask expression according to the mask. */
/* MASK (used by WHERE/FORALL callers) gates each elemental call;
   INVERT selects the complementary set of elements.  */
456 maskexpr = gfc_build_array_ref (mask, index, NULL);
458 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
459 TREE_TYPE (maskexpr), maskexpr);
462 /* Add the subroutine call to the block. */
463 gfc_conv_procedure_call (&loopse, code->resolved_sym,
464 code->ext.actual, code->expr1, NULL);
/* Masked case: only execute the call where the mask is true, and
   bump the mask index COUNT1.  */
468 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
469 build_empty_stmt (input_location));
470 gfc_add_expr_to_block (&loopse.pre, tmp);
471 tmp = fold_build2_loc (input_location, PLUS_EXPR,
472 gfc_array_index_type,
473 count1, gfc_index_one_node);
474 gfc_add_modify (&loopse.pre, count1, tmp);
477 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
479 gfc_add_block_to_block (&block, &loopse.pre);
480 gfc_add_block_to_block (&block, &loopse.post);
482 /* Finish up the loop block and the loop. */
483 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
484 gfc_trans_scalarizing_loops (&loop, &body);
485 gfc_add_block_to_block (&se.pre, &loop.pre);
486 gfc_add_block_to_block (&se.pre, &loop.post);
487 gfc_add_block_to_block (&se.pre, &se.post);
488 gfc_cleanup_loop (&loop);
491 return gfc_finish_block (&se.pre);
495 /* Translate the RETURN statement. */
498 gfc_trans_return (gfc_code * code)
506 /* If code->expr is not NULL, this return statement must appear
507 in a subroutine and current_fake_result_decl has already
/* Fetch the fake result decl used for the alternate-return value.  */
510 result = gfc_get_fake_result_decl (NULL, 0);
/* No * dummy to receive the alternate return: warn and emit a plain
   return.  */
513 gfc_warning ("An alternate return at %L without a * dummy argument",
514 &code->expr1->where);
515 return gfc_generate_return ();
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
/* Evaluate the alternate-return expression.  */
522 gfc_conv_expr (&se, code->expr1);
524 /* Note that the actually returned expression is a simple value and
525 does not depend on any pointers or such; thus we can clean-up with
526 se.post before returning. */
527 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
528 result, fold_convert (TREE_TYPE (result),
530 gfc_add_expr_to_block (&se.pre, tmp);
531 gfc_add_block_to_block (&se.pre, &se.post);
533 tmp = gfc_generate_return ();
534 gfc_add_expr_to_block (&se.pre, tmp);
535 return gfc_finish_block (&se.pre);
/* Plain RETURN with no expression.  */
538 return gfc_generate_return ();
542 /* Translate the PAUSE statement. We have to translate this statement
543 to a runtime library call. */
546 gfc_trans_pause (gfc_code * code)
548 tree gfc_int4_type_node = gfc_get_int_type (4);
552 /* Start a new block for this statement. */
553 gfc_init_se (&se, NULL);
554 gfc_start_block (&se.pre);
/* No stop-code: call _gfortran_pause_string with a NULL string and
   zero length.  */
557 if (code->expr1 == NULL)
559 tmp = build_int_cst (gfc_int4_type_node, 0);
560 tmp = build_call_expr_loc (input_location,
561 gfor_fndecl_pause_string, 2,
562 build_int_cst (pchar_type_node, 0), tmp);
/* Integer stop-code: use the numeric runtime entry point.  */
564 else if (code->expr1->ts.type == BT_INTEGER)
566 gfc_conv_expr (&se, code->expr1);
567 tmp = build_call_expr_loc (input_location,
568 gfor_fndecl_pause_numeric, 1,
569 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
573 gfc_conv_expr_reference (&se, code->expr1);
574 tmp = build_call_expr_loc (input_location,
575 gfor_fndecl_pause_string, 2,
576 se.expr, se.string_length);
579 gfc_add_expr_to_block (&se.pre, tmp);
581 gfc_add_block_to_block (&se.pre, &se.post);
583 return gfc_finish_block (&se.pre);
587 /* Translate the STOP statement. We have to translate this statement
588 to a runtime library call. */
/* ERROR_STOP selects the ERROR STOP runtime entry points instead of the
   plain STOP ones.  */
591 gfc_trans_stop (gfc_code *code, bool error_stop)
593 tree gfc_int4_type_node = gfc_get_int_type (4);
597 /* Start a new block for this statement. */
598 gfc_init_se (&se, NULL);
599 gfc_start_block (&se.pre);
/* No stop-code: call the string variant with NULL/0.  */
601 if (code->expr1 == NULL)
603 tmp = build_int_cst (gfc_int4_type_node, 0);
604 tmp = build_call_expr_loc (input_location,
605 error_stop ? gfor_fndecl_error_stop_string
606 : gfor_fndecl_stop_string,
607 2, build_int_cst (pchar_type_node, 0), tmp);
/* Integer stop-code: numeric runtime variant.  */
609 else if (code->expr1->ts.type == BT_INTEGER)
611 gfc_conv_expr (&se, code->expr1);
612 tmp = build_call_expr_loc (input_location,
613 error_stop ? gfor_fndecl_error_stop_numeric
614 : gfor_fndecl_stop_numeric_f08, 1,
615 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
619 gfc_conv_expr_reference (&se, code->expr1);
620 tmp = build_call_expr_loc (input_location,
621 error_stop ? gfor_fndecl_error_stop_string
622 : gfor_fndecl_stop_string,
623 2, se.expr, se.string_length);
626 gfc_add_expr_to_block (&se.pre, tmp);
628 gfc_add_block_to_block (&se.pre, &se.post);
630 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL / SYNC MEMORY / SYNC IMAGES.  With a single image
   (the only case visible here) this reduces to an optional bounds check
   on the image number plus zeroing STAT.  */
635 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
/* Only open a block if we will actually emit something (an image check
   or a STAT assignment).  */
639 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
641 gfc_init_se (&se, NULL);
642 gfc_start_block (&se.pre);
645 /* Check SYNC IMAGES(imageset) for valid image index.
646 FIXME: Add a check for image-set arrays. */
647 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
648 && code->expr1->rank == 0)
/* With one image, the only valid image number is 1.  */
651 gfc_conv_expr (&se, code->expr1);
652 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
653 se.expr, build_int_cst (TREE_TYPE (se.expr), 1));
654 gfc_trans_runtime_check (true, false, cond, &se.pre,
655 &code->expr1->where, "Invalid image number "
657 fold_convert (integer_type_node, se.expr));
660 /* If STAT is present, set it to zero. */
663 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
664 gfc_conv_expr (&se, code->expr2);
665 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
668 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
669 return gfc_finish_block (&se.pre);
675 /* Generate GENERIC for the IF construct. This function also deals with
676 the simple IF statement, because the front end translates the IF
677 statement into an IF construct.
709 where COND_S is the simplified version of the predicate. PRE_COND_S
710 are the pre side-effects produced by the translation of the
712 We need to build the chain recursively otherwise we run into
713 problems with folding incomplete statements. */
716 gfc_trans_if_1 (gfc_code * code)
722 /* Check for an unconditional ELSE clause. */
724 return gfc_trans_code (code->next);
726 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
727 gfc_init_se (&if_se, NULL);
728 gfc_start_block (&if_se.pre);
730 /* Calculate the IF condition expression. */
731 gfc_conv_expr_val (&if_se, code->expr1);
733 /* Translate the THEN clause. */
734 stmt = gfc_trans_code (code->next);
736 /* Translate the ELSE clause.  Recurse for ELSE IF chains; an absent
737 ELSE becomes an empty statement. */
738 elsestmt = gfc_trans_if_1 (code->block);
740 elsestmt = build_empty_stmt (input_location);
742 /* Build the condition expression and add it to the condition block. */
743 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
744 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
747 gfc_add_expr_to_block (&if_se.pre, stmt);
749 /* Finish off this statement. */
750 return gfc_finish_block (&if_se.pre);
/* Entry point for IF construct translation: wraps gfc_trans_if_1 and
   provides the construct's exit label (target of EXIT statements).  */
754 gfc_trans_if (gfc_code * code)
759 /* Create exit label so it is available for trans'ing the body code. */
760 exit_label = gfc_build_label_decl (NULL_TREE);
761 code->exit_label = exit_label;
763 /* Translate the actual code in code->block. */
764 gfc_init_block (&body);
765 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
767 /* Add exit label. */
768 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
770 return gfc_finish_block (&body);
774 /* Translate an arithmetic IF expression.
776 IF (cond) label1, label2, label3 translates to
788 An optimized version can be generated in case of equal labels.
789 E.g., if label1 is equal to label2, we can translate it to
798 gfc_trans_arithmetic_if (gfc_code * code)
806 /* Start a new block. */
807 gfc_init_se (&se, NULL);
808 gfc_start_block (&se.pre);
810 /* Pre-evaluate COND. */
811 gfc_conv_expr_val (&se, code->expr1);
/* COND is used in up to two comparisons; evaluate it once.  */
812 se.expr = gfc_evaluate_now (se.expr, &se.pre);
814 /* Build something to compare with. */
815 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
817 if (code->label1->value != code->label2->value)
819 /* If (cond < 0) take branch1 else take branch2.
820 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
821 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
822 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* Use < when all three labels are distinct, != when label1 equals
   label3 (so negative and positive share a target).  */
824 if (code->label1->value != code->label3->value)
825 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
828 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
831 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
832 tmp, branch1, branch2);
/* label1 == label2: negative and zero share a target.  */
835 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
837 if (code->label1->value != code->label3->value
838 && code->label2->value != code->label3->value)
840 /* if (cond <= 0) take branch1 else take branch2. */
841 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
842 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
844 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
845 tmp, branch1, branch2);
848 /* Append the COND_EXPR to the evaluation of COND, and return. */
849 gfc_add_expr_to_block (&se.pre, branch1);
850 return gfc_finish_block (&se.pre);
854 /* Translate a CRITICAL block.  With a single image (the only case
855 visible here) no locking is required; just translate the body. */
856 gfc_trans_critical (gfc_code *code)
861 gfc_start_block (&block);
862 tmp = gfc_trans_code (code->block->next);
863 gfc_add_expr_to_block (&block, tmp);
865 return gfc_finish_block (&block);
869 /* Do proper initialization for ASSOCIATE names. */
872 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
877 gcc_assert (sym->assoc);
878 e = sym->assoc->target;
880 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
881 to array temporary) for arrays with either unknown shape or if associating
883 if (sym->attr.dimension
884 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
890 desc = sym->backend_decl;
892 /* If association is to an expression, evaluate it and create temporary.
893 Otherwise, get descriptor of target for pointer assignment. */
894 gfc_init_se (&se, NULL);
895 ss = gfc_walk_expr (e);
896 if (sym->assoc->variable)
901 gfc_conv_expr_descriptor (&se, e, ss);
903 /* If we didn't already do the pointer assignment, set associate-name
904 descriptor to the one generated for the temporary. */
905 if (!sym->assoc->variable)
909 gfc_add_modify (&se.pre, desc, se.expr);
911 /* The generated descriptor has lower bound zero (as array
912 temporary), shift bounds so we get lower bounds of 1. */
913 for (dim = 0; dim < e->rank; ++dim)
914 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
915 dim, gfc_index_one_node);
918 /* Done, register stuff as init / cleanup code. */
919 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
920 gfc_finish_block (&se.post));
923 /* Do a scalar pointer assignment; this is for scalar variable targets. */
924 else if (gfc_is_associate_pointer (sym))
928 gcc_assert (!sym->attr.dimension);
930 gfc_init_se (&se, NULL);
931 gfc_conv_expr (&se, e);
/* Point the associate-name at the target's storage.  */
933 tmp = TREE_TYPE (sym->backend_decl);
934 tmp = gfc_build_addr_expr (tmp, se.expr);
935 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
937 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
938 gfc_finish_block (&se.post));
941 /* Do a simple assignment. This is for scalar expressions, where we
942 can simply use expression assignment. */
947 lhs = gfc_lval_expr_from_sym (sym);
948 tmp = gfc_trans_assignment (lhs, e, false, true);
949 gfc_add_init_cleanup (block, tmp, NULL_TREE);
954 /* Translate a BLOCK construct. This is basically what we would do for a
955 procedure body: set up local variables, translate the contained code,
956 and wrap everything with deferred-variable init/cleanup. */
958 gfc_trans_block_construct (gfc_code* code)
962 gfc_wrapped_block block;
965 gfc_association_list *ass;
/* The BLOCK's own namespace carries its local symbols and code.  */
967 ns = code->ext.block.ns;
972 /* Process local variables. */
973 gcc_assert (!sym->tlink);
975 gfc_process_block_locals (ns);
977 /* Generate code including exit-label. */
978 gfc_init_block (&body);
979 exit_label = gfc_build_label_decl (NULL_TREE);
980 code->exit_label = exit_label;
981 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
982 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
984 /* Finish everything. */
985 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
986 gfc_trans_deferred_vars (sym, &block);
/* Initialize any ASSOCIATE names declared by this BLOCK.  */
987 for (ass = code->ext.block.assoc; ass; ass = ass->next)
988 trans_associate_var (ass->st->n.sym, &block);
990 return gfc_finish_wrapped_block (&block);
994 /* Translate the simple DO construct. This is where the loop variable has
995 integer type and step +-1. We can't use this in the general case
996 because integer overflow and floating point errors could give incorrect
998 We translate a do loop from:
1000 DO dovar = from, to, step
1006 [Evaluate loop bounds and step]
1008 if ((step > 0) ? (dovar <= to) : (dovar => to))
1014 cond = (dovar == to);
1016 if (cond) goto end_label;
1021 This helps the optimizers by avoiding the extra induction variable
1022 used in the general case. */
1025 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1026 tree from, tree to, tree step, tree exit_cond)
1032 tree saved_dovar = NULL;
1037 type = TREE_TYPE (dovar);
1039 loc = code->ext.iterator->start->where.lb->location;
1041 /* Initialize the DO variable: dovar = from. */
1042 gfc_add_modify_loc (loc, pblock, dovar, from);
1044 /* Save value for do-tinkering checking. */
1045 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1047 saved_dovar = gfc_create_var (type, ".saved_dovar");
1048 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1051 /* Cycle and exit statements are implemented with gotos. */
1052 cycle_label = gfc_build_label_decl (NULL_TREE);
1053 exit_label = gfc_build_label_decl (NULL_TREE);
1055 /* Put the labels where they can be found later. See gfc_trans_do(). */
1056 code->cycle_label = cycle_label;
1057 code->exit_label = exit_label;
1060 gfc_start_block (&body);
1062 /* Main loop body. */
1063 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1064 gfc_add_expr_to_block (&body, tmp);
1066 /* Label for cycle statements (if needed). */
1067 if (TREE_USED (cycle_label))
1069 tmp = build1_v (LABEL_EXPR, cycle_label);
1070 gfc_add_expr_to_block (&body, tmp);
1073 /* Check whether someone has modified the loop variable. */
1074 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1076 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1077 dovar, saved_dovar);
1078 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1079 "Loop variable has been modified");
1082 /* Exit the loop if there is an I/O result condition or error. */
1085 tmp = build1_v (GOTO_EXPR, exit_label);
1086 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1088 build_empty_stmt (loc));
1089 gfc_add_expr_to_block (&body, tmp);
1092 /* Evaluate the loop condition (dovar == to) BEFORE the increment, to
1093 avoid overflowing dovar past TO on the last iteration. */
1093 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1095 cond = gfc_evaluate_now_loc (loc, cond, &body);
1097 /* Increment the loop variable. */
1098 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1099 gfc_add_modify_loc (loc, &body, dovar, tmp);
1101 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1102 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1104 /* The loop exit. */
1105 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1106 TREE_USED (exit_label) = 1;
1107 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1108 cond, tmp, build_empty_stmt (loc));
1109 gfc_add_expr_to_block (&body, tmp);
1111 /* Finish the loop body. */
1112 tmp = gfc_finish_block (&body);
1113 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1115 /* Only execute the loop if the number of iterations is positive.
1116 STEP is a compile-time constant +-1 here, so its sign is known. */
1116 if (tree_int_cst_sgn (step) > 0)
1117 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1120 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1122 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1123 build_empty_stmt (loc));
1124 gfc_add_expr_to_block (pblock, tmp);
1126 /* Add the exit label. */
1127 tmp = build1_v (LABEL_EXPR, exit_label);
1128 gfc_add_expr_to_block (pblock, tmp);
1130 return gfc_finish_block (pblock);
1133 /* Translate the DO construct. This obviously is one of the most
1134 important ones to get right with any compiler, but especially
1137 We special case some loop forms as described in gfc_trans_simple_do.
1138 For other cases we implement them with a separate loop count,
1139 as described in the standard.
1141 We translate a do loop from:
1143 DO dovar = from, to, step
1149 [evaluate loop bounds and step]
1150 empty = (step > 0 ? to < from : to > from);
1151 countm1 = (to - from) / step;
1153 if (empty) goto exit_label;
1159 if (countm1 ==0) goto exit_label;
1164 countm1 is an unsigned integer. It is equal to the loop count minus one,
1165 because the loop count itself can overflow. */
1168 gfc_trans_do (gfc_code * code, tree exit_cond)
1172 tree saved_dovar = NULL;
1188 gfc_start_block (&block);
1190 loc = code->ext.iterator->start->where.lb->location;
1192 /* Evaluate all the expressions in the iterator. */
1193 gfc_init_se (&se, NULL);
1194 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1195 gfc_add_block_to_block (&block, &se.pre);
1197 type = TREE_TYPE (dovar);
1199 gfc_init_se (&se, NULL);
1200 gfc_conv_expr_val (&se, code->ext.iterator->start);
1201 gfc_add_block_to_block (&block, &se.pre);
1202 from = gfc_evaluate_now (se.expr, &block);
1204 gfc_init_se (&se, NULL);
1205 gfc_conv_expr_val (&se, code->ext.iterator->end);
1206 gfc_add_block_to_block (&block, &se.pre);
1207 to = gfc_evaluate_now (se.expr, &block);
1209 gfc_init_se (&se, NULL);
1210 gfc_conv_expr_val (&se, code->ext.iterator->step);
1211 gfc_add_block_to_block (&block, &se.pre);
1212 step = gfc_evaluate_now (se.expr, &block);
1214 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1216 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1217 build_zero_cst (type));
1218 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1219 "DO step value is zero");
1222 /* Special case simple loops. */
1223 if (TREE_CODE (type) == INTEGER_TYPE
1224 && (integer_onep (step)
1225 || tree_int_cst_equal (step, integer_minus_one_node)))
1226 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1228 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1229 build_zero_cst (type));
1231 if (TREE_CODE (type) == INTEGER_TYPE)
1232 utype = unsigned_type_for (type);
1234 utype = unsigned_type_for (gfc_array_index_type);
1235 countm1 = gfc_create_var (utype, "countm1");
1237 /* Cycle and exit statements are implemented with gotos. */
1238 cycle_label = gfc_build_label_decl (NULL_TREE);
1239 exit_label = gfc_build_label_decl (NULL_TREE);
1240 TREE_USED (exit_label) = 1;
1242 /* Put these labels where they can be found later. */
1243 code->cycle_label = cycle_label;
1244 code->exit_label = exit_label;
1246 /* Initialize the DO variable: dovar = from. */
1247 gfc_add_modify (&block, dovar, from);
1249 /* Save value for do-tinkering checking. */
1250 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1252 saved_dovar = gfc_create_var (type, ".saved_dovar");
1253 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1256 /* Initialize loop count and jump to exit label if the loop is empty.
1257 This code is executed before we enter the loop body. We generate:
1258 step_sign = sign(1,step);
1269 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1273 if (TREE_CODE (type) == INTEGER_TYPE)
1275 tree pos, neg, step_sign, to2, from2, step2;
1277 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1279 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1280 build_int_cst (TREE_TYPE (step), 0));
1281 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1282 build_int_cst (type, -1),
1283 build_int_cst (type, 1));
1285 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1286 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1287 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1289 build_empty_stmt (loc));
1291 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1293 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1294 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1296 build_empty_stmt (loc));
1297 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1298 pos_step, pos, neg);
1300 gfc_add_expr_to_block (&block, tmp);
1302 /* Calculate the loop count. to-from can overflow, so
1303 we cast to unsigned. */
1305 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1306 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1307 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1308 step2 = fold_convert (utype, step2);
1309 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1310 tmp = fold_convert (utype, tmp);
1311 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1312 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1313 gfc_add_expr_to_block (&block, tmp);
1317 /* TODO: We could use the same width as the real type.
1318 This would probably cause more problems that it solves
1319 when we implement "long double" types. */
1321 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1322 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1323 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1324 gfc_add_modify (&block, countm1, tmp);
1326 /* We need a special check for empty loops:
1327 empty = (step > 0 ? to < from : to > from); */
1328 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1329 fold_build2_loc (loc, LT_EXPR,
1330 boolean_type_node, to, from),
1331 fold_build2_loc (loc, GT_EXPR,
1332 boolean_type_node, to, from));
1333 /* If the loop is empty, go directly to the exit label. */
1334 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1335 build1_v (GOTO_EXPR, exit_label),
1336 build_empty_stmt (input_location));
1337 gfc_add_expr_to_block (&block, tmp);
1341 gfc_start_block (&body);
1343 /* Main loop body. */
1344 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1345 gfc_add_expr_to_block (&body, tmp);
1347 /* Label for cycle statements (if needed). */
1348 if (TREE_USED (cycle_label))
1350 tmp = build1_v (LABEL_EXPR, cycle_label);
1351 gfc_add_expr_to_block (&body, tmp);
1354 /* Check whether someone has modified the loop variable. */
1355 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1357 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1359 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1360 "Loop variable has been modified");
1363 /* Exit the loop if there is an I/O result condition or error. */
1366 tmp = build1_v (GOTO_EXPR, exit_label);
1367 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1369 build_empty_stmt (input_location));
1370 gfc_add_expr_to_block (&body, tmp);
1373 /* Increment the loop variable. */
1374 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1375 gfc_add_modify_loc (loc, &body, dovar, tmp);
1377 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1378 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1380 /* End with the loop condition. Loop until countm1 == 0. */
1381 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1382 build_int_cst (utype, 0));
1383 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1384 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1385 cond, tmp, build_empty_stmt (loc));
1386 gfc_add_expr_to_block (&body, tmp);
1388 /* Decrement the loop count. */
1389 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1390 build_int_cst (utype, 1));
1391 gfc_add_modify_loc (loc, &body, countm1, tmp);
1393 /* End of loop body. */
1394 tmp = gfc_finish_block (&body);
1396 /* The for loop itself. */
1397 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1398 gfc_add_expr_to_block (&block, tmp);
1400 /* Add the exit label. */
1401 tmp = build1_v (LABEL_EXPR, exit_label);
1402 gfc_add_expr_to_block (&block, tmp);
1404 return gfc_finish_block (&block);
1408 /* Translate the DO WHILE construct.
1421        if (! cond) goto exit_label;
1427    Because the evaluation of the exit condition `cond' may have side
1428    effects, we can't do much for empty loop bodies. The backend optimizers
1429    should be smart enough to eliminate any dead loops. */
/* CODE->expr1 is the loop condition; CODE->block->next is the loop body.
   Returns the finished statement tree for the whole construct.  */
1432 gfc_trans_do_while (gfc_code * code)
1440 /* Everything we build here is part of the loop body. */
1441 gfc_start_block (&block);
1443 /* Cycle and exit statements are implemented with gotos. */
1444 cycle_label = gfc_build_label_decl (NULL_TREE);
1445 exit_label = gfc_build_label_decl (NULL_TREE);
1447 /* Put the labels where they can be found later. See gfc_trans_do(). */
1448 code->cycle_label = cycle_label;
1449 code->exit_label = exit_label;
1451 /* Create a GIMPLE version of the exit condition. */
1452 gfc_init_se (&cond, NULL);
1453 gfc_conv_expr_val (&cond, code->expr1);
1454 gfc_add_block_to_block (&block, &cond.pre);
/* Negate the DO WHILE condition so a single conditional goto leaves the
   loop exactly when the Fortran condition becomes false.  */
1455 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1456 TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1458 /* Build "IF (! cond) GOTO exit_label". */
1459 tmp = build1_v (GOTO_EXPR, exit_label);
/* The exit goto is always emitted, so mark the label used up front.  */
1460 TREE_USED (exit_label) = 1;
1461 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1462 void_type_node, cond.expr, tmp,
1463 build_empty_stmt (code->expr1->where.lb->location));
1464 gfc_add_expr_to_block (&block, tmp);
1466 /* The main body of the loop. */
1467 tmp = gfc_trans_code (code->block->next);
1468 gfc_add_expr_to_block (&block, tmp);
1470 /* Label for cycle statements (if needed). */
/* Only emit the CYCLE target if translating the body actually used it.  */
1471 if (TREE_USED (cycle_label))
1473 tmp = build1_v (LABEL_EXPR, cycle_label);
1474 gfc_add_expr_to_block (&block, tmp);
1477 /* End of loop body. */
1478 tmp = gfc_finish_block (&block);
1480 gfc_init_block (&block);
1481 /* Build the loop. */
1482 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1483 void_type_node, tmp);
1484 gfc_add_expr_to_block (&block, tmp);
1486 /* Add the exit label. */
1487 tmp = build1_v (LABEL_EXPR, exit_label);
1488 gfc_add_expr_to_block (&block, tmp);
1490 return gfc_finish_block (&block);
1494 /* Translate the SELECT CASE construct for INTEGER case expressions,
1495 without killing all potential optimizations. The problem is that
1496 Fortran allows unbounded cases, but the back-end does not, so we
1497 need to intercept those before we enter the equivalent SWITCH_EXPR
1500 For example, we translate this,
1503 CASE (:100,101,105:115)
1513 to the GENERIC equivalent,
1517 case (minimum value for typeof(expr) ... 100:
1523 case 200 ... (maximum value for typeof(expr):
/* CODE->expr1 is the INTEGER selector; each CODE->block is one CASE with
   its case_list of ranges.  Returns the finished SWITCH_EXPR block.  */
1540 gfc_trans_integer_select (gfc_code * code)
1550 gfc_start_block (&block);
1552 /* Calculate the switch expression. */
1553 gfc_init_se (&se, NULL);
1554 gfc_conv_expr_val (&se, code->expr1);
1555 gfc_add_block_to_block (&block, &se.pre);
1557 end_label = gfc_build_label_decl (NULL_TREE);
1559 gfc_init_block (&body);
/* Outer loop: one iteration per CASE block; inner loop: each range in
   that CASE's case list gets its own CASE_LABEL_EXPR.  */
1561 for (c = code->block; c; c = c->block)
1563 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1568 /* Assume it's the default case. */
1569 low = high = NULL_TREE;
1573 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1576 /* If there's only a lower bound, set the high bound to the
1577 maximum value of the case expression. */
1579 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1584 /* Three cases are possible here:
1586 1) There is no lower bound, e.g. CASE (:N).
1587 2) There is a lower bound .NE. high bound, that is
1588 a case range, e.g. CASE (N:M) where M>N (we make
1589 sure that M>N during type resolution).
1590 3) There is a lower bound, and it has the same value
1591 as the high bound, e.g. CASE (N:N). This is our
1592 internal representation of CASE(N).
1594 In the first and second case, we need to set a value for
1595 high. In the third case, we don't because the GCC middle
1596 end represents a single case value by just letting high be
1597 a NULL_TREE. We can't do that because we need to be able
1598 to represent unbounded cases. */
1602 && mpz_cmp (cp->low->value.integer,
1603 cp->high->value.integer) != 0))
1604 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1607 /* Unbounded case. */
1609 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1612 /* Build a label. */
1613 label = gfc_build_label_decl (NULL_TREE);
1615 /* Add this case label.
1616 Add parameter 'label', make it match GCC backend. */
1617 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1618 void_type_node, low, high, label);
1619 gfc_add_expr_to_block (&body, tmp);
1622 /* Add the statements for this case. */
1623 tmp = gfc_trans_code (c->next);
1624 gfc_add_expr_to_block (&body, tmp);
1626 /* Break to the end of the construct. */
/* Fortran CASE blocks never fall through, so each one ends with an
   unconditional goto to the label after the switch.  */
1627 tmp = build1_v (GOTO_EXPR, end_label);
1628 gfc_add_expr_to_block (&body, tmp);
1631 tmp = gfc_finish_block (&body);
1632 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1633 gfc_add_expr_to_block (&block, tmp);
1635 tmp = build1_v (LABEL_EXPR, end_label);
1636 gfc_add_expr_to_block (&block, tmp);
1638 return gfc_finish_block (&block);
1642 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1644 There are only two cases possible here, even though the standard
1645 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1646 .FALSE., and DEFAULT.
1648 We never generate more than two blocks here. Instead, we always
1649 try to eliminate the DEFAULT case. This way, we can translate this
1650 kind of SELECT construct to a simple
1654 expression in GENERIC. */
/* CODE->expr1 is the LOGICAL selector.  Returns a block that evaluates
   the selector and dispatches via a COND_EXPR (or runs a single shared
   case block directly).  */
1657 gfc_trans_logical_select (gfc_code * code)
/* t/f/d track the gfc_code blocks for .TRUE., .FALSE. and DEFAULT.  */
1660 gfc_code *t, *f, *d;
1665 /* Assume we don't have any cases at all. */
1668 /* Now see which ones we actually do have. We can have at most two
1669 cases in a single case list: one for .TRUE. and one for .FALSE.
1670 The default case is always separate. If the cases for .TRUE. and
1671 .FALSE. are in the same case list, the block for that case list
1672 always executed, and we don't generate code a COND_EXPR. */
1673 for (c = code->block; c; c = c->block)
1675 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1679 if (cp->low->value.logical == 0) /* .FALSE. */
1681 else /* if (cp->value.logical != 0), thus .TRUE. */
1689 /* Start a new block. */
1690 gfc_start_block (&block);
1692 /* Calculate the switch expression. We always need to do this
1693 because it may have side effects. */
1694 gfc_init_se (&se, NULL);
1695 gfc_conv_expr_val (&se, code->expr1);
1696 gfc_add_block_to_block (&block, &se.pre);
/* Both truth values map to the same case block: no branch needed.  */
1698 if (t == f && t != NULL)
1700 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1701 translate the code for these cases, append it to the current
1703 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1707 tree true_tree, false_tree, stmt;
/* Arms default to empty statements; missing cases simply do nothing.  */
1709 true_tree = build_empty_stmt (input_location);
1710 false_tree = build_empty_stmt (input_location);
1712 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1713 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1714 make the missing case the default case. */
1715 if (t != NULL && f != NULL)
1725 /* Translate the code for each of these blocks, and append it to
1726 the current block. */
1728 true_tree = gfc_trans_code (t->next);
1731 false_tree = gfc_trans_code (f->next);
1733 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1734 se.expr, true_tree, false_tree);
1735 gfc_add_expr_to_block (&block, stmt);
1738 return gfc_finish_block (&block);
1742 /* The jump table types are stored in static variables to avoid
1743 constructing them from scratch every single time. */
/* Index 0 is used for kind=1 selectors, index 1 for kind=4 (see the
   kind checks below).  */
1744 static GTY(()) tree select_struct[2];
1746 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1747 Instead of generating compares and jumps, it is far simpler to
1748 generate a data structure describing the cases in order and call a
1749 library subroutine that locates the right case.
1750 This is particularly true because this is the only case where we
1751 might have to dispose of a temporary.
1752 The library subroutine returns a pointer to jump to or NULL if no
1753 branches are to be taken. */
1756 gfc_trans_character_select (gfc_code *code)
1758 tree init, end_label, tmp, type, case_num, label, fndecl;
1759 stmtblock_t block, body;
1764 VEC(constructor_elt,gc) *inits = NULL;
1766 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1768 /* The jump table types are stored in static variables to avoid
1769 constructing them from scratch every single time. */
1770 static tree ss_string1[2], ss_string1_len[2];
1771 static tree ss_string2[2], ss_string2_len[2];
1772 static tree ss_target[2];
/* Rewind the (already sorted/linked) case list to its leftmost entry.  */
1774 cp = code->block->ext.block.case_list;
1775 while (cp->left != NULL)
1778 /* Generate the body */
1779 gfc_start_block (&block);
1780 gfc_init_se (&expr1se, NULL);
1781 gfc_conv_expr_reference (&expr1se, code->expr1);
1783 gfc_add_block_to_block (&block, &expr1se.pre);
1785 end_label = gfc_build_label_decl (NULL_TREE);
1787 gfc_init_block (&body);
1789 /* Attempt to optimize length 1 selects. */
/* Fast path: if the selector has length 1, each case reduces to a single
   character and the construct can become an integer SWITCH_EXPR.  The
   scan below verifies every case label is compatible with that; on any
   mismatch the optimization is abandoned (control flow of the bail-out
   is partly elided here).  */
1790 if (integer_onep (expr1se.string_length))
1792 for (d = cp; d; d = d->right)
1797 gcc_assert (d->low->expr_type == EXPR_CONSTANT
1798 && d->low->ts.type == BT_CHARACTER);
/* Labels longer than one char only match if the tail is all blanks
   (Fortran comparison semantics pad with blanks).  */
1799 if (d->low->value.character.length > 1)
1801 for (i = 1; i < d->low->value.character.length; i++)
1802 if (d->low->value.character.string[i] != ' ')
1804 if (i != d->low->value.character.length)
1806 if (optimize && d->high && i == 1)
1808 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1809 && d->high->ts.type == BT_CHARACTER);
1810 if (d->high->value.character.length > 1
1811 && (d->low->value.character.string[0]
1812 == d->high->value.character.string[0])
1813 && d->high->value.character.string[1] != ' '
1814 && ((d->low->value.character.string[1] < ' ')
1815 == (d->high->value.character.string[1]
1825 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1826 && d->high->ts.type == BT_CHARACTER);
1827 if (d->high->value.character.length > 1)
1829 for (i = 1; i < d->high->value.character.length; i++)
1830 if (d->high->value.character.string[i] != ' ')
1832 if (i != d->high->value.character.length)
/* Length-1 path: build an integer switch keyed on the first character.  */
1839 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
1841 for (c = code->block; c; c = c->block)
1843 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1849 /* Assume it's the default case. */
1850 low = high = NULL_TREE;
1854 /* CASE ('ab') or CASE ('ab':'az') will never match
1855 any length 1 character. */
1856 if (cp->low->value.character.length > 1
1857 && cp->low->value.character.string[1] != ' ')
/* An empty case string compares like a blank; r presumably defaults
   to the blank character in the elided else branch — confirm.  */
1860 if (cp->low->value.character.length > 0)
1861 r = cp->low->value.character.string[0];
1864 low = build_int_cst (ctype, r);
1866 /* If there's only a lower bound, set the high bound
1867 to the maximum value of the case expression. */
1869 high = TYPE_MAX_VALUE (ctype);
1875 || (cp->low->value.character.string[0]
1876 != cp->high->value.character.string[0]))
1878 if (cp->high->value.character.length > 0)
1879 r = cp->high->value.character.string[0];
1882 high = build_int_cst (ctype, r);
1885 /* Unbounded case. */
1887 low = TYPE_MIN_VALUE (ctype);
1890 /* Build a label. */
1891 label = gfc_build_label_decl (NULL_TREE);
1893 /* Add this case label.
1894 Add parameter 'label', make it match GCC backend. */
1895 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1896 void_type_node, low, high, label);
1897 gfc_add_expr_to_block (&body, tmp);
1900 /* Add the statements for this case. */
1901 tmp = gfc_trans_code (c->next);
1902 gfc_add_expr_to_block (&body, tmp);
1904 /* Break to the end of the construct. */
1905 tmp = build1_v (GOTO_EXPR, end_label);
1906 gfc_add_expr_to_block (&body, tmp);
/* Reduce the selector to its single character and switch on it.  */
1909 tmp = gfc_string_to_single_character (expr1se.string_length,
1911 code->expr1->ts.kind);
1912 case_num = gfc_create_var (ctype, "case_num");
1913 gfc_add_modify (&block, case_num, tmp);
1915 gfc_add_block_to_block (&block, &expr1se.post);
1917 tmp = gfc_finish_block (&body);
1918 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1919 gfc_add_expr_to_block (&block, tmp);
1921 tmp = build1_v (LABEL_EXPR, end_label);
1922 gfc_add_expr_to_block (&block, tmp);
1924 return gfc_finish_block (&block);
/* General path: build a static jump table and call the runtime library
   to locate the matching case.  k selects the kind-specific cached
   struct type (0 for kind=1, 1 for kind=4).  */
1928 if (code->expr1->ts.kind == 1)
1930 else if (code->expr1->ts.kind == 4)
/* Lazily build the RECORD_TYPE describing one jump-table entry.  */
1935 if (select_struct[k] == NULL)
1938 select_struct[k] = make_node (RECORD_TYPE);
1940 if (code->expr1->ts.kind == 1)
1941 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1942 else if (code->expr1->ts.kind == 4)
1943 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1948 #define ADD_FIELD(NAME, TYPE) \
1949 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
1950 get_identifier (stringize(NAME)), \
1954 ADD_FIELD (string1, pchartype);
1955 ADD_FIELD (string1_len, gfc_charlen_type_node);
1957 ADD_FIELD (string2, pchartype);
1958 ADD_FIELD (string2_len, gfc_charlen_type_node);
1960 ADD_FIELD (target, integer_type_node);
1963 gfc_finish_type (select_struct[k]);
/* First pass: number the cases (count n) for the jump table.  */
1967 for (d = cp; d; d = d->right)
/* Emit one CASE_LABEL_EXPR per case, keyed on its table index d->n;
   NULL key marks the default case.  */
1970 for (c = code->block; c; c = c->block)
1972 for (d = c->ext.block.case_list; d; d = d->next)
1974 label = gfc_build_label_decl (NULL_TREE);
1975 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1977 (d->low == NULL && d->high == NULL)
1978 ? NULL : build_int_cst (NULL_TREE, d->n),
1980 gfc_add_expr_to_block (&body, tmp);
1983 tmp = gfc_trans_code (c->next);
1984 gfc_add_expr_to_block (&body, tmp);
1986 tmp = build1_v (GOTO_EXPR, end_label);
1987 gfc_add_expr_to_block (&body, tmp);
1990 /* Generate the structure describing the branches */
1991 for (d = cp; d; d = d->right)
1993 VEC(constructor_elt,gc) *node = NULL;
1995 gfc_init_se (&se, NULL);
/* A NULL low/high bound is encoded as a null pointer with zero length.  */
1999 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2000 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2004 gfc_conv_expr_reference (&se, d->low);
2006 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2007 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2010 if (d->high == NULL)
2012 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2013 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2017 gfc_init_se (&se, NULL);
2018 gfc_conv_expr_reference (&se, d->high);
2020 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2021 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2024 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2025 build_int_cst (integer_type_node, d->n));
2027 tmp = build_constructor (select_struct[k], node);
2028 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2031 type = build_array_type (select_struct[k],
2032 build_index_type (build_int_cst (NULL_TREE, n-1)));
2034 init = build_constructor (type, inits);
2035 TREE_CONSTANT (init) = 1;
2036 TREE_STATIC (init) = 1;
2037 /* Create a static variable to hold the jump table. */
2038 tmp = gfc_create_var (type, "jumptable");
2039 TREE_CONSTANT (tmp) = 1;
2040 TREE_STATIC (tmp) = 1;
2041 TREE_READONLY (tmp) = 1;
2042 DECL_INITIAL (tmp) = init;
2045 /* Build the library call */
2046 init = gfc_build_addr_expr (pvoid_type_node, init);
2048 if (code->expr1->ts.kind == 1)
2049 fndecl = gfor_fndecl_select_string;
2050 else if (code->expr1->ts.kind == 4)
2051 fndecl = gfor_fndecl_select_string_char4;
/* The runtime returns the matching case's index; switch on it.  */
2055 tmp = build_call_expr_loc (input_location,
2056 fndecl, 4, init, build_int_cst (NULL_TREE, n),
2057 expr1se.expr, expr1se.string_length);
2058 case_num = gfc_create_var (integer_type_node, "case_num");
2059 gfc_add_modify (&block, case_num, tmp);
2061 gfc_add_block_to_block (&block, &expr1se.post);
2063 tmp = gfc_finish_block (&body);
2064 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2065 gfc_add_expr_to_block (&block, tmp);
2067 tmp = build1_v (LABEL_EXPR, end_label);
2068 gfc_add_expr_to_block (&block, tmp);
2070 return gfc_finish_block (&block);
2074 /* Translate the three variants of the SELECT CASE construct.
2076 SELECT CASEs with INTEGER case expressions can be translated to an
2077 equivalent GENERIC switch statement, and for LOGICAL case
2078 expressions we build one or two if-else compares.
2080 SELECT CASEs with CHARACTER case expressions are a whole different
2081 story, because they don't exist in GENERIC. So we sort them and
2082 do a binary search at runtime.
2084 Fortran has no BREAK statement, and it does not allow jumps from
2085 one case block to another. That makes things a lot easier for
/* Dispatcher: picks the per-type translator based on the selector's
   type and appends the shared exit label.  */
2089 gfc_trans_select (gfc_code * code)
2095 gcc_assert (code && code->expr1);
2096 gfc_init_block (&block);
2098 /* Build the exit label and hang it in. */
/* Stored on CODE so EXIT statements inside the construct can find it.  */
2099 exit_label = gfc_build_label_decl (NULL_TREE);
2100 code->exit_label = exit_label;
2102 /* Empty SELECT constructs are legal. */
2103 if (code->block == NULL)
2104 body = build_empty_stmt (input_location);
2106 /* Select the correct translation function. */
2108 switch (code->expr1->ts.type)
2111 body = gfc_trans_logical_select (code);
2115 body = gfc_trans_integer_select (code);
2119 body = gfc_trans_character_select (code);
2123 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2127 /* Build everything together. */
2128 gfc_add_expr_to_block (&block, body);
2129 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2131 return gfc_finish_block (&block);
2135 /* Traversal function to substitute a replacement symtree if the symbol
2136 in the expression is the same as that passed. f == 2 signals that
2137 that variable itself is not to be checked - only the references.
2138 This group of functions is used when the variable expression in a
2139 FORALL assignment has internal references. For example:
2140 FORALL (i = 1:4) p(p(i)) = i
2141 The only recourse here is to store a copy of 'p' for the index
/* Shared state for the replace/restore pair: the substitute symtree and
   the one it replaced.  Only one substitution is live at a time.  */
2144 static gfc_symtree *new_symtree;
2145 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: swap in NEW_SYMTREE wherever EXPR
   refers to SYM (subject to the *f skip flag described above).  */
2148 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2150 if (expr->expr_type != EXPR_VARIABLE)
2155 else if (expr->symtree->n.sym == sym)
2156 expr->symtree = new_symtree;
/* Walk expression E replacing references to SYM via forall_replace.
   F is forwarded as the traversal flag (2 = skip the root variable).  */
2162 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2164 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback for gfc_traverse_expr: undo forall_replace by swapping
   OLD_SYMTREE back in wherever NEW_SYMTREE was substituted.  */
2168 forall_restore (gfc_expr *expr,
2169 gfc_symbol *sym ATTRIBUTE_UNUSED,
2170 int *f ATTRIBUTE_UNUSED)
2172 if (expr->expr_type != EXPR_VARIABLE)
2175 if (expr->symtree == new_symtree)
2176 expr->symtree = old_symtree;
/* Walk expression E restoring every substituted symtree reference.  */
2182 forall_restore_symtree (gfc_expr *e)
2184 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the FORALL assignment target C->expr1.
   Copy-in code is appended to PRE and cleanup to POST; afterwards both
   C->expr1 and C->expr2 are rewritten (via forall_replace_symtree) to
   refer to a fake symbol whose backend_decl is the temporary.  The
   caller is expected to free the fake symtree via
   cleanup_forall_symtrees.  */
2188 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2193 gfc_symbol *new_sym;
2194 gfc_symbol *old_sym;
2198 /* Build a copy of the lvalue. */
2199 old_symtree = c->expr1->symtree;
2200 old_sym = old_symtree->n.sym;
2201 e = gfc_lval_expr_from_sym (old_sym);
/* Array case: copy through a descriptor temporary.  */
2202 if (old_sym->attr.dimension)
2204 gfc_init_se (&tse, NULL);
2205 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2206 gfc_add_block_to_block (pre, &tse.pre);
2207 gfc_add_block_to_block (post, &tse.post);
2208 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2210 if (e->ts.type != BT_CHARACTER)
2212 /* Use the variable offset for the temporary. */
2213 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2214 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: create a plain temporary and assign the value to it.  */
2219 gfc_init_se (&tse, NULL);
2220 gfc_init_se (&rse, NULL);
2221 gfc_conv_expr (&rse, e);
2222 if (e->ts.type == BT_CHARACTER)
2224 tse.string_length = rse.string_length;
2225 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2227 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2229 gfc_add_block_to_block (pre, &tse.pre);
2230 gfc_add_block_to_block (post, &tse.post);
2234 tmp = gfc_typenode_for_spec (&e->ts);
2235 tse.expr = gfc_create_var (tmp, "temp");
2238 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2239 e->expr_type == EXPR_VARIABLE, true);
2240 gfc_add_expr_to_block (pre, tmp);
2244 /* Create a new symbol to represent the lvalue. */
2245 new_sym = gfc_new_symbol (old_sym->name, NULL);
2246 new_sym->ts = old_sym->ts;
2247 new_sym->attr.referenced = 1;
2248 new_sym->attr.temporary = 1;
2249 new_sym->attr.dimension = old_sym->attr.dimension;
2250 new_sym->attr.flavor = old_sym->attr.flavor;
2252 /* Use the temporary as the backend_decl. */
2253 new_sym->backend_decl = tse.expr;
2255 /* Create a fake symtree for it. */
2257 new_symtree = gfc_new_symtree (&root, old_sym->name);
2258 new_symtree->n.sym = new_sym;
2259 gcc_assert (new_symtree == root);
2261 /* Go through the expression reference replacing the old_symtree
/* Flag 2: keep the assignment target itself, replace only the inner
   references (see forall_replace).  */
2263 forall_replace_symtree (c->expr1, old_sym, 2);
2265 /* Now we have made this temporary, we might as well use it for
2266 the right hand side. */
2267 forall_replace_symtree (c->expr2, old_sym, 1);
2271 /* Handles dependencies in forall assignments. */
/* Decide whether the FORALL assignment C needs a temporary and, for
   self-referencing targets and overlapping substrings, create one via
   forall_make_variable_temp (copy-in into PRE, cleanup into POST).  */
2273 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2280 lsym = c->expr1->symtree->n.sym;
/* Standard lhs/rhs overlap analysis first.  */
2281 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2283 /* Now check for dependencies within the 'variable'
2284 expression itself. These are treated by making a complete
2285 copy of variable and changing all the references to it
2286 point to the copy instead. Note that the shallow copy of
2287 the variable will not suffice for derived types with
2288 pointer components. We therefore leave these to their
2290 if (lsym->ts.type == BT_DERIVED
2291 && lsym->ts.u.derived->attr.pointer_comp)
/* lhs references itself through a FORALL index (e.g. p(p(i))).  */
2295 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2297 forall_make_variable_temp (c, pre, post);
2301 /* Substrings with dependencies are treated in the same
2303 if (c->expr1->ts.type == BT_CHARACTER
2305 && c->expr2->expr_type == EXPR_VARIABLE
2306 && lsym == c->expr2->symtree->n.sym)
2308 for (lref = c->expr1->ref; lref; lref = lref->next)
2309 if (lref->type == REF_SUBSTRING)
2311 for (rref = c->expr2->ref; rref; rref = rref->next)
2312 if (rref->type == REF_SUBSTRING)
/* Only an rhs substring starting before the lhs substring overlaps in
   the hazardous direction.  */
2316 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2318 forall_make_variable_temp (c, pre, post);
/* Undo forall_make_variable_temp: restore the original symtrees in both
   sides of the assignment and free the fake symbol and symtree.  */
2327 cleanup_forall_symtrees (gfc_code *c)
2329 forall_restore_symtree (c->expr1);
2330 forall_restore_symtree (c->expr2);
2331 gfc_free (new_symtree->n.sym);
2332 gfc_free (new_symtree);
2336 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2337 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2338 indicates whether we should generate code to test the FORALLs mask
2339 array. OUTER is the loop header to be used for initializing mask
2342 The generated loop format is:
2343 count = (end - start + step) / step
/* Wraps BODY in one counted loop per FORALL index variable (innermost
   first) and returns the resulting tree.  */
2356 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2357 int mask_flag, stmtblock_t *outer)
2365 tree var, start, end, step;
2368 /* Initialize the mask index outside the FORALL nest. */
2369 if (mask_flag && forall_tmp->mask)
2370 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2372 iter = forall_tmp->this_loop;
2373 nvar = forall_tmp->nvar;
/* Each iteration wraps the current BODY in one more enclosing loop.  */
2374 for (n = 0; n < nvar; n++)
2377 start = iter->start;
2381 exit_label = gfc_build_label_decl (NULL_TREE);
2382 TREE_USED (exit_label) = 1;
2384 /* The loop counter. */
2385 count = gfc_create_var (TREE_TYPE (var), "count");
2387 /* The body of the loop. */
2388 gfc_init_block (&block);
2390 /* The exit condition. */
/* Count-down loop: leave as soon as count <= 0.  */
2391 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2392 count, build_int_cst (TREE_TYPE (count), 0));
2393 tmp = build1_v (GOTO_EXPR, exit_label);
2394 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2395 cond, tmp, build_empty_stmt (input_location));
2396 gfc_add_expr_to_block (&block, tmp);
2398 /* The main loop body. */
2399 gfc_add_expr_to_block (&block, body);
2401 /* Increment the loop variable. */
2402 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2404 gfc_add_modify (&block, var, tmp);
2406 /* Advance to the next mask element. Only do this for the
/* ... innermost dimension (n == 0): the mask is indexed linearly.  */
2408 if (n == 0 && mask_flag && forall_tmp->mask)
2410 tree maskindex = forall_tmp->maskindex;
2411 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2412 maskindex, gfc_index_one_node);
2413 gfc_add_modify (&block, maskindex, tmp);
2416 /* Decrement the loop counter. */
2417 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2418 build_int_cst (TREE_TYPE (var), 1));
2419 gfc_add_modify (&block, count, tmp);
2421 body = gfc_finish_block (&block);
2423 /* Loop var initialization. */
2424 gfc_init_block (&block);
2425 gfc_add_modify (&block, var, start);
2428 /* Initialize the loop counter. */
/* count = (end - start + step) / step, per the header comment; the
   continuation operands are elided in this listing.  */
2429 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2431 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2433 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2435 gfc_add_modify (&block, count, tmp);
2437 /* The loop expression. */
2438 tmp = build1_v (LOOP_EXPR, body);
2439 gfc_add_expr_to_block (&block, tmp);
2441 /* The exit label. */
2442 tmp = build1_v (LABEL_EXPR, exit_label);
2443 gfc_add_expr_to_block (&block, tmp);
2445 body = gfc_finish_block (&block);
2452 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2453 is nonzero, the body is controlled by all masks in the forall nest.
2454 Otherwise, the innermost loop is not controlled by it's mask. This
2455 is used for initializing that mask. */
/* Walks the forall_info chain from innermost to outermost (prev_nest),
   wrapping BODY in each level's mask test and loops.  */
2458 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2463 forall_info *forall_tmp;
2464 tree mask, maskindex;
2466 gfc_start_block (&header);
2468 forall_tmp = nested_forall_info;
2469 while (forall_tmp != NULL)
2471 /* Generate body with masks' control. */
2474 mask = forall_tmp->mask;
2475 maskindex = forall_tmp->maskindex;
2477 /* If a mask was specified make the assignment conditional. */
2480 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2481 body = build3_v (COND_EXPR, tmp, body,
2482 build_empty_stmt (input_location));
/* Wrap the (possibly masked) body in this level's iteration loops;
   mask-index initialization lands in HEADER, outside the nest.  */
2485 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2486 forall_tmp = forall_tmp->prev_nest;
2490 gfc_add_expr_to_block (&header, body);
2491 return gfc_finish_block (&header);
2495 /* Allocate data for holding a temporary array. Returns either a local
2496 temporary array or a pointer variable. */
/* BYTESIZE is the allocation size in bytes, SIZE the element count.
   Small constant-size temporaries go on the stack; otherwise memory is
   malloc'ed in PBLOCK and *PDATA receives the pointer (as void*) so the
   caller can free it later.  */
2499 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* Upper bound of the index range is size - 1 (zero-based).  */
2506 if (INTEGER_CST_P (size))
2507 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2508 size, gfc_index_one_node);
2512 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2513 type = build_array_type (elem_type, type);
2514 if (gfc_can_put_var_on_stack (bytesize))
2516 gcc_assert (INTEGER_CST_P (size));
2517 tmpvar = gfc_create_var (type, "temp");
/* Heap fallback: pointer variable + runtime malloc.  */
2522 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2523 *pdata = convert (pvoid_type_node, tmpvar);
2525 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2526 gfc_add_modify (pblock, tmpvar, tmp);
2532 /* Generate codes to copy the temporary to the actual lhs. */
/* EXPR is the lhs; TMP1 holds the temporary values indexed by COUNT1.
   WHEREMASK (indexed by COUNT3) optionally guards each element copy;
   INVERT negates the mask test.  Both counters are advanced in the
   generated code.  Returns the copy-back statement tree.  */
2535 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2536 tree count1, tree wheremask, bool invert)
2540 stmtblock_t block, body;
2546 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single assignment, no scalarization loop needed.  */
2548 if (lss == gfc_ss_terminator)
2550 gfc_start_block (&block);
2552 gfc_init_se (&lse, NULL);
2554 /* Translate the expression. */
2555 gfc_conv_expr (&lse, expr);
2557 /* Form the expression for the temporary. */
2558 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2560 /* Use the scalar assignment as is. */
2561 gfc_add_block_to_block (&block, &lse.pre);
2562 gfc_add_modify (&block, lse.expr, tmp);
2563 gfc_add_block_to_block (&block, &lse.post);
2565 /* Increment the count1. */
2566 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2567 count1, gfc_index_one_node);
2568 gfc_add_modify (&block, count1, tmp);
2570 tmp = gfc_finish_block (&block);
/* Array lhs: drive the scalarizer over the lhs and read TMP1[count1]
   element by element.  */
2574 gfc_start_block (&block);
2576 gfc_init_loopinfo (&loop1);
2577 gfc_init_se (&rse, NULL);
2578 gfc_init_se (&lse, NULL);
2580 /* Associate the lss with the loop. */
2581 gfc_add_ss_to_loop (&loop1, lss);
2583 /* Calculate the bounds of the scalarization. */
2584 gfc_conv_ss_startstride (&loop1);
2585 /* Setup the scalarizing loops. */
2586 gfc_conv_loop_setup (&loop1, &expr->where);
2588 gfc_mark_ss_chain_used (lss, 1);
2590 /* Start the scalarized loop body. */
2591 gfc_start_scalarized_body (&loop1, &body);
2593 /* Setup the gfc_se structures. */
2594 gfc_copy_loopinfo_to_se (&lse, &loop1);
2597 /* Form the expression of the temporary. */
2598 if (lss != gfc_ss_terminator)
2599 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2600 /* Translate expr. */
2601 gfc_conv_expr (&lse, expr);
2603 /* Use the scalar assignment. */
2604 rse.string_length = lse.string_length;
2605 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2607 /* Form the mask expression according to the mask tree list. */
2610 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT path: negate the mask element before guarding the copy.  */
2612 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2613 TREE_TYPE (wheremaskexpr),
2615 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2617 build_empty_stmt (input_location));
2620 gfc_add_expr_to_block (&body, tmp);
2622 /* Increment count1. */
2623 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2624 count1, gfc_index_one_node);
2625 gfc_add_modify (&body, count1, tmp);
2627 /* Increment count3. */
2630 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2631 gfc_array_index_type, count3,
2632 gfc_index_one_node);
2633 gfc_add_modify (&body, count3, tmp);
2636 /* Generate the copying loops. */
2637 gfc_trans_scalarizing_loops (&loop1, &body);
2638 gfc_add_block_to_block (&block, &loop1.pre);
2639 gfc_add_block_to_block (&block, &loop1.post);
2640 gfc_cleanup_loop (&loop1);
2642 tmp = gfc_finish_block (&block);
2648 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2649 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2650 and should not be freed. WHEREMASK is the conditional execution mask
2651 whose sense may be inverted by INVERT. */
/* NOTE(review): mirror image of generate_loop_for_temp_to_lhs — fills
   tmp1[count1++] from EXPR2 under the optional WHERE mask indexed by
   COUNT3.  Some lines are elided from this excerpt.  */
2654 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2655 tree count1, gfc_ss *lss, gfc_ss *rss,
2656 tree wheremask, bool invert)
2658 stmtblock_t block, body1;
2665 gfc_start_block (&block);
2667 gfc_init_se (&rse, NULL);
2668 gfc_init_se (&lse, NULL);
/* Scalar lhs: translate the rhs once, no scalarizer needed.  */
2670 if (lss == gfc_ss_terminator)
2672 gfc_init_block (&body1);
2673 gfc_conv_expr (&rse, expr2);
2674 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2678 /* Initialize the loop. */
2679 gfc_init_loopinfo (&loop);
2681 /* We may need LSS to determine the shape of the expression. */
2682 gfc_add_ss_to_loop (&loop, lss);
2683 gfc_add_ss_to_loop (&loop, rss);
2685 gfc_conv_ss_startstride (&loop);
2686 gfc_conv_loop_setup (&loop, &expr2->where);
2688 gfc_mark_ss_chain_used (rss, 1);
2689 /* Start the loop body. */
2690 gfc_start_scalarized_body (&loop, &body1);
2692 /* Translate the expression. */
2693 gfc_copy_loopinfo_to_se (&rse, &loop);
2695 gfc_conv_expr (&rse, expr2)
2697 /* Form the expression of the temporary. */
2698 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2701 /* Use the scalar assignment. */
2702 lse.string_length = rse.string_length;
/* The rhs may be reused later only when it is a plain variable.  */
2703 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2704 expr2->expr_type == EXPR_VARIABLE, true);
2706 /* Form the mask expression according to the mask tree list. */
2709 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2711 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2712 TREE_TYPE (wheremaskexpr),
2714 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2716 build_empty_stmt (input_location));
2719 gfc_add_expr_to_block (&body1, tmp);
2721 if (lss == gfc_ss_terminator)
2723 gfc_add_block_to_block (&block, &body1);
2725 /* Increment count1. */
2726 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2727 count1, gfc_index_one_node);
2728 gfc_add_modify (&block, count1, tmp);
2732 /* Increment count1. */
2733 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2734 count1, gfc_index_one_node);
2735 gfc_add_modify (&body1, count1, tmp);
2737 /* Increment count3. */
2740 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2741 gfc_array_index_type,
2742 count3, gfc_index_one_node);
2743 gfc_add_modify (&body1, count3, tmp);
2746 /* Generate the copying loops. */
2747 gfc_trans_scalarizing_loops (&loop, &body1);
2749 gfc_add_block_to_block (&block, &loop.pre);
2750 gfc_add_block_to_block (&block, &loop.post);
2752 gfc_cleanup_loop (&loop);
2753 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2754 as tree nodes in SS may not be valid in different scope. */
2757 tmp = gfc_finish_block (&block);
2762 /* Calculate the size of temporary needed in the assignment inside forall.
2763 LSS and RSS are filled in this function. */
/* NOTE(review): returns the element count of the lhs/rhs shape as a
   gfc_array_index_type expression, evaluated into PBLOCK.  Bounds
   checking is suppressed while the scalarizer computes the shape, and
   the original rtcheck flags are restored afterwards.  Some lines of
   this function are elided from this excerpt.  */
2766 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2767 stmtblock_t * pblock,
2768 gfc_ss **lss, gfc_ss **rss)
2776 *lss = gfc_walk_expr (expr1);
2779 size = gfc_index_one_node;
2780 if (*lss != gfc_ss_terminator)
2782 gfc_init_loopinfo (&loop);
2784 /* Walk the RHS of the expression. */
2785 *rss = gfc_walk_expr (expr2);
2786 if (*rss == gfc_ss_terminator)
2788 /* The rhs is scalar. Add a ss for the expression. */
2789 *rss = gfc_get_ss ();
2790 (*rss)->next = gfc_ss_terminator;
2791 (*rss)->type = GFC_SS_SCALAR;
2792 (*rss)->expr = expr2;
2795 /* Associate the SS with the loop. */
2796 gfc_add_ss_to_loop (&loop, *lss);
2797 /* We don't actually need to add the rhs at this point, but it might
2798 make guessing the loop bounds a bit easier. */
2799 gfc_add_ss_to_loop (&loop, *rss);
2801 /* We only want the shape of the expression, not rest of the junk
2802 generated by the scalarizer. */
2803 loop.array_parameter = 1;
2805 /* Calculate the bounds of the scalarization. */
2806 save_flag = gfc_option.rtcheck;
/* FIX: was "&= !GFC_RTCHECK_BOUNDS".  Logical NOT of a nonzero mask is
   0, so the old code cleared EVERY runtime-check flag, not just bounds
   checking.  Bitwise complement clears only the intended bit; the full
   flag set is restored from save_flag below either way.  */
2807 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
2808 gfc_conv_ss_startstride (&loop);
2809 gfc_option.rtcheck = save_flag;
2810 gfc_conv_loop_setup (&loop, &expr2->where);
2812 /* Figure out how many elements we need. */
2813 for (i = 0; i < loop.dimen; i++)
/* Extent of dimension i is (to - from + 1); accumulate the product.  */
2815 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2816 gfc_array_index_type,
2817 gfc_index_one_node, loop.from[i]);
2818 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2819 gfc_array_index_type, tmp, loop.to[i]);
2820 size = fold_build2_loc (input_location, MULT_EXPR,
2821 gfc_array_index_type, size, tmp);
2823 gfc_add_block_to_block (pblock, &loop.pre);
2824 size = gfc_evaluate_now (size, pblock);
2825 gfc_add_block_to_block (pblock, &loop.post);
2827 /* TODO: write a function that cleans up a loopinfo without freeing
2828 the SS chains. Currently a NOP. */
2835 /* Calculate the overall iterator number of the nested forall construct.
2836 This routine actually calculates the number of times the body of the
2837 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2838 that by the expression INNER_SIZE. The BLOCK argument specifies the
2839 block in which to calculate the result, and the optional INNER_SIZE_BODY
2840 argument contains any statements that need to executed (inside the loop)
2841 to initialize or calculate INNER_SIZE. */
/* NOTE(review): some lines of this function are elided from this
   excerpt (declarations, the constant-result return, and the final
   return of NUMBER).  */
2844 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2845 stmtblock_t *inner_size_body, stmtblock_t *block)
2847 forall_info *forall_tmp = nested_forall_info;
2851 /* We can eliminate the innermost unconditional loops with constant
/* Fold constant, unmasked loop sizes into INNER_SIZE at compile time.  */
2853 if (INTEGER_CST_P (inner_size))
2856 && !forall_tmp->mask
2857 && INTEGER_CST_P (forall_tmp->size))
2859 inner_size = fold_build2_loc (input_location, MULT_EXPR,
2860 gfc_array_index_type,
2861 inner_size, forall_tmp->size);
2862 forall_tmp = forall_tmp->prev_nest;
2865 /* If there are no loops left, we have our constant result. */
2870 /* Otherwise, create a temporary variable to compute the result. */
2871 number = gfc_create_var (gfc_array_index_type, "num");
2872 gfc_add_modify (block, number, gfc_index_zero_node);
2874 gfc_start_block (&body);
2875 if (inner_size_body)
2876 gfc_add_block_to_block (&body, inner_size_body);
/* Accumulate: number += inner_size, once per loop-body execution.  */
2878 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2879 gfc_array_index_type, number, inner_size);
2882 gfc_add_modify (&body, number, tmp);
2883 tmp = gfc_finish_block (&body);
2885 /* Generate loops. */
2886 if (forall_tmp != NULL)
2887 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2889 gfc_add_expr_to_block (block, tmp);
2895 /* Allocate temporary for forall construct. SIZE is the size of temporary
2896 needed. PTEMP1 is returned for space free. */
/* NOTE(review): converts an element count into a byte size (skipping
   the multiply when the element size is 1) and delegates the actual
   allocation to gfc_do_allocate; dereferences the result when a
   pointer was returned.  Declarations and the return are elided from
   this excerpt.  */
2899 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2906 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2907 if (!integer_onep (unit))
2908 bytesize = fold_build2_loc (input_location, MULT_EXPR,
2909 gfc_array_index_type, size, unit);
2914 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2917 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2922 /* Allocate temporary for forall construct according to the information in
2923 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2924 assignment inside forall. PTEMP1 is returned for space free. */
/* NOTE(review): thin wrapper — total size is the nest's iteration
   count times INNER_SIZE, then allocation is done by the _1 helper.  */
2927 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2928 tree inner_size, stmtblock_t * inner_size_body,
2929 stmtblock_t * block, tree * ptemp1)
2933 /* Calculate the total size of temporary needed in forall construct. */
2934 size = compute_overall_iter_number (nested_forall_info, inner_size,
2935 inner_size_body, block);
2937 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2941 /* Handle assignments inside forall which need temporary.
2943 forall (i=start:end:stride; maskexpr)
2946 (where e,f<i> are arbitrary expressions possibly involving i
2947 and there is a dependency between e<i> and f<i>)
2949 masktmp(:) = maskexpr(:)
2954 for (i = start; i <= end; i += stride)
2958 for (i = start; i <= end; i += stride)
2960 if (masktmp[maskindex++])
2961 tmp[count1++] = f<i>
2965 for (i = start; i <= end; i += stride)
2967 if (masktmp[maskindex++])
2968 e<i> = tmp[count1++]
/* NOTE(review): two-pass scheme — first all rhs values are evaluated
   into a linear temporary, then copied to the lhs, so a dependency
   between e<i> and f<i> cannot corrupt the result.  Some lines are
   elided from this excerpt.  */
2973 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2974 tree wheremask, bool invert,
2975 forall_info * nested_forall_info,
2976 stmtblock_t * block)
2984 stmtblock_t inner_size_body;
2986 /* Create vars. count1 is the current iterator number of the nested
2988 count1 = gfc_create_var (gfc_array_index_type, "count1");
2990 /* Count is the wheremask index. */
2993 count = gfc_create_var (gfc_array_index_type, "count");
2994 gfc_add_modify (block, count, gfc_index_zero_node);
2999 /* Initialize count1. */
3000 gfc_add_modify (block, count1, gfc_index_zero_node);
3002 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3003 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3004 gfc_init_block (&inner_size_body);
3005 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3008 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3009 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
/* Character lhs: make sure the length has a backend decl before we
   build the temporary's element type from it.  */
3011 if (!expr1->ts.u.cl->backend_decl)
3014 gfc_init_se (&tse, NULL);
3015 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3016 expr1->ts.u.cl->backend_decl = tse.expr;
3018 type = gfc_get_character_type_len (gfc_default_character_kind,
3019 expr1->ts.u.cl->backend_decl);
3022 type = gfc_typenode_for_spec (&expr1->ts);
3024 /* Allocate temporary for nested forall construct according to the
3025 information in nested_forall_info and inner_size. */
3026 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3027 &inner_size_body, block, &ptemp1);
3029 /* Generate codes to copy rhs to the temporary . */
3030 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3033 /* Generate body and loops according to the information in
3034 nested_forall_info. */
3035 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3036 gfc_add_expr_to_block (block, tmp);
/* Reset the counters for the second (temp -> lhs) pass.  */
3039 gfc_add_modify (block, count1, gfc_index_zero_node);
3043 gfc_add_modify (block, count, gfc_index_zero_node);
3045 /* Generate codes to copy the temporary to lhs. */
3046 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3049 /* Generate body and loops according to the information in
3050 nested_forall_info. */
3051 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3052 gfc_add_expr_to_block (block, tmp);
3056 /* Free the temporary. */
3057 tmp = gfc_call_free (ptemp1);
3058 gfc_add_expr_to_block (block, tmp);
3063 /* Translate pointer assignment inside FORALL which need temporary. */
/* NOTE(review): same two-pass idea as gfc_trans_assign_need_temp, but
   the temporary holds pointers (scalar lhs) or array descriptors
   (array lhs) instead of data.  Pass 1 stores the rhs pointer or
   descriptor per iteration; pass 2 replays the nest and assigns them
   to the lhs.  Some lines are elided from this excerpt.  */
3066 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3067 forall_info * nested_forall_info,
3068 stmtblock_t * block)
3082 tree tmp, tmp1, ptemp1;
3084 count = gfc_create_var (gfc_array_index_type, "count");
3085 gfc_add_modify (block, count, gfc_index_zero_node);
/* One slot per iteration of the nest; each slot holds one pointer or
   one descriptor.  */
3087 inner_size = integer_one_node;
3088 lss = gfc_walk_expr (expr1);
3089 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: temporary of pointer type.  */
3090 if (lss == gfc_ss_terminator)
3092 type = gfc_typenode_for_spec (&expr1->ts);
3093 type = build_pointer_type (type);
3095 /* Allocate temporary for nested forall construct according to the
3096 information in nested_forall_info and inner_size. */
3097 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3098 inner_size, NULL, block, &ptemp1);
3099 gfc_start_block (&body);
3100 gfc_init_se (&lse, NULL);
3101 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3102 gfc_init_se (&rse, NULL);
3103 rse.want_pointer = 1;
3104 gfc_conv_expr (&rse, expr2);
3105 gfc_add_block_to_block (&body, &rse.pre);
3106 gfc_add_modify (&body, lse.expr,
3107 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3108 gfc_add_block_to_block (&body, &rse.post);
3110 /* Increment count. */
3111 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3112 count, gfc_index_one_node);
3113 gfc_add_modify (&body, count, tmp);
3115 tmp = gfc_finish_block (&body);
3117 /* Generate body and loops according to the information in
3118 nested_forall_info. */
3119 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3120 gfc_add_expr_to_block (block, tmp);
/* Second pass: reset count and assign the stored pointers to lhs.  */
3123 gfc_add_modify (block, count, gfc_index_zero_node);
3125 gfc_start_block (&body);
3126 gfc_init_se (&lse, NULL);
3127 gfc_init_se (&rse, NULL);
3128 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3129 lse.want_pointer = 1;
3130 gfc_conv_expr (&lse, expr1);
3131 gfc_add_block_to_block (&body, &lse.pre);
3132 gfc_add_modify (&body, lse.expr, rse.expr);
3133 gfc_add_block_to_block (&body, &lse.post);
3134 /* Increment count. */
3135 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3136 count, gfc_index_one_node);
3137 gfc_add_modify (&body, count, tmp);
3138 tmp = gfc_finish_block (&body);
3140 /* Generate body and loops according to the information in
3141 nested_forall_info. */
3142 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3143 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: temporary of descriptor type.  */
3147 gfc_init_loopinfo (&loop);
3149 /* Associate the SS with the loop. */
3150 gfc_add_ss_to_loop (&loop, rss);
3152 /* Setup the scalarizing loops and bounds. */
3153 gfc_conv_ss_startstride (&loop);
3155 gfc_conv_loop_setup (&loop, &expr2->where);
3157 info = &rss->data.info;
3158 desc = info->descriptor;
3160 /* Make a new descriptor. */
3161 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3162 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3163 loop.from, loop.to, 1,
3164 GFC_ARRAY_UNKNOWN, true);
3166 /* Allocate temporary for nested forall construct. */
3167 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3168 inner_size, NULL, block, &ptemp1);
3169 gfc_start_block (&body);
3170 gfc_init_se (&lse, NULL);
3171 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3172 lse.direct_byref = 1;
/* Re-walk expr2: the earlier rss chain was consumed by the shape
   computation above.  */
3173 rss = gfc_walk_expr (expr2);
3174 gfc_conv_expr_descriptor (&lse, expr2, rss);
3176 gfc_add_block_to_block (&body, &lse.pre);
3177 gfc_add_block_to_block (&body, &lse.post);
3179 /* Increment count. */
3180 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3181 count, gfc_index_one_node);
3182 gfc_add_modify (&body, count, tmp);
3184 tmp = gfc_finish_block (&body);
3186 /* Generate body and loops according to the information in
3187 nested_forall_info. */
3188 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3189 gfc_add_expr_to_block (block, tmp);
/* Second pass: copy each stored descriptor into the lhs pointer.  */
3192 gfc_add_modify (block, count, gfc_index_zero_node);
3194 parm = gfc_build_array_ref (tmp1, count, NULL);
3195 lss = gfc_walk_expr (expr1);
3196 gfc_init_se (&lse, NULL);
3197 gfc_conv_expr_descriptor (&lse, expr1, lss);
3198 gfc_add_modify (&lse.pre, lse.expr, parm);
3199 gfc_start_block (&body);
3200 gfc_add_block_to_block (&body, &lse.pre);
3201 gfc_add_block_to_block (&body, &lse.post);
3203 /* Increment count. */
3204 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3205 count, gfc_index_one_node);
3206 gfc_add_modify (&body, count, tmp);
3208 tmp = gfc_finish_block (&body);
3210 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3211 gfc_add_expr_to_block (block, tmp);
3213 /* Free the temporary. */
3216 tmp = gfc_call_free (ptemp1);
3217 gfc_add_expr_to_block (block, tmp);
3222 /* FORALL and WHERE statements are really nasty, especially when you nest
3223 them. All the rhs of a forall assignment must be evaluated before the
3224 actual assignments are performed. Presumably this also applies to all the
3225 assignments in an inner where statement. */
3227 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3228 linear array, relying on the fact that we process in the same order in all
3231 forall (i=start:end:stride; maskexpr)
3235 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3237 count = ((end + 1 - start) / stride)
3238 masktmp(:) = maskexpr(:)
3241 for (i = start; i <= end; i += stride)
3243 if (masktmp[maskindex++])
3247 for (i = start; i <= end; i += stride)
3249 if (masktmp[maskindex++])
3253 Note that this code only works when there are no dependencies.
3254 Forall loop with array assignments and data dependencies are a real pain,
3255 because the size of the temporary cannot always be determined before the
3256 loop is executed. This problem is compounded by the presence of nested
/* NOTE(review): recursive worker for FORALL translation; one level of
   the nest per invocation, chained through NESTED_FORALL_INFO.  Many
   lines (declarations, several control-flow lines, frees) are elided
   from this excerpt.  */
3261 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3281 gfc_forall_iterator *fa;
3284 gfc_saved_var *saved_vars;
3285 iter_info *this_forall;
3289 /* Do nothing if the mask is false. */
3291 && code->expr1->expr_type == EXPR_CONSTANT
3292 && !code->expr1->value.logical)
3293 return build_empty_stmt (input_location);
3296 /* Count the FORALL index number. */
3297 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3301 /* Allocate the space for var, start, end, step, varexpr. */
3302 var = (tree *) gfc_getmem (nvar * sizeof (tree));
3303 start = (tree *) gfc_getmem (nvar * sizeof (tree));
3304 end = (tree *) gfc_getmem (nvar * sizeof (tree));
3305 step = (tree *) gfc_getmem (nvar * sizeof (tree));
3306 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
3307 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
3309 /* Allocate the space for info. */
3310 info = (forall_info *) gfc_getmem (sizeof (forall_info));
3312 gfc_start_block (&pre);
3313 gfc_init_block (&post);
3314 gfc_init_block (&block);
/* Translate each iterator triplet and shadow its index symbol with a
   fresh temporary for the duration of this FORALL.  */
3317 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3319 gfc_symbol *sym = fa->var->symtree->n.sym;
3321 /* Allocate space for this_forall. */
3322 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3324 /* Create a temporary variable for the FORALL index. */
3325 tmp = gfc_typenode_for_spec (&sym->ts);
3326 var[n] = gfc_create_var (tmp, sym->name);
3327 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3329 /* Record it in this_forall. */
3330 this_forall->var = var[n];
3332 /* Replace the index symbol's backend_decl with the temporary decl. */
3333 sym->backend_decl = var[n];
3335 /* Work out the start, end and stride for the loop. */
3336 gfc_init_se (&se, NULL);
3337 gfc_conv_expr_val (&se, fa->start);
3338 /* Record it in this_forall. */
3339 this_forall->start = se.expr;
3340 gfc_add_block_to_block (&block, &se.pre);
3343 gfc_init_se (&se, NULL);
3344 gfc_conv_expr_val (&se, fa->end);
3345 /* Record it in this_forall. */
3346 this_forall->end = se.expr;
3347 gfc_make_safe_expr (&se);
3348 gfc_add_block_to_block (&block, &se.pre);
3351 gfc_init_se (&se, NULL);
3352 gfc_conv_expr_val (&se, fa->stride);
3353 /* Record it in this_forall. */
3354 this_forall->step = se.expr;
3355 gfc_make_safe_expr (&se);
3356 gfc_add_block_to_block (&block, &se.pre);
3359 /* Set the NEXT field of this_forall to NULL. */
3360 this_forall->next = NULL;
3361 /* Link this_forall to the info construct. */
3362 if (info->this_loop)
3364 iter_info *iter_tmp = info->this_loop;
3365 while (iter_tmp->next != NULL)
3366 iter_tmp = iter_tmp->next;
3367 iter_tmp->next = this_forall;
3370 info->this_loop = this_forall;
3376 /* Calculate the size needed for the current forall level. */
3377 size = gfc_index_one_node;
3378 for (n = 0; n < nvar; n++)
3380 /* size = (end + step - start) / step. */
3381 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3383 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3385 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3387 tmp = convert (gfc_array_index_type, tmp);
3389 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3393 /* Record the nvar and size of current forall level. */
3399 /* If the mask is .true., consider the FORALL unconditional. */
3400 if (code->expr1->expr_type == EXPR_CONSTANT
3401 && code->expr1->value.logical)
3409 /* First we need to allocate the mask. */
3412 /* As the mask array can be very big, prefer compact boolean types. */
3413 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3414 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3415 size, NULL, &block, &pmask);
3416 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3418 /* Record them in the info structure. */
3419 info->maskindex = maskindex;
3424 /* No mask was specified. */
3425 maskindex = NULL_TREE;
3426 mask = pmask = NULL_TREE;
3429 /* Link the current forall level to nested_forall_info. */
3430 info->prev_nest = nested_forall_info;
3431 nested_forall_info = info;
3433 /* Copy the mask into a temporary variable if required.
3434 For now we assume a mask temporary is needed. */
3437 /* As the mask array can be very big, prefer compact boolean types. */
3438 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3440 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3442 /* Start of mask assignment loop body. */
3443 gfc_start_block (&body);
3445 /* Evaluate the mask expression. */
3446 gfc_init_se (&se, NULL);
3447 gfc_conv_expr_val (&se, code->expr1);
3448 gfc_add_block_to_block (&body, &se.pre);
3450 /* Store the mask. */
3451 se.expr = convert (mask_type, se.expr);
3453 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3454 gfc_add_modify (&body, tmp, se.expr);
3456 /* Advance to the next mask element. */
3457 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3458 maskindex, gfc_index_one_node);
3459 gfc_add_modify (&body, maskindex, tmp);
3461 /* Generate the loops. */
3462 tmp = gfc_finish_block (&body);
3463 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3464 gfc_add_expr_to_block (&block, tmp);
/* Now translate each statement in the FORALL body.  */
3467 c = code->block->next;
3469 /* TODO: loop merging in FORALL statements. */
3470 /* Now that we've got a copy of the mask, generate the assignment loops. */
3476 /* A scalar or array assignment. DO the simple check for
3477 lhs to rhs dependencies. These make a temporary for the
3478 rhs and form a second forall block to copy to variable. */
3479 need_temp = check_forall_dependencies(c, &pre, &post);
3481 /* Temporaries due to array assignment data dependencies introduce
3482 no end of problems. */
3484 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3485 nested_forall_info, &block);
3488 /* Use the normal assignment copying routines. */
3489 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3491 /* Generate body and loops. */
3492 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3494 gfc_add_expr_to_block (&block, tmp);
3497 /* Cleanup any temporary symtrees that have been made to deal
3498 with dependencies. */
3500 cleanup_forall_symtrees (c);
3505 /* Translate WHERE or WHERE construct nested in FORALL. */
3506 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3509 /* Pointer assignment inside FORALL. */
3510 case EXEC_POINTER_ASSIGN:
3511 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3513 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3514 nested_forall_info, &block);
3517 /* Use the normal assignment copying routines. */
3518 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3520 /* Generate body and loops. */
3521 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3523 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current nest info.  */
3528 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3529 gfc_add_expr_to_block (&block, tmp);
3532 /* Explicit subroutine calls are prevented by the frontend but interface
3533 assignments can legitimately produce them. */
3534 case EXEC_ASSIGN_CALL:
3535 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3536 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3537 gfc_add_expr_to_block (&block, tmp);
3547 /* Restore the original index variables. */
3548 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3549 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3551 /* Free the space for var, start, end, step, varexpr. */
3557 gfc_free (saved_vars);
3559 for (this_forall = info->this_loop; this_forall;)
3561 iter_info *next = this_forall->next;
3562 gfc_free (this_forall);
3566 /* Free the space for this forall_info. */
3571 /* Free the temporary for the mask. */
3572 tmp = gfc_call_free (pmask);
3573 gfc_add_expr_to_block (&block, tmp);
3576 pushdecl (maskindex);
3578 gfc_add_block_to_block (&pre, &block);
3579 gfc_add_block_to_block (&pre, &post);
3581 return gfc_finish_block (&pre);
3585 /* Translate the FORALL statement or construct. */
/* Public entry point: delegates to the recursive worker with an empty
   (NULL) nest.  */
3587 tree gfc_trans_forall (gfc_code * code)
3589 return gfc_trans_forall_1 (code, NULL);
3593 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3594 If the WHERE construct is nested in FORALL, compute the overall temporary
3595 needed by the WHERE mask expression multiplied by the iterator number of
3597 ME is the WHERE mask expression.
3598 MASK is the current execution mask upon input, whose sense may or may
3599 not be inverted as specified by the INVERT argument.
3600 CMASK is the updated execution mask on output, or NULL if not required.
3601 PMASK is the pending execution mask on output, or NULL if not required.
3602 BLOCK is the block in which to place the condition evaluation loops. */
/* NOTE(review): computes, per element, cmask = mask .and. cond and
   pmask = mask .and. .not. cond (with MASK possibly inverted first).
   Some lines of this function are elided from this excerpt.  */
3605 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3606 tree mask, bool invert, tree cmask, tree pmask,
3607 tree mask_type, stmtblock_t * block)
3612 stmtblock_t body, body1;
3613 tree count, cond, mtmp;
3616 gfc_init_loopinfo (&loop);
3618 lss = gfc_walk_expr (me);
3619 rss = gfc_walk_expr (me);
3621 /* Variable to index the temporary. */
3622 count = gfc_create_var (gfc_array_index_type, "count");
3623 /* Initialize count. */
3624 gfc_add_modify (block, count, gfc_index_zero_node);
3626 gfc_start_block (&body);
3628 gfc_init_se (&rse, NULL);
3629 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarizer needed.  */
3631 if (lss == gfc_ss_terminator)
3633 gfc_init_block (&body1);
3637 /* Initialize the loop. */
3638 gfc_init_loopinfo (&loop);
3640 /* We may need LSS to determine the shape of the expression. */
3641 gfc_add_ss_to_loop (&loop, lss);
3642 gfc_add_ss_to_loop (&loop, rss);
3644 gfc_conv_ss_startstride (&loop);
3645 gfc_conv_loop_setup (&loop, &me->where);
3647 gfc_mark_ss_chain_used (rss, 1);
3648 /* Start the loop body. */
3649 gfc_start_scalarized_body (&loop, &body1);
3651 /* Translate the expression. */
3652 gfc_copy_loopinfo_to_se (&rse, &loop);
3654 gfc_conv_expr (&rse, me);
3657 /* Variable to evaluate mask condition. */
3658 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the incoming mask element; only needed when there is an
   incoming mask AND at least one output mask to combine it with.  */
3659 if (mask && (cmask || pmask))
3660 mtmp = gfc_create_var (mask_type, "mask");
3661 else mtmp = NULL_TREE;
3663 gfc_add_block_to_block (&body1, &lse.pre);
3664 gfc_add_block_to_block (&body1, &rse.pre);
3666 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3668 if (mask && (cmask || pmask))
3670 tmp = gfc_build_array_ref (mask, count, NULL);
3672 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3673 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] = mask .and. cond  */
3678 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3681 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3683 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[count] = mask .and. .not. cond  */
3688 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3689 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3691 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3693 gfc_add_modify (&body1, tmp1, tmp);
3696 gfc_add_block_to_block (&body1, &lse.post);
3697 gfc_add_block_to_block (&body1, &rse.post);
3699 if (lss == gfc_ss_terminator)
3701 gfc_add_block_to_block (&body, &body1);
3705 /* Increment count. */
3706 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3707 count, gfc_index_one_node);
3708 gfc_add_modify (&body1, count, tmp1);
3710 /* Generate the copying loops. */
3711 gfc_trans_scalarizing_loops (&loop, &body1);
3713 gfc_add_block_to_block (&body, &loop.pre);
3714 gfc_add_block_to_block (&body, &loop.post);
3716 gfc_cleanup_loop (&loop);
3717 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3718 as tree nodes in SS may not be valid in different scope. */
3721 tmp1 = gfc_finish_block (&body);
3722 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3723 if (nested_forall_info != NULL)
3724 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3726 gfc_add_expr_to_block (block, tmp1);
3730 /* Translate an assignment statement in a WHERE statement or construct
3731 statement. The MASK expression is used to control which elements
3732 of EXPR1 shall be assigned. The sense of MASK is specified by
3736 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3737 tree mask, bool invert,
3738 tree count1, tree count2,
/* NOTE(review): this excerpt elides several source lines here (the
   remaining parameter(s) such as CNEXT, the opening brace, and most
   local declarations).  COUNT1 indexes MASK during the main
   scalarized loop; COUNT2 indexes it again while copying a temporary
   back to the lhs when a temporary was required.  */
3744 gfc_ss *lss_section;
3751 tree index, maskexpr;
3753 /* A defined assignment. */
3754 if (cnext && cnext->resolved_sym)
3755 return gfc_trans_call (cnext, true, mask, count1, invert);
3758 /* TODO: handle this special case.
3759 Special case a single function returning an array. */
3760 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3762 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3768 /* Assignment of the form lhs = rhs. */
3769 gfc_start_block (&block);
3771 gfc_init_se (&lse, NULL);
3772 gfc_init_se (&rse, NULL);
3775 lss = gfc_walk_expr (expr1);
3778 /* In each where-assign-stmt, the mask-expr and the variable being
3779 defined shall be arrays of the same shape. */
3780 gcc_assert (lss != gfc_ss_terminator);
3782 /* The assignment needs scalarization. */
3785 /* Find a non-scalar SS from the lhs. */
3786 while (lss_section != gfc_ss_terminator
3787 && lss_section->type != GFC_SS_SECTION)
3788 lss_section = lss_section->next;
3790 gcc_assert (lss_section != gfc_ss_terminator);
3792 /* Initialize the scalarizer. */
3793 gfc_init_loopinfo (&loop);
3796 rss = gfc_walk_expr (expr2);
3797 if (rss == gfc_ss_terminator)
3799 /* The rhs is scalar. Add a ss for the expression. */
3800 rss = gfc_get_ss ();
3802 rss->next = gfc_ss_terminator;
3803 rss->type = GFC_SS_SCALAR;
3807 /* Associate the SS with the loop. */
3808 gfc_add_ss_to_loop (&loop, lss);
3809 gfc_add_ss_to_loop (&loop, rss);
3811 /* Calculate the bounds of the scalarization. */
3812 gfc_conv_ss_startstride (&loop);
3814 /* Resolve any data dependencies in the statement. */
3815 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3817 /* Setup the scalarizing loops. */
3818 gfc_conv_loop_setup (&loop, &expr2->where);
3820 /* Setup the gfc_se structures. */
3821 gfc_copy_loopinfo_to_se (&lse, &loop);
3822 gfc_copy_loopinfo_to_se (&rse, &loop);
3825 gfc_mark_ss_chain_used (rss, 1);
3826 if (loop.temp_ss == NULL)
3829 gfc_mark_ss_chain_used (lss, 1);
3833 lse.ss = loop.temp_ss;
3834 gfc_mark_ss_chain_used (lss, 3);
3835 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3838 /* Start the scalarized loop body. */
3839 gfc_start_scalarized_body (&loop, &body);
3841 /* Translate the expression. */
3842 gfc_conv_expr (&rse, expr2);
3843 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3844 gfc_conv_tmp_array_ref (&lse);
3846 gfc_conv_expr (&lse, expr1);
3848 /* Form the mask expression according to the mask. */
3850 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* NOTE(review): the guard for this negation (presumably
   "if (invert)") appears elided in this excerpt; the NOT should only
   be applied when the mask sense is inverted — verify against the
   full source.  */
3852 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3853 TREE_TYPE (maskexpr), maskexpr);
3855 /* Use the scalar assignment as is. */
3856 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3857 loop.temp_ss != NULL, false, true);
3859 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3861 gfc_add_expr_to_block (&body, tmp);
3863 if (lss == gfc_ss_terminator)
3865 /* Increment count1. */
3866 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3867 count1, gfc_index_one_node);
3868 gfc_add_modify (&body, count1, tmp);
3870 /* Use the scalar assignment as is. */
3871 gfc_add_block_to_block (&block, &body);
3875 gcc_assert (lse.ss == gfc_ss_terminator
3876 && rse.ss == gfc_ss_terminator);
3878 if (loop.temp_ss != NULL)
3880 /* Increment count1 before finish the main body of a scalarized
3882 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3883 gfc_array_index_type, count1, gfc_index_one_node);
3884 gfc_add_modify (&body, count1, tmp);
3885 gfc_trans_scalarized_loop_boundary (&loop, &body);
3887 /* We need to copy the temporary to the actual lhs. */
3888 gfc_init_se (&lse, NULL);
3889 gfc_init_se (&rse, NULL);
3890 gfc_copy_loopinfo_to_se (&lse, &loop);
3891 gfc_copy_loopinfo_to_se (&rse, &loop);
3893 rse.ss = loop.temp_ss;
3896 gfc_conv_tmp_array_ref (&rse);
3897 gfc_conv_expr (&lse, expr1);
3899 gcc_assert (lse.ss == gfc_ss_terminator
3900 && rse.ss == gfc_ss_terminator);
3902 /* Form the mask expression according to the mask tree list. */
/* The mask is re-read here with COUNT2 as the index (index setup line
   elided in this excerpt) while the temporary is copied to the lhs.  */
3904 maskexpr = gfc_build_array_ref (mask, index, NULL);
3906 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3907 TREE_TYPE (maskexpr), maskexpr);
3909 /* Use the scalar assignment as is. */
3910 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3912 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3913 build_empty_stmt (input_location))
3914 gfc_add_expr_to_block (&body, tmp);
3916 /* Increment count2. */
3917 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3918 gfc_array_index_type, count2,
3919 gfc_index_one_node);
3920 gfc_add_modify (&body, count2, tmp);
3924 /* Increment count1. */
3925 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3926 gfc_array_index_type, count1,
3927 gfc_index_one_node);
3928 gfc_add_modify (&body, count1, tmp);
3931 /* Generate the copying loops. */
3932 gfc_trans_scalarizing_loops (&loop, &body);
3934 /* Wrap the whole thing up. */
3935 gfc_add_block_to_block (&block, &loop.pre);
3936 gfc_add_block_to_block (&block, &loop.post);
3937 gfc_cleanup_loop (&loop);
3940 return gfc_finish_block (&block);
3944 /* Translate the WHERE construct or statement.
3945 This function can be called iteratively to translate the nested WHERE
3946 construct or statement.
3947 MASK is the control mask. */
3950 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3951 forall_info * nested_forall_info, stmtblock_t * block)
/* NOTE(review): this excerpt elides the opening brace, several local
   declarations and most brace-only/else lines of this function.
   CMASK is the control mask for the current clause, PMASK the pending
   mask for subsequent ELSEWHERE clauses; PCMASK/PPMASK remember the
   allocated buffers so they can be freed at the end.  */
3953 stmtblock_t inner_size_body;
3954 tree inner_size, size;
3963 tree count1, count2;
3967 tree pcmask = NULL_TREE;
3968 tree ppmask = NULL_TREE;
3969 tree cmask = NULL_TREE;
3970 tree pmask = NULL_TREE;
3971 gfc_actual_arglist *arg;
3973 /* the WHERE statement or the WHERE construct statement. */
3974 cblock = code->block;
3976 /* As the mask array can be very big, prefer compact boolean types. */
3977 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3979 /* Determine which temporary masks are needed. */
3982 /* One clause: No ELSEWHEREs. */
3983 need_cmask = (cblock->next != 0);
3986 else if (cblock->block->block)
3988 /* Three or more clauses: Conditional ELSEWHEREs. */
3992 else if (cblock->next)
3994 /* Two clauses, the first non-empty. */
3996 need_pmask = (mask != NULL_TREE
3997 && cblock->block->next != 0);
3999 else if (!cblock->block->next)
4001 /* Two clauses, both empty. */
4005 /* Two clauses, the first empty, the second non-empty. */
4008 need_cmask = (cblock->block->expr1 != 0);
4017 if (need_cmask || need_pmask)
4019 /* Calculate the size of temporary needed by the mask-expr. */
4020 gfc_init_block (&inner_size_body);
4021 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4022 &inner_size_body, &lss, &rss);
4024 gfc_free_ss_chain (lss);
4025 gfc_free_ss_chain (rss);
4027 /* Calculate the total size of temporary needed. */
4028 size = compute_overall_iter_number (nested_forall_info, inner_size,
4029 &inner_size_body, block);
4031 /* Check whether the size is negative. */
4032 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4033 gfc_index_zero_node);
/* Clamp a non-positive extent to zero so the mask allocation below
   never receives a negative size.  */
4034 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4035 cond, gfc_index_zero_node, size);
4036 size = gfc_evaluate_now (size, block);
4038 /* Allocate temporary for WHERE mask if needed. */
4040 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4043 /* Allocate temporary for !mask if needed. */
4045 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4051 /* Each time around this loop, the where clause is conditional
4052 on the value of mask and invert, which are updated at the
4053 bottom of the loop. */
4055 /* Has mask-expr. */
4058 /* Ensure that the WHERE mask will be evaluated exactly once.
4059 If there are no statements in this WHERE/ELSEWHERE clause,
4060 then we don't need to update the control mask (cmask).
4061 If this is the last clause of the WHERE construct, then
4062 we don't need to update the pending control mask (pmask). */
4064 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4066 cblock->next ? cmask : NULL_TREE,
4067 cblock->block ? pmask : NULL_TREE,
4070 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4072 (cblock->next || cblock->block)
4073 ? cmask : NULL_TREE,
4074 NULL_TREE, mask_type, block);
4078 /* It's a final elsewhere-stmt. No mask-expr is present. */
4082 /* The body of this where clause are controlled by cmask with
4083 sense specified by invert. */
4085 /* Get the assignment statement of a WHERE statement, or the first
4086 statement in where-body-construct of a WHERE construct. */
4087 cnext = cblock->next;
/* NOTE(review): the "while (cnext)" loop header and the "switch
   (cnext->op)" with its EXEC_ASSIGN case label appear elided in this
   excerpt; the cases below dispatch on the statement kind.  */
4092 /* WHERE assignment statement. */
4093 case EXEC_ASSIGN_CALL:
4095 arg = cnext->ext.actual;
4096 expr1 = expr2 = NULL;
4097 for (; arg; arg = arg->next)
4109 expr1 = cnext->expr1;
4110 expr2 = cnext->expr2;
4112 if (nested_forall_info != NULL)
4114 need_temp = gfc_check_dependency (expr1, expr2, 0);
4115 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4116 gfc_trans_assign_need_temp (expr1, expr2,
4118 nested_forall_info, block);
4121 /* Variables to control maskexpr. */
4122 count1 = gfc_create_var (gfc_array_index_type, "count1");
4123 count2 = gfc_create_var (gfc_array_index_type, "count2");
4124 gfc_add_modify (block, count1, gfc_index_zero_node);
4125 gfc_add_modify (block, count2, gfc_index_zero_node);
4127 tmp = gfc_trans_where_assign (expr1, expr2,
/* Inside a FORALL nest the masked assignment itself must be wrapped
   in the surrounding FORALL loops.  */
4132 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4134 gfc_add_expr_to_block (block, tmp);
4139 /* Variables to control maskexpr. */
4140 count1 = gfc_create_var (gfc_array_index_type, "count1");
4141 count2 = gfc_create_var (gfc_array_index_type, "count2");
4142 gfc_add_modify (block, count1, gfc_index_zero_node);
4143 gfc_add_modify (block, count2, gfc_index_zero_node);
4145 tmp = gfc_trans_where_assign (expr1, expr2,
4149 gfc_add_expr_to_block (block, tmp);
4154 /* WHERE or WHERE construct is part of a where-body-construct. */
4156 gfc_trans_where_2 (cnext, cmask, invert,
4157 nested_forall_info, block);
4164 /* The next statement within the same where-body-construct. */
4165 cnext = cnext->next;
4167 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4168 cblock = cblock->block;
4169 if (mask == NULL_TREE)
4171 /* If we're the initial WHERE, we can simply invert the sense
4172 of the current mask to obtain the "mask" for the remaining
4179 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4185 /* If we allocated a pending mask array, deallocate it now. */
4188 tmp = gfc_call_free (ppmask);
4189 gfc_add_expr_to_block (block, tmp);
4192 /* If we allocated a current mask array, deallocate it now. */
4195 tmp = gfc_call_free (pcmask);
4196 gfc_add_expr_to_block (block, tmp);
4200 /* Translate a simple WHERE construct or statement without dependencies.
4201 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4202 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4203 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4206 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
/* Fast path: a dependence-free WHERE (and optional ELSEWHERE), each
   restricted to a single assignment, is translated as one scalarized
   loop with a per-element COND_EXPR instead of mask temporaries.
   NOTE(review): brace-only lines and a few declarations/statements
   (e.g. the extraction of CEXPR from CSE, and the edss/esss locals)
   are elided in this excerpt.  */
4208 stmtblock_t block, body;
4209 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4210 tree tmp, cexpr, tstmt, estmt;
4211 gfc_ss *css, *tdss, *tsss;
4212 gfc_se cse, tdse, tsse, edse, esse;
4217 /* Allow the scalarizer to workshare simple where loops. */
4218 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4219 ompws_flags |= OMPWS_SCALARIZER_WS;
4221 cond = cblock->expr1;
4222 tdst = cblock->next->expr1;
4223 tsrc = cblock->next->expr2;
4224 edst = eblock ? eblock->next->expr1 : NULL;
4225 esrc = eblock ? eblock->next->expr2 : NULL;
4227 gfc_start_block (&block);
4228 gfc_init_loopinfo (&loop);
4230 /* Handle the condition. */
4231 gfc_init_se (&cse, NULL);
4232 css = gfc_walk_expr (cond);
4233 gfc_add_ss_to_loop (&loop, css);
4235 /* Handle the then-clause. */
4236 gfc_init_se (&tdse, NULL);
4237 gfc_init_se (&tsse, NULL);
4238 tdss = gfc_walk_expr (tdst);
4239 tsss = gfc_walk_expr (tsrc);
4240 if (tsss == gfc_ss_terminator)
4242 tsss = gfc_get_ss ();
4244 tsss->next = gfc_ss_terminator;
4245 tsss->type = GFC_SS_SCALAR;
4248 gfc_add_ss_to_loop (&loop, tdss);
4249 gfc_add_ss_to_loop (&loop, tsss);
4253 /* Handle the else clause. */
4254 gfc_init_se (&edse, NULL);
4255 gfc_init_se (&esse, NULL);
4256 edss = gfc_walk_expr (edst);
4257 esss = gfc_walk_expr (esrc);
4258 if (esss == gfc_ss_terminator)
4260 esss = gfc_get_ss ();
4262 esss->next = gfc_ss_terminator;
4263 esss->type = GFC_SS_SCALAR;
4266 gfc_add_ss_to_loop (&loop, edss);
4267 gfc_add_ss_to_loop (&loop, esss);
4270 gfc_conv_ss_startstride (&loop);
4271 gfc_conv_loop_setup (&loop, &tdst->where);
4273 gfc_mark_ss_chain_used (css, 1);
4274 gfc_mark_ss_chain_used (tdss, 1);
4275 gfc_mark_ss_chain_used (tsss, 1);
4278 gfc_mark_ss_chain_used (edss, 1);
4279 gfc_mark_ss_chain_used (esss, 1);
4282 gfc_start_scalarized_body (&loop, &body);
4284 gfc_copy_loopinfo_to_se (&cse, &loop);
4285 gfc_copy_loopinfo_to_se (&tdse, &loop);
4286 gfc_copy_loopinfo_to_se (&tsse, &loop);
4292 gfc_copy_loopinfo_to_se (&edse, &loop);
4293 gfc_copy_loopinfo_to_se (&esse, &loop);
4298 gfc_conv_expr (&cse, cond);
4299 gfc_add_block_to_block (&body, &cse.pre);
/* CEXPR (the per-element condition used in the COND_EXPR below) is
   presumably set from cse.expr here — the line is elided in this
   excerpt; verify against the full source.  */
4302 gfc_conv_expr (&tsse, tsrc);
4303 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4304 gfc_conv_tmp_array_ref (&tdse);
4306 gfc_conv_expr (&tdse, tdst);
4310 gfc_conv_expr (&esse, esrc);
4311 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4312 gfc_conv_tmp_array_ref (&edse);
4314 gfc_conv_expr (&edse, edst);
4317 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4318 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4320 : build_empty_stmt (input_location);
4321 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4322 gfc_add_expr_to_block (&body, tmp);
4323 gfc_add_block_to_block (&body, &cse.post);
4325 gfc_trans_scalarizing_loops (&loop, &body);
4326 gfc_add_block_to_block (&block, &loop.pre);
4327 gfc_add_block_to_block (&block, &loop.post);
4328 gfc_cleanup_loop (&loop);
4330 return gfc_finish_block (&block);
4333 /* As the WHERE or WHERE construct statement can be nested, we call
4334 gfc_trans_where_2 to do the translation, and pass the initial
4335 NULL values for both the control mask and the pending control mask. */
4338 gfc_trans_where (gfc_code * code)
/* Entry point for WHERE translation: try the dependence-free fast
   path (gfc_trans_where_3) for the single-assignment forms, otherwise
   fall back to the general mask-based gfc_trans_where_2.
   NOTE(review): local declarations, opening braces and parts of the
   guarding conditions (e.g. the "if (cblock->next" line) are elided
   in this excerpt.  */
4344 cblock = code->block;
4346 && cblock->next->op == EXEC_ASSIGN
4347 && !cblock->next->next)
4349 eblock = cblock->block;
4352 /* A simple "WHERE (cond) x = y" statement or block is
4353 dependence free if cond is not dependent upon writing x,
4354 and the source y is unaffected by the destination x. */
4355 if (!gfc_check_dependency (cblock->next->expr1,
4357 && !gfc_check_dependency (cblock->next->expr1,
4358 cblock->next->expr2, 0))
4359 return gfc_trans_where_3 (cblock, NULL);
4361 else if (!eblock->expr1
4364 && eblock->next->op == EXEC_ASSIGN
4365 && !eblock->next->next)
4367 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4368 block is dependence free if cond is not dependent on writes
4369 to x1 and x2, y1 is not dependent on writes to x2, and y2
4370 is not dependent on writes to x1, and both y's are not
4371 dependent upon their own x's. In addition to this, the
4372 final two dependency checks below exclude all but the same
4373 array reference if the where and elswhere destinations
4374 are the same. In short, this is VERY conservative and this
4375 is needed because the two loops, required by the standard
4376 are coalesced in gfc_trans_where_3. */
4377 if (!gfc_check_dependency(cblock->next->expr1,
4379 && !gfc_check_dependency(eblock->next->expr1,
4381 && !gfc_check_dependency(cblock->next->expr1,
4382 eblock->next->expr2, 1)
4383 && !gfc_check_dependency(eblock->next->expr1,
4384 cblock->next->expr2, 1)
4385 && !gfc_check_dependency(cblock->next->expr1,
4386 cblock->next->expr2, 1)
4387 && !gfc_check_dependency(eblock->next->expr1,
4388 eblock->next->expr2, 1)
4389 && !gfc_check_dependency(cblock->next->expr1,
4390 eblock->next->expr1, 0)
4391 && !gfc_check_dependency(eblock->next->expr1,
4392 cblock->next->expr1, 0))
4393 return gfc_trans_where_3 (cblock, eblock);
/* General case: no mask yet (NULL_TREE), no inversion, no enclosing
   FORALL info at the top level.  */
4397 gfc_start_block (&block);
4399 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4401 return gfc_finish_block (&block);
4405 /* CYCLE a DO loop. The label decl has already been created by
4406 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4407 node at the head of the loop. We must mark the label as used. */
4410 gfc_trans_cycle (gfc_code * code)
/* NOTE(review): the return-type line, opening brace and the local
   "tree cycle_label;" declaration are elided in this excerpt.  */
4414 cycle_label = code->ext.which_construct->cycle_label;
4415 gcc_assert (cycle_label);
/* Mark the label used so the middle end does not warn about or
   discard it, then jump to it.  */
4417 TREE_USED (cycle_label) = 1;
4418 return build1_v (GOTO_EXPR, cycle_label);
4422 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4423 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4427 gfc_trans_exit (gfc_code * code)
4431 exit_label = code->ext.which_construct->exit_label;
4432 gcc_assert (exit_label);
4434 TREE_USED (exit_label) = 1;
4435 return build1_v (GOTO_EXPR, exit_label);
4439 /* Translate the ALLOCATE statement. */
4442 gfc_trans_allocate (gfc_code * code)
/* Translate ALLOCATE: for each object in the list, allocate storage,
   honor STAT=/ERRMSG= (code->expr1 / code->expr2), apply SOURCE= or
   MOLD= initialization (code->expr3), and set the vptr for CLASS
   objects.  NOTE(review): this excerpt elides many lines (locals,
   braces, else-arms); comments below hedge accordingly.  */
4455 if (!code->ext.alloc.list)
4458 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4460 gfc_start_block (&block);
4462 /* Either STAT= and/or ERRMSG is present. */
4463 if (code->expr1 || code->expr2)
4465 tree gfc_int4_type_node = gfc_get_int_type (4);
4467 stat = gfc_create_var (gfc_int4_type_node, "stat");
4468 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* On a failed allocation we jump to ERROR_LABEL (emitted near the
   end) instead of aborting, so STAT/ERRMSG can be reported.  */
4470 error_label = gfc_build_label_decl (NULL_TREE);
4471 TREE_USED (error_label) = 1;
4474 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4476 expr = gfc_copy_expr (al->expr);
4478 if (expr->ts.type == BT_CLASS)
4479 gfc_add_data_component (expr);
4481 gfc_init_se (&se, NULL);
4482 gfc_start_block (&se.pre);
4484 se.want_pointer = 1;
4485 se.descriptor_only = 1;
4486 gfc_conv_expr (&se, expr);
4488 if (!gfc_array_allocate (&se, expr, pstat))
4490 /* A scalar or derived type. */
4492 /* Determine allocate size. */
4493 if (al->expr->ts.type == BT_CLASS && code->expr3)
4495 if (code->expr3->ts.type == BT_CLASS)
/* Polymorphic SOURCE=: the size must be read from the source's
   vtab at run time via its _size component.  */
4499 sz = gfc_copy_expr (code->expr3);
4500 gfc_add_vptr_component (sz);
4501 gfc_add_size_component (sz);
4502 gfc_init_se (&se_sz, NULL);
4503 gfc_conv_expr (&se_sz, sz);
4508 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4510 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4511 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4513 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4515 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4516 memsz = se.string_length;
4518 /* Allocate - for non-pointers with re-alloc checking. */
4525 /* Find the last reference in the chain. */
4526 while (ref && ref->next != NULL)
4528 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
4533 allocatable = expr->symtree->n.sym->attr.allocatable;
4535 allocatable = ref->u.c.component->attr.allocatable;
/* Allocatables get the already-allocated check; plain pointers use
   the unchecked allocation below.  */
4538 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4541 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4544 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4546 fold_convert (TREE_TYPE (se.expr), tmp));
4547 gfc_add_expr_to_block (&se.pre, tmp);
4549 if (code->expr1 || code->expr2)
4551 tmp = build1_v (GOTO_EXPR, error_label);
4552 parm = fold_build2_loc (input_location, NE_EXPR,
4553 boolean_type_node, stat,
4554 build_int_cst (TREE_TYPE (stat), 0));
4555 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4557 build_empty_stmt (input_location));
4558 gfc_add_expr_to_block (&se.pre, tmp);
4561 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Freshly allocated derived types with allocatable components must
   have those components nulled before any use.  */
4563 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4564 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4565 gfc_add_expr_to_block (&se.pre, tmp);
4570 tmp = gfc_finish_block (&se.pre);
4571 gfc_add_expr_to_block (&block, tmp);
4573 if (code->expr3 && !code->expr3->mold)
4575 /* Initialization via SOURCE block
4576 (or static default initializer). */
4577 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4578 if (al->expr->ts.type == BT_CLASS)
4581 gfc_actual_arglist *actual;
4583 gfc_init_se (&call, NULL);
4584 /* Do a polymorphic deep copy. */
4585 actual = gfc_get_actual_arglist ();
4586 actual->expr = gfc_copy_expr (rhs);
4587 if (rhs->ts.type == BT_CLASS)
4588 gfc_add_data_component (actual->expr);
4589 actual->next = gfc_get_actual_arglist ();
4590 actual->next->expr = gfc_copy_expr (al->expr);
4591 gfc_add_data_component (actual->next->expr);
4592 if (rhs->ts.type == BT_CLASS)
4594 ppc = gfc_copy_expr (rhs);
4595 gfc_add_vptr_component (ppc);
4598 ppc = gfc_lval_expr_from_sym (gfc_find_derived_vtab (rhs->ts.u.derived));
4599 gfc_add_component_ref (ppc, "_copy");
4600 gfc_conv_procedure_call (&call, ppc->symtree->n.sym, actual,
4602 gfc_add_expr_to_block (&call.pre, call.expr);
4603 gfc_add_block_to_block (&call.pre, &call.post);
4604 tmp = gfc_finish_block (&call.pre);
4607 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4609 gfc_free_expr (rhs);
4610 gfc_add_expr_to_block (&block, tmp);
4612 else if (code->expr3 && code->expr3->mold
4613 && code->expr3->ts.type == BT_CLASS)
4615 /* Default-initialization via MOLD (polymorphic). */
4616 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4618 gfc_add_vptr_component (rhs);
4619 gfc_add_def_init_component (rhs);
4620 gfc_init_se (&dst, NULL);
4621 gfc_init_se (&src, NULL);
4622 gfc_conv_expr (&dst, expr);
4623 gfc_conv_expr (&src, rhs);
4624 gfc_add_block_to_block (&block, &src.pre);
4625 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4626 gfc_add_expr_to_block (&block, tmp);
4627 gfc_free_expr (rhs);
4630 /* Allocation of CLASS entities. */
4631 gfc_free_expr (expr);
/* NOTE(review): a reassignment of EXPR (presumably "expr = al->expr;")
   appears to be elided between the free above and the uses below; as
   shown this would be a use-after-free — verify against the full
   source before editing here.  */
4633 if (expr->ts.type == BT_CLASS)
4638 /* Initialize VPTR for CLASS objects. */
4639 lhs = gfc_expr_to_initialize (expr);
4640 gfc_add_vptr_component (lhs);
4642 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4644 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4645 rhs = gfc_copy_expr (code->expr3);
4646 gfc_add_vptr_component (rhs);
4647 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4648 gfc_add_expr_to_block (&block, tmp);
4649 gfc_free_expr (rhs);
4653 /* VPTR is fixed at compile time. */
4657 ts = &code->expr3->ts;
4658 else if (expr->ts.type == BT_DERIVED)
4660 else if (code->ext.alloc.ts.type == BT_DERIVED)
4661 ts = &code->ext.alloc.ts;
4662 else if (expr->ts.type == BT_CLASS)
4663 ts = &CLASS_DATA (expr)->ts;
4667 if (ts->type == BT_DERIVED)
4669 vtab = gfc_find_derived_vtab (ts->u.derived);
4671 gfc_init_se (&lse, NULL);
4672 lse.want_pointer = 1;
4673 gfc_conv_expr (&lse, lhs);
4674 tmp = gfc_build_addr_expr (NULL_TREE,
4675 gfc_get_symbol_decl (vtab));
4676 gfc_add_modify (&block, lse.expr,
4677 fold_convert (TREE_TYPE (lse.expr), tmp));
4680 gfc_free_expr (lhs);
/* Error path: control lands here after any failed allocation when
   STAT=/ERRMSG= is present.  */
4688 tmp = build1_v (LABEL_EXPR, error_label);
4689 gfc_add_expr_to_block (&block, tmp);
4691 gfc_init_se (&se, NULL);
4692 gfc_conv_expr_lhs (&se, code->expr1);
4693 tmp = convert (TREE_TYPE (se.expr), stat);
4694 gfc_add_modify (&block, se.expr, tmp);
4700 /* A better error message may be possible, but not required. */
4701 const char *msg = "Attempt to allocate an allocated object";
4702 tree errmsg, slen, dlen;
4704 gfc_init_se (&se, NULL);
4705 gfc_conv_expr_lhs (&se, code->expr2);
4707 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4709 gfc_add_modify (&block, errmsg,
4710 gfc_build_addr_expr (pchar_type_node,
4711 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG)) bytes so a short ERRMSG variable
   is never overrun.  */
4713 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4714 dlen = gfc_get_expr_charlen (code->expr2);
4715 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4718 dlen = build_call_expr_loc (input_location,
4719 built_in_decls[BUILT_IN_MEMCPY], 3,
4720 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* Only store the message when STAT is non-zero.  */
4722 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
4723 build_int_cst (TREE_TYPE (stat), 0));
4725 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4727 gfc_add_expr_to_block (&block, tmp);
4730 return gfc_finish_block (&block);
4734 /* Translate a DEALLOCATE statement. */
4737 gfc_trans_deallocate (gfc_code *code)
/* Translate DEALLOCATE: free each object in the list, zero its
   pointer afterwards, reset CLASS vptrs to the declared type, and
   accumulate per-object failure status into ASTAT for STAT=/ERRMSG=.
   NOTE(review): brace-only lines and several declarations/else-arms
   are elided in this excerpt.  */
4741 tree apstat, astat, pstat, stat, tmp;
4744 pstat = apstat = stat = astat = tmp = NULL_TREE;
4746 gfc_start_block (&block);
4748 /* Count the number of failed deallocations. If deallocate() was
4749 called with STAT= , then set STAT to the count. If deallocate
4750 was called with ERRMSG, then set ERRMG to a string. */
4751 if (code->expr1 || code->expr2)
4753 tree gfc_int4_type_node = gfc_get_int_type (4);
/* STAT receives the status of each individual deallocation; ASTAT
   keeps the running total across the whole list.  */
4755 stat = gfc_create_var (gfc_int4_type_node, "stat");
4756 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4758 /* Running total of possible deallocation failures. */
4759 astat = gfc_create_var (gfc_int4_type_node, "astat");
4760 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4762 /* Initialize astat to 0. */
4763 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4766 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4768 gfc_expr *expr = gfc_copy_expr (al->expr);
4769 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4771 if (expr->ts.type == BT_CLASS)
4772 gfc_add_data_component (expr);
4774 gfc_init_se (&se, NULL);
4775 gfc_start_block (&se.pre);
4777 se.want_pointer = 1;
4778 se.descriptor_only = 1;
4779 gfc_conv_expr (&se, expr);
4783 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4786 gfc_ref *last = NULL;
4787 for (ref = expr->ref; ref; ref = ref->next)
4788 if (ref->type == REF_COMPONENT)
4791 /* Do not deallocate the components of a derived type
4792 ultimate pointer component. */
4793 if (!(last && last->u.c.component->attr.pointer)
4794 && !(!last && expr->symtree->n.sym->attr.pointer))
4796 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4798 gfc_add_expr_to_block (&se.pre, tmp);
/* Array case; the corresponding else/scalar branch follows.  */
4801 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4802 gfc_add_expr_to_block (&se.pre, tmp);
4806 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
4808 gfc_add_expr_to_block (&se.pre, tmp);
4810 /* Set to zero after deallocation. */
4811 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4813 build_int_cst (TREE_TYPE (se.expr), 0));
4814 gfc_add_expr_to_block (&se.pre, tmp);
4816 if (al->expr->ts.type == BT_CLASS)
4818 /* Reset _vptr component to declared type. */
4819 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
4820 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
4821 gfc_add_vptr_component (lhs);
4822 rhs = gfc_lval_expr_from_sym (vtab);
4823 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4824 gfc_add_expr_to_block (&se.pre, tmp);
4825 gfc_free_expr (lhs);
4826 gfc_free_expr (rhs);
4830 /* Keep track of the number of failed deallocations by adding stat
4831 of the last deallocation to the running total. */
4832 if (code->expr1 || code->expr2)
4834 apstat = fold_build2_loc (input_location, PLUS_EXPR,
4835 TREE_TYPE (stat), astat, stat);
4836 gfc_add_modify (&se.pre, astat, apstat);
4839 tmp = gfc_finish_block (&se.pre);
4840 gfc_add_expr_to_block (&block, tmp);
4841 gfc_free_expr (expr);
/* STAT= : hand the accumulated failure count back to the user
   variable (guard "if (code->expr1)" elided in this excerpt).  */
4847 gfc_init_se (&se, NULL);
4848 gfc_conv_expr_lhs (&se, code->expr1);
4849 tmp = convert (TREE_TYPE (se.expr), astat);
4850 gfc_add_modify (&block, se.expr, tmp);
4856 /* A better error message may be possible, but not required. */
4857 const char *msg = "Attempt to deallocate an unallocated object";
4858 tree errmsg, slen, dlen;
4860 gfc_init_se (&se, NULL);
4861 gfc_conv_expr_lhs (&se, code->expr2);
4863 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4865 gfc_add_modify (&block, errmsg,
4866 gfc_build_addr_expr (pchar_type_node,
4867 gfc_build_localized_cstring_const (msg)));
/* Copy min(strlen(msg), len(ERRMSG)) bytes so a short ERRMSG variable
   is never overrun; only store when ASTAT is non-zero.  */
4869 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4870 dlen = gfc_get_expr_charlen (code->expr2);
4871 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4874 dlen = build_call_expr_loc (input_location,
4875 built_in_decls[BUILT_IN_MEMCPY], 3,
4876 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4878 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat,
4879 build_int_cst (TREE_TYPE (astat), 0));
4881 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4883 gfc_add_expr_to_block (&block, tmp);
4886 return gfc_finish_block (&block);
4889 #include "gt-fortran-trans-stmt.h"