1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
42 typedef struct iter_info
/* NOTE(review): the members of iter_info other than the chain pointer are
   elided in this excerpt; presumably they hold the per-dimension FORALL
   iterator state — confirm against the full source.  */
48 struct iter_info *next;
52 typedef struct forall_info
/* Link to the enclosing (outer) FORALL nest level.  */
59 struct forall_info *prev_nest;
/* Forward declaration: gfc_trans_where_2 is defined later in this file and
   is needed by the FORALL/WHERE translation routines above it.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
69 gfc_trans_label_here (gfc_code * code)
/* Emit the backend label decl associated with this statement's label.  */
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables that were the target of a (legacy) ASSIGN statement carry
   the auxiliary string-length/address decls this function digs out.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
/* The ASSIGNed variable carries two auxiliaries: a string length and an
   address slot (see gfc_conv_label_variable).  */
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
114 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch-target label: store its address and use -1 as the length
   sentinel (checked again in gfc_trans_goto below).  */
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT: store the address of the format
   string and its (non-negative) length instead.  */
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Simple case: an ordinary GOTO to a statement label.  */
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Otherwise this is an assigned GOTO through an ASSIGNed variable.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
/* A stored length of -1 marks a branch-target label (set in
   gfc_trans_label_assign); anything else means the variable holds a
   FORMAT label, which is not a valid GOTO destination.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 /* We're going to ignore a label list. It does not really change the
163 statement's semantics (because it is just a further restriction on
164 what's legal code); before, we were comparing label addresses here, but
165 that's a very fragile business and may break with optimization. So
168 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
169 gfc_add_expr_to_block (&se.pre, target);
170 return gfc_finish_block (&se.pre);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
176 gfc_trans_entry (gfc_code * code)
178 return build1_v (LABEL_EXPR, code->ext.entry->label);
182 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
183 elemental subroutines. Make temporaries for output arguments if any such
184 dependencies are found. Output arguments are chosen because internal_unpack
185 can be used, as is, to copy the result back to the variable. */
187 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
188 gfc_symbol * sym, gfc_actual_arglist * arg,
189 gfc_dep_check check_variable)
191 gfc_actual_arglist *arg0;
193 gfc_formal_arglist *formal;
194 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call was scalarized (an ss chain exists).  */
206 if (loopse->ss == NULL)
211 formal = sym->formal;
213 /* Loop over all the arguments testing for dependencies. */
214 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
220 /* Obtain the info structure for the current argument. */
222 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
226 info = &ss->data.info;
230 /* If there is a dependency, create a temporary and use it
231 instead of the variable. */
232 fsym = formal ? formal->sym : NULL;
233 if (e->expr_type == EXPR_VARIABLE
235 && fsym->attr.intent != INTENT_IN
236 && gfc_check_fncall_dependency (e, fsym->attr.intent,
237 sym, arg0, check_variable))
239 tree initial, temptype;
240 stmtblock_t temp_post;
242 /* Make a local loopinfo for the temporary creation, so that
243 none of the other ss->info's have to be renormalized. */
244 gfc_init_loopinfo (&tmp_loop);
245 for (n = 0; n < info->dimen; n++)
247 tmp_loop.to[n] = loopse->loop->to[n];
248 tmp_loop.from[n] = loopse->loop->from[n];
249 tmp_loop.order[n] = loopse->loop->order[n];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse, NULL);
254 parmse.want_pointer = 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref = e->ref; ref; ref = ref->next)
263 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
268 bool seen_range = false;
269 for (n = 0; n < ref->u.ar.dimen; n++)
271 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
275 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Convert a trailing element subscript into a degenerate
   range (start:start) so the descriptor keeps full rank.  */
278 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
279 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
283 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
284 gfc_add_block_to_block (&se->pre, &parmse.pre);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym->attr.intent == INTENT_INOUT
289 || (fsym->ts.type ==BT_DERIVED
290 && fsym->attr.intent == INTENT_OUT)
291 initial = parmse.expr;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE)
303 temptype = TREE_TYPE (temptype);
304 temptype = gfc_get_element_type (temptype);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size = gfc_create_var (gfc_array_index_type, NULL);
310 data = gfc_create_var (pvoid_type_node, NULL);
311 gfc_init_block (&temp_post);
312 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
313 &tmp_loop, info, temptype,
317 gfc_add_modify (&se->pre, size, tmp);
318 tmp = fold_convert (pvoid_type_node, info->data);
319 gfc_add_modify (&se->pre, data, tmp);
321 /* Calculate the offset for the temporary. */
322 offset = gfc_index_zero_node;
323 for (n = 0; n < info->dimen; n++)
325 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
327 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
328 loopse->loop->from[n], tmp);
329 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
332 info->offset = gfc_create_var (gfc_array_index_type, NULL);
333 gfc_add_modify (&se->pre, info->offset, offset);
335 /* Copy the result back using unpack. */
336 tmp = build_call_expr_loc (input_location,
337 gfor_fndecl_in_unpack, 2, parmse.expr, data);
338 gfc_add_expr_to_block (&se->post, tmp);
340 /* parmse.pre is already added above. */
341 gfc_add_block_to_block (&se->post, &parmse.post);
342 gfc_add_block_to_block (&se->post, &temp_post);
348 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
351 gfc_trans_call (gfc_code * code, bool dependency_check,
352 tree mask, tree count1, bool invert)
356 int has_alternate_specifier;
357 gfc_dep_check check_variable;
358 tree index = NULL_TREE;
359 tree maskexpr = NULL_TREE;
362 /* A CALL starts a new block because the actual arguments may have to
363 be evaluated first. */
364 gfc_init_se (&se, NULL);
365 gfc_start_block (&se.pre);
367 gcc_assert (code->resolved_sym);
369 ss = gfc_ss_terminator;
370 if (code->resolved_sym->attr.elemental)
371 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
373 /* Is not an elemental subroutine call with array valued arguments. */
374 if (ss == gfc_ss_terminator)
377 /* Translate the call. */
378 has_alternate_specifier
379 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
380 code->expr1, NULL_TREE);
382 /* A subroutine without side-effect, by definition, does nothing! */
383 TREE_SIDE_EFFECTS (se.expr) = 1;
385 /* Chain the pieces together and return the block. */
386 if (has_alternate_specifier)
388 gfc_code *select_code;
/* The frontend turns "CALL sub(*10, *20)" into a call followed by an
   EXEC_SELECT on the returned specifier; store the call result into
   that SELECT's variable.  */
390 select_code = code->next;
391 gcc_assert(select_code->op == EXEC_SELECT);
392 sym = select_code->expr1->symtree->n.sym;
393 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
394 if (sym->backend_decl == NULL)
395 sym->backend_decl = gfc_get_symbol_decl (sym);
396 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
399 gfc_add_expr_to_block (&se.pre, se.expr);
401 gfc_add_block_to_block (&se.pre, &se.post);
406 /* An elemental subroutine call with array valued arguments has
414 /* gfc_walk_elemental_function_args renders the ss chain in the
415 reverse order to the actual argument order. */
416 ss = gfc_reverse_ss (ss);
418 /* Initialize the loop. */
419 gfc_init_se (&loopse, NULL);
420 gfc_init_loopinfo (&loop);
421 gfc_add_ss_to_loop (&loop, ss);
423 gfc_conv_ss_startstride (&loop);
424 /* TODO: gfc_conv_loop_setup generates a temporary for vector
425 subscripts. This could be prevented in the elemental case
426 as temporaries are handled separatedly
427 (below in gfc_conv_elemental_dependencies). */
428 gfc_conv_loop_setup (&loop, &code->expr1->where);
429 gfc_mark_ss_chain_used (ss, 1);
431 /* Convert the arguments, checking for dependencies. */
432 gfc_copy_loopinfo_to_se (&loopse, &loop);
435 /* For operator assignment, do dependency checking. */
436 if (dependency_check)
437 check_variable = ELEM_CHECK_VARIABLE;
439 check_variable = ELEM_DONT_CHECK_VARIABLE;
441 gfc_init_se (&depse, NULL);
442 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
443 code->ext.actual, check_variable);
445 gfc_add_block_to_block (&loop.pre, &depse.pre);
446 gfc_add_block_to_block (&loop.post, &depse.post);
448 /* Generate the loop body. */
449 gfc_start_scalarized_body (&loop, &body);
450 gfc_init_block (&block);
454 /* Form the mask expression according to the mask. */
456 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* When INVERT is set the mask test is negated (".NOT. mask").  */
458 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
462 /* Add the subroutine call to the block. */
463 gfc_conv_procedure_call (&loopse, code->resolved_sym,
464 code->ext.actual, code->expr1,
/* Guard the call with the mask and bump the mask index COUNT1.  */
469 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
470 build_empty_stmt (input_location));
471 gfc_add_expr_to_block (&loopse.pre, tmp);
472 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
473 count1, gfc_index_one_node);
474 gfc_add_modify (&loopse.pre, count1, tmp);
477 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
479 gfc_add_block_to_block (&block, &loopse.pre);
480 gfc_add_block_to_block (&block, &loopse.post);
482 /* Finish up the loop block and the loop. */
483 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
484 gfc_trans_scalarizing_loops (&loop, &body);
485 gfc_add_block_to_block (&se.pre, &loop.pre);
486 gfc_add_block_to_block (&se.pre, &loop.post);
487 gfc_add_block_to_block (&se.pre, &se.post);
488 gfc_cleanup_loop (&loop);
491 return gfc_finish_block (&se.pre);
495 /* Translate the RETURN statement. */
498 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
506 /* If code->expr is not NULL, this return statement must appear
507 in a subroutine and current_fake_result_decl has already
510 result = gfc_get_fake_result_decl (NULL, 0);
/* No fake result decl: an alternate return was used where no "*" dummy
   exists; warn and fall back to a plain jump to the return label.  */
513 gfc_warning ("An alternate return at %L without a * dummy argument",
514 &code->expr1->where);
515 return build1_v (GOTO_EXPR, gfc_get_return_label ());
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
/* Store the alternate-return specifier into the result variable, then
   jump to the function's common return label.  */
522 gfc_conv_expr (&se, code->expr1);
524 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
525 fold_convert (TREE_TYPE (result), se.expr));
526 gfc_add_expr_to_block (&se.pre, tmp);
528 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
529 gfc_add_expr_to_block (&se.pre, tmp);
530 gfc_add_block_to_block (&se.pre, &se.post);
531 return gfc_finish_block (&se.pre);
/* Plain RETURN (no expression): just jump to the return label.  */
534 return build1_v (GOTO_EXPR, gfc_get_return_label ());
538 /* Translate the PAUSE statement. We have to translate this statement
539 to a runtime library call. */
542 gfc_trans_pause (gfc_code * code)
544 tree gfc_int4_type_node = gfc_get_int_type (4);
548 /* Start a new block for this statement. */
549 gfc_init_se (&se, NULL);
550 gfc_start_block (&se.pre);
/* PAUSE without an expression uses the numeric stop code; PAUSE with a
   character expression calls the string variant of the library routine.  */
553 if (code->expr1 == NULL)
555 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
556 tmp = build_call_expr_loc (input_location,
557 gfor_fndecl_pause_numeric, 1, tmp);
561 gfc_conv_expr_reference (&se, code->expr1);
562 tmp = build_call_expr_loc (input_location,
563 gfor_fndecl_pause_string, 2,
564 se.expr, se.string_length);
567 gfc_add_expr_to_block (&se.pre, tmp);
569 gfc_add_block_to_block (&se.pre, &se.post);
571 return gfc_finish_block (&se.pre);
575 /* Translate the STOP statement. We have to translate this statement
576 to a runtime library call. */
579 gfc_trans_stop (gfc_code *code, bool error_stop)
581 tree gfc_int4_type_node = gfc_get_int_type (4);
585 /* Start a new block for this statement. */
586 gfc_init_se (&se, NULL);
587 gfc_start_block (&se.pre);
/* Numeric STOP code when no expression is given; otherwise call the
   string variant.  ERROR_STOP selects the ERROR STOP library entry.  */
589 if (code->expr1 == NULL)
591 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
592 tmp = build_call_expr_loc (input_location,
593 gfor_fndecl_stop_numeric, 1, tmp);
597 gfc_conv_expr_reference (&se, code->expr1);
598 tmp = build_call_expr_loc (input_location,
599 error_stop ? gfor_fndecl_error_stop_string
600 : gfor_fndecl_stop_string,
601 2, se.expr, se.string_length);
604 gfc_add_expr_to_block (&se.pre, tmp);
606 gfc_add_block_to_block (&se.pre, &se.post);
608 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL/SYNC IMAGES/SYNC MEMORY.  With a single image the
   synchronization itself is a no-op; only the bounds check on the image
   number and the STAT= assignment generate code.  */
613 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
617 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
619 gfc_init_se (&se, NULL);
620 gfc_start_block (&se.pre);
623 /* Check SYNC IMAGES(imageset) for valid image index.
624 FIXME: Add a check for image-set arrays. */
625 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
626 && code->expr1->rank == 0)
/* With one image, the only valid image number is 1.  */
629 gfc_conv_expr (&se, code->expr1);
630 cond = fold_build2 (NE_EXPR, boolean_type_node, se.expr,
631 build_int_cst (TREE_TYPE (se.expr), 1));
632 gfc_trans_runtime_check (true, false, cond, &se.pre,
633 &code->expr1->where, "Invalid image number "
635 fold_convert (integer_type_node, se.expr));
638 /* If STAT is present, set it to zero. */
641 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
642 gfc_conv_expr (&se, code->expr2);
643 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
646 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
647 return gfc_finish_block (&se.pre);
653 /* Generate GENERIC for the IF construct. This function also deals with
654 the simple IF statement, because the front end translates the IF
655 statement into an IF construct.
687 where COND_S is the simplified version of the predicate. PRE_COND_S
688 are the pre side-effects produced by the translation of the
690 We need to build the chain recursively otherwise we run into
691 problems with folding incomplete statements. */
694 gfc_trans_if_1 (gfc_code * code)
699 /* Check for an unconditional ELSE clause. */
701 return gfc_trans_code (code->next);
703 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
704 gfc_init_se (&if_se, NULL);
705 gfc_start_block (&if_se.pre);
707 /* Calculate the IF condition expression. */
708 gfc_conv_expr_val (&if_se, code->expr1);
710 /* Translate the THEN clause. */
711 stmt = gfc_trans_code (code->next);
713 /* Translate the ELSE clause. */
/* Recurse on code->block for ELSE IF chains; an empty statement stands
   in when there is no ELSE part.  */
715 elsestmt = gfc_trans_if_1 (code->block);
717 elsestmt = build_empty_stmt (input_location);
719 /* Build the condition expression and add it to the condition block. */
720 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
722 gfc_add_expr_to_block (&if_se.pre, stmt);
724 /* Finish off this statement. */
725 return gfc_finish_block (&if_se.pre);
/* Entry point for translating an IF construct; the real work is done by
   gfc_trans_if_1 on the chained blocks.  */
729 gfc_trans_if (gfc_code * code)
731 /* Ignore the top EXEC_IF, it only announces an IF construct. The
732 actual code we must translate is in code->block. */
734 return gfc_trans_if_1 (code->block);
738 /* Translate an arithmetic IF expression.
740 IF (cond) label1, label2, label3 translates to
752 An optimized version can be generated in case of equal labels.
753 E.g., if label1 is equal to label2, we can translate it to
762 gfc_trans_arithmetic_if (gfc_code * code)
770 /* Start a new block. */
771 gfc_init_se (&se, NULL);
772 gfc_start_block (&se.pre);
774 /* Pre-evaluate COND. */
775 gfc_conv_expr_val (&se, code->expr1);
/* COND is compared several times below; evaluate it exactly once.  */
776 se.expr = gfc_evaluate_now (se.expr, &se.pre);
778 /* Build something to compare with. */
779 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
781 if (code->label1->value != code->label2->value)
783 /* If (cond < 0) take branch1 else take branch2.
784 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
785 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
786 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 lets us collapse (<0) and (>0) into (!= 0).  */
788 if (code->label1->value != code->label3->value)
789 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
791 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
793 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: the first two branches merge into one.  */
796 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
798 if (code->label1->value != code->label3->value
799 && code->label2->value != code->label3->value)
801 /* if (cond <= 0) take branch1 else take branch2. */
802 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
803 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
804 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
807 /* Append the COND_EXPR to the evaluation of COND, and return. */
808 gfc_add_expr_to_block (&se.pre, branch1);
809 return gfc_finish_block (&se.pre);
813 /* Translate a CRITICAL block. */
815 gfc_trans_critical (gfc_code *code)
/* NOTE(review): with a single image there is no locking to emit; the body
   is translated as-is — confirm against coarray-enabled builds.  */
820 gfc_start_block (&block);
821 tmp = gfc_trans_code (code->block->next);
822 gfc_add_expr_to_block (&block, tmp);
824 return gfc_finish_block (&block);
828 /* Translate a BLOCK construct. This is basically what we would do for a
832 gfc_trans_block_construct (gfc_code* code)
844 gcc_assert (!sym->tlink);
847 gfc_start_block (&body);
/* Declare the BLOCK's local variables, translate its code, then wrap it
   with the initialization/cleanup of deferred (e.g. allocatable) vars.  */
848 gfc_process_block_locals (ns);
850 tmp = gfc_trans_code (ns->code);
851 tmp = gfc_trans_deferred_vars (sym, tmp);
853 gfc_add_expr_to_block (&body, tmp);
854 return gfc_finish_block (&body);
858 /* Translate the simple DO construct. This is where the loop variable has
859 integer type and step +-1. We can't use this in the general case
860 because integer overflow and floating point errors could give incorrect
862 We translate a do loop from:
864 DO dovar = from, to, step
870 [Evaluate loop bounds and step]
872 if ((step > 0) ? (dovar <= to) : (dovar => to))
878 cond = (dovar == to);
880 if (cond) goto end_label;
885 This helps the optimizers by avoiding the extra induction variable
886 used in the general case. */
889 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
890 tree from, tree to, tree step, tree exit_cond)
896 tree saved_dovar = NULL;
900 type = TREE_TYPE (dovar);
902 /* Initialize the DO variable: dovar = from. */
903 gfc_add_modify (pblock, dovar, from);
905 /* Save value for do-tinkering checking. */
906 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
908 saved_dovar = gfc_create_var (type, ".saved_dovar");
909 gfc_add_modify (pblock, saved_dovar, dovar);
912 /* Cycle and exit statements are implemented with gotos. */
913 cycle_label = gfc_build_label_decl (NULL_TREE);
914 exit_label = gfc_build_label_decl (NULL_TREE);
916 /* Put the labels where they can be found later. See gfc_trans_do(). */
917 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
920 gfc_start_block (&body);
922 /* Main loop body. */
923 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
924 gfc_add_expr_to_block (&body, tmp);
926 /* Label for cycle statements (if needed). */
927 if (TREE_USED (cycle_label))
929 tmp = build1_v (LABEL_EXPR, cycle_label);
930 gfc_add_expr_to_block (&body, tmp);
933 /* Check whether someone has modified the loop variable. */
934 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
936 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
937 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
938 "Loop variable has been modified");
941 /* Exit the loop if there is an I/O result condition or error. */
944 tmp = build1_v (GOTO_EXPR, exit_label);
945 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
946 build_empty_stmt (input_location));
947 gfc_add_expr_to_block (&body, tmp);
950 /* Evaluate the loop condition. */
/* Test dovar == to BEFORE incrementing: this avoids overflow when TO is
   the extreme value of the type.  */
951 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
952 cond = gfc_evaluate_now (cond, &body);
954 /* Increment the loop variable. */
955 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
956 gfc_add_modify (&body, dovar, tmp);
958 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
959 gfc_add_modify (&body, saved_dovar, dovar);
962 tmp = build1_v (GOTO_EXPR, exit_label);
963 TREE_USED (exit_label) = 1;
964 tmp = fold_build3 (COND_EXPR, void_type_node,
965 cond, tmp, build_empty_stmt (input_location));
966 gfc_add_expr_to_block (&body, tmp);
968 /* Finish the loop body. */
969 tmp = gfc_finish_block (&body);
970 tmp = build1_v (LOOP_EXPR, tmp);
972 /* Only execute the loop if the number of iterations is positive. */
973 if (tree_int_cst_sgn (step) > 0)
974 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
976 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
977 tmp = fold_build3 (COND_EXPR, void_type_node,
978 cond, tmp, build_empty_stmt (input_location));
979 gfc_add_expr_to_block (pblock, tmp);
981 /* Add the exit label. */
982 tmp = build1_v (LABEL_EXPR, exit_label);
983 gfc_add_expr_to_block (pblock, tmp);
985 return gfc_finish_block (pblock);
988 /* Translate the DO construct. This obviously is one of the most
989 important ones to get right with any compiler, but especially
992 We special case some loop forms as described in gfc_trans_simple_do.
993 For other cases we implement them with a separate loop count,
994 as described in the standard.
996 We translate a do loop from:
998 DO dovar = from, to, step
1004 [evaluate loop bounds and step]
1005 empty = (step > 0 ? to < from : to > from);
1006 countm1 = (to - from) / step;
1008 if (empty) goto exit_label;
1014 if (countm1 ==0) goto exit_label;
1019 countm1 is an unsigned integer. It is equal to the loop count minus one,
1020 because the loop count itself can overflow. */
1023 gfc_trans_do (gfc_code * code, tree exit_cond)
1027 tree saved_dovar = NULL;
1042 gfc_start_block (&block);
1044 /* Evaluate all the expressions in the iterator. */
1045 gfc_init_se (&se, NULL);
1046 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1047 gfc_add_block_to_block (&block, &se.pre);
1049 type = TREE_TYPE (dovar);
1051 gfc_init_se (&se, NULL);
1052 gfc_conv_expr_val (&se, code->ext.iterator->start);
1053 gfc_add_block_to_block (&block, &se.pre);
1054 from = gfc_evaluate_now (se.expr, &block);
1056 gfc_init_se (&se, NULL);
1057 gfc_conv_expr_val (&se, code->ext.iterator->end);
1058 gfc_add_block_to_block (&block, &se.pre);
1059 to = gfc_evaluate_now (se.expr, &block);
1061 gfc_init_se (&se, NULL);
1062 gfc_conv_expr_val (&se, code->ext.iterator->step);
1063 gfc_add_block_to_block (&block, &se.pre);
1064 step = gfc_evaluate_now (se.expr, &block);
1066 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1068 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1069 fold_convert (type, integer_zero_node));
1070 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1071 "DO step value is zero");
1074 /* Special case simple loops. */
1075 if (TREE_CODE (type) == INTEGER_TYPE
1076 && (integer_onep (step)
1077 || tree_int_cst_equal (step, integer_minus_one_node)))
1078 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1080 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1081 fold_convert (type, integer_zero_node));
/* The iteration count is kept unsigned: the count itself may overflow
   the signed type even when every value of dovar is representable.  */
1083 if (TREE_CODE (type) == INTEGER_TYPE)
1084 utype = unsigned_type_for (type);
1086 utype = unsigned_type_for (gfc_array_index_type);
1087 countm1 = gfc_create_var (utype, "countm1");
1089 /* Cycle and exit statements are implemented with gotos. */
1090 cycle_label = gfc_build_label_decl (NULL_TREE);
1091 exit_label = gfc_build_label_decl (NULL_TREE);
1092 TREE_USED (exit_label) = 1;
1094 /* Initialize the DO variable: dovar = from. */
1095 gfc_add_modify (&block, dovar, from);
1097 /* Save value for do-tinkering checking. */
1098 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1100 saved_dovar = gfc_create_var (type, ".saved_dovar");
1101 gfc_add_modify (&block, saved_dovar, dovar);
1104 /* Initialize loop count and jump to exit label if the loop is empty.
1105 This code is executed before we enter the loop body. We generate:
1106 step_sign = sign(1,step);
1117 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1121 if (TREE_CODE (type) == INTEGER_TYPE)
1123 tree pos, neg, step_sign, to2, from2, step2;
1125 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1127 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1128 build_int_cst (TREE_TYPE (step), 0));
1129 step_sign = fold_build3 (COND_EXPR, type, tmp,
1130 build_int_cst (type, -1),
1131 build_int_cst (type, 1));
/* Empty-loop checks: positive step and to < from, or negative step and
   to > from, means zero iterations.  */
1133 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1134 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1135 build1_v (GOTO_EXPR, exit_label),
1136 build_empty_stmt (input_location));
1138 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1139 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1140 build1_v (GOTO_EXPR, exit_label),
1141 build_empty_stmt (input_location));
1142 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1144 gfc_add_expr_to_block (&block, tmp);
1146 /* Calculate the loop count. to-from can overflow, so
1147 we cast to unsigned. */
1149 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1150 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1151 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1152 step2 = fold_convert (utype, step2);
1153 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1154 tmp = fold_convert (utype, tmp);
1155 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1156 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1157 gfc_add_expr_to_block (&block, tmp);
/* Non-integer (real) DO variable: compute the trip count in floating
   point and truncate.  */
1161 /* TODO: We could use the same width as the real type.
1162 This would probably cause more problems that it solves
1163 when we implement "long double" types. */
1165 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1166 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1167 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1168 gfc_add_modify (&block, countm1, tmp);
1170 /* We need a special check for empty loops:
1171 empty = (step > 0 ? to < from : to > from); */
1172 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1173 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1174 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1175 /* If the loop is empty, go directly to the exit label. */
1176 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1177 build1_v (GOTO_EXPR, exit_label),
1178 build_empty_stmt (input_location));
1179 gfc_add_expr_to_block (&block, tmp);
1183 gfc_start_block (&body);
1185 /* Put these labels where they can be found later. We put the
1186 labels in a TREE_LIST node (because TREE_CHAIN is already
1187 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1188 label in TREE_VALUE (backend_decl). */
1190 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1192 /* Main loop body. */
1193 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1194 gfc_add_expr_to_block (&body, tmp);
1196 /* Label for cycle statements (if needed). */
1197 if (TREE_USED (cycle_label))
1199 tmp = build1_v (LABEL_EXPR, cycle_label);
1200 gfc_add_expr_to_block (&body, tmp);
1203 /* Check whether someone has modified the loop variable. */
1204 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1206 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1207 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1208 "Loop variable has been modified");
1211 /* Exit the loop if there is an I/O result condition or error. */
1214 tmp = build1_v (GOTO_EXPR, exit_label);
1215 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1216 build_empty_stmt (input_location));
1217 gfc_add_expr_to_block (&body, tmp);
1220 /* Increment the loop variable. */
1221 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1222 gfc_add_modify (&body, dovar, tmp);
1224 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1225 gfc_add_modify (&body, saved_dovar, dovar);
1227 /* End with the loop condition. Loop until countm1 == 0. */
1228 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1229 build_int_cst (utype, 0));
1230 tmp = build1_v (GOTO_EXPR, exit_label);
1231 tmp = fold_build3 (COND_EXPR, void_type_node,
1232 cond, tmp, build_empty_stmt (input_location));
1233 gfc_add_expr_to_block (&body, tmp);
1235 /* Decrement the loop count. */
1236 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1237 gfc_add_modify (&body, countm1, tmp);
1239 /* End of loop body. */
1240 tmp = gfc_finish_block (&body);
1242 /* The for loop itself. */
1243 tmp = build1_v (LOOP_EXPR, tmp);
1244 gfc_add_expr_to_block (&block, tmp);
1246 /* Add the exit label. */
1247 tmp = build1_v (LABEL_EXPR, exit_label);
1248 gfc_add_expr_to_block (&block, tmp);
1250 return gfc_finish_block (&block);
1254 /* Translate the DO WHILE construct.
1267 if (! cond) goto exit_label;
1273 Because the evaluation of the exit condition `cond' may have side
1274 effects, we can't do much for empty loop bodies. The backend optimizers
1275 should be smart enough to eliminate any dead loops. */
1278 gfc_trans_do_while (gfc_code * code)
1286 /* Everything we build here is part of the loop body. */
1287 gfc_start_block (&block);
1289 /* Cycle and exit statements are implemented with gotos. */
1290 cycle_label = gfc_build_label_decl (NULL_TREE);
1291 exit_label = gfc_build_label_decl (NULL_TREE);
1293 /* Put the labels where they can be found later. See gfc_trans_do(). */
1294 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1296 /* Create a GIMPLE version of the exit condition. */
1297 gfc_init_se (&cond, NULL);
/* The condition is (re-)evaluated at the top of every iteration, so its
   side effects belong inside the loop body block.  */
1298 gfc_conv_expr_val (&cond, code->expr1);
1299 gfc_add_block_to_block (&block, &cond.pre);
1300 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1302 /* Build "IF (! cond) GOTO exit_label". */
1303 tmp = build1_v (GOTO_EXPR, exit_label);
1304 TREE_USED (exit_label) = 1;
1305 tmp = fold_build3 (COND_EXPR, void_type_node,
1306 cond.expr, tmp, build_empty_stmt (input_location));
1307 gfc_add_expr_to_block (&block, tmp);
1309 /* The main body of the loop. */
1310 tmp = gfc_trans_code (code->block->next);
1311 gfc_add_expr_to_block (&block, tmp);
1313 /* Label for cycle statements (if needed). */
1314 if (TREE_USED (cycle_label))
1316 tmp = build1_v (LABEL_EXPR, cycle_label);
1317 gfc_add_expr_to_block (&block, tmp);
1320 /* End of loop body. */
1321 tmp = gfc_finish_block (&block);
1323 gfc_init_block (&block);
1324 /* Build the loop. */
1325 tmp = build1_v (LOOP_EXPR, tmp);
1326 gfc_add_expr_to_block (&block, tmp);
1328 /* Add the exit label. */
1329 tmp = build1_v (LABEL_EXPR, exit_label);
1330 gfc_add_expr_to_block (&block, tmp);
1332 return gfc_finish_block (&block);
1336 /* Translate the SELECT CASE construct for INTEGER case expressions,
1337 without killing all potential optimizations. The problem is that
1338 Fortran allows unbounded cases, but the back-end does not, so we
1339 need to intercept those before we enter the equivalent SWITCH_EXPR
1342 For example, we translate this,
1345 CASE (:100,101,105:115)
1355 to the GENERIC equivalent,
1359 case (minimum value for typeof(expr) ... 100:
1365 case 200 ... (maximum value for typeof(expr):
/* Build a GENERIC SWITCH_EXPR for an INTEGER SELECT CASE.  Unbounded
   Fortran cases (CASE (:N) / CASE (N:)) are clamped to the min/max
   value of the case expression's type, since the middle end requires
   bounded case labels.  */
gfc_trans_integer_select (gfc_code * code)
  gfc_start_block (&block);

  /* Calculate the switch expression; its side effects go into BLOCK.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  /* Every case body "breaks" by jumping to this label.  */
  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  /* Outer loop: one iteration per CASE block; inner loop: each range
     in that block's case list.  */
  for (c = code->block; c; c = c->block)
    for (cp = c->ext.case_list; cp; cp = cp->next)
        /* Assume it's the default case (both bounds NULL).  */
        low = high = NULL_TREE;
          low = gfc_conv_mpz_to_tree (cp->low->value.integer,
          /* If there's only a lower bound, set the high bound to the
             maximum value of the case expression's type.  */
            high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
          /* Three cases are possible here:

             1) There is no lower bound, e.g. CASE (:N).
             2) There is a lower bound .NE. high bound, that is
                a case range, e.g. CASE (N:M) where M>N (we make
                sure that M>N during type resolution).
             3) There is a lower bound, and it has the same value
                as the high bound, e.g. CASE (N:N).  This is our
                internal representation of CASE(N).

             In the first and second case, we need to set a value for
             high.  In the third case, we don't because the GCC middle
             end represents a single case value by just letting high be
             a NULL_TREE.  We can't do that because we need to be able
             to represent unbounded cases.  */
              && mpz_cmp (cp->low->value.integer,
                          cp->high->value.integer) != 0))
            high = gfc_conv_mpz_to_tree (cp->high->value.integer,
          /* Unbounded case: clamp the low end to the type minimum.  */
            low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));

        /* Build a label for this case.  */
        label = gfc_build_label_decl (NULL_TREE);

        /* Add this case label.
           Add parameter 'label', make it match GCC backend.  */
        tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
        gfc_add_expr_to_block (&body, tmp);

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct (Fortran has no fallthrough).  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
1484 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1486 There are only two cases possible here, even though the standard
1487 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1488 .FALSE., and DEFAULT.
1490 We never generate more than two blocks here. Instead, we always
1491 try to eliminate the DEFAULT case. This way, we can translate this
1492 kind of SELECT construct to a simple
1496 expression in GENERIC. */
/* Lower a LOGICAL SELECT CASE to (at most) one COND_EXPR, or to
   straight-line code when .TRUE. and .FALSE. share a single block.  */
gfc_trans_logical_select (gfc_code * code)
  /* T, F and D point at the case blocks for .TRUE., .FALSE. and
     DEFAULT respectively.  */
  gfc_code *t, *f, *d;

  /* Assume we don't have any cases at all.  */
  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list is
     always executed, and we don't generate a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
      for (cp = c->ext.case_list; cp; cp = cp->next)
            if (cp->low->value.logical == 0) /* .FALSE.  */
            else /* if (cp->value.logical != 0), thus .TRUE.  */

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
      /* Cases for .TRUE. and .FALSE. are in the same block.  Just
         translate the code for these cases, append it to the current
         block.  */
      gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
      tree true_tree, false_tree, stmt;

      /* Either arm may stay empty if it has no corresponding case.  */
      true_tree = build_empty_stmt (input_location);
      false_tree = build_empty_stmt (input_location);

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
         Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
         make the missing case the default case.  */
      if (t != NULL && f != NULL)

      /* Translate the code for each of these blocks, and append it to
         the current block.  */
        true_tree = gfc_trans_code (t->next);

        false_tree = gfc_trans_code (f->next);

      stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
                          true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
1584 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1585 Instead of generating compares and jumps, it is far simpler to
1586 generate a data structure describing the cases in order and call a
1587 library subroutine that locates the right case.
1588 This is particularly true because this is the only case where we
1589 might have to dispose of a temporary.
1590 The library subroutine returns a pointer to jump to or NULL if no
1591 branches are to be taken. */
/* Lower a CHARACTER SELECT CASE: build a static, ordered jump table of
   (string1, string2, target) records and call a library routine at run
   time to locate the matching case, then SWITCH on the returned case
   number.  k indexes the per-kind (char1/char4) cached record types.  */
gfc_trans_character_select (gfc_code *code)
  tree init, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  VEC(constructor_elt,gc) *inits = NULL;

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree select_struct[2];
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);

  /* Select the cached type slot by character kind.  */
  if (code->expr1->ts.kind == 1)
  else if (code->expr1->ts.kind == 4)

  /* Lazily build the record type { string1, string1_len, string2,
     string2_len, target } the first time this kind is seen.  */
  if (select_struct[k] == NULL)
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr1->ts.kind == 1)
        TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr1->ts.kind == 4)
        TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");

#define ADD_FIELD(NAME, TYPE)                                  \
  ss_##NAME[k] = gfc_add_field_to_struct                       \
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k],     \
      get_identifier (stringize(NAME)), TYPE)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);

      gfc_finish_type (select_struct[k]);

  /* Walk to the head of the (sorted, doubly linked) case list, then
     count the cases.  */
  cp = code->block->ext.case_list;
  while (cp->left != NULL)

  for (d = cp; d; d = d->right)

  end_label = gfc_build_label_decl (NULL_TREE);

  /* Generate the body: a CASE_LABEL_EXPR per case, keyed by its
     position number d->n, followed by the translated case code.  */
  gfc_start_block (&block);
  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
      for (d = c->ext.case_list; d; d = d->next)
          label = gfc_build_label_decl (NULL_TREE);
          tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, d->n),
                             build_int_cst (NULL_TREE, d->n), label);
          gfc_add_expr_to_block (&body, tmp);

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break out of the switch at the end of each case.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);

  /* Generate the structure describing the branches.  A NULL bound is
     encoded as a null string pointer with zero length.  */
  for(d = cp; d; d = d->right)
      VEC(constructor_elt,gc) *node = NULL;

      gfc_init_se (&se, NULL);

          CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
          CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);

          gfc_conv_expr_reference (&se, d->low);

          CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
          CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);

      if (d->high == NULL)
          CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
          CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);

          gfc_init_se (&se, NULL);
          gfc_conv_expr_reference (&se, d->high);

          CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
          CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);

      CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
                              build_int_cst (integer_type_node, d->n));

      tmp = build_constructor (select_struct[k], node);
      CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);

  type = build_array_type (select_struct[k],
                           build_index_type (build_int_cst (NULL_TREE, n-1)));

  init = build_constructor (type, inits);
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;

  /* Build the library call.  */
  /* NOTE(review): this takes the address of INIT (the constructor)
     rather than of the "jumptable" variable just created; upstream GCC
     passes the variable (tmp) here -- verify against the full file, as
     the address of a bare CONSTRUCTOR looks wrong.  */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_reference (&se, code->expr1);

  gfc_add_block_to_block (&block, &se.pre);

  /* Pick the runtime lookup routine matching the character kind.  */
  if (code->expr1->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr1->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;

  tmp = build_call_expr_loc (input_location,
                             fndecl, 4, init, build_int_cst (NULL_TREE, n),
                             se.expr, se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &se.post);

  /* Dispatch on the case number returned by the library.  */
  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
1770 /* Translate the three variants of the SELECT CASE construct.
1772 SELECT CASEs with INTEGER case expressions can be translated to an
1773 equivalent GENERIC switch statement, and for LOGICAL case
1774 expressions we build one or two if-else compares.
1776 SELECT CASEs with CHARACTER case expressions are a whole different
1777 story, because they don't exist in GENERIC. So we sort them and
1778 do a binary search at runtime.
1780 Fortran has no BREAK statement, and it does not allow jumps from
1781 one case block to another. That makes things a lot easier for
1785 gfc_trans_select (gfc_code * code)
1787 gcc_assert (code && code->expr1);
1789 /* Empty SELECT constructs are legal. */
1790 if (code->block == NULL)
1791 return build_empty_stmt (input_location);
1793 /* Select the correct translation function. */
1794 switch (code->expr1->ts.type)
1796 case BT_LOGICAL: return gfc_trans_logical_select (code);
1797 case BT_INTEGER: return gfc_trans_integer_select (code);
1798 case BT_CHARACTER: return gfc_trans_character_select (code);
1800 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1806 /* Traversal function to substitute a replacement symtree if the symbol
1807 in the expression is the same as that passed. f == 2 signals that
1808 that variable itself is not to be checked - only the references.
1809 This group of functions is used when the variable expression in a
1810 FORALL assignment has internal references. For example:
1811 FORALL (i = 1:4) p(p(i)) = i
1812 The only recourse here is to store a copy of 'p' for the index
/* Replacement and saved symtrees shared by the forall_replace /
   forall_restore traversal callbacks below; NEW_SYMTREE is installed by
   forall_make_variable_temp and torn down by cleanup_forall_symtrees.  */
static gfc_symtree *new_symtree;
static gfc_symtree *old_symtree;
/* gfc_traverse_expr callback: redirect references to SYM to the
   temporary's symtree (NEW_SYMTREE).  *F == 2 means the outermost
   variable itself is skipped and only its references are rewritten.  */
forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
  /* Only variable references can carry a symtree.  */
  if (expr->expr_type != EXPR_VARIABLE)
  else if (expr->symtree->n.sym == sym)
    expr->symtree = new_symtree;
/* Walk expression E replacing every reference to SYM with the
   temporary's symtree; F seeds the flag consumed by forall_replace.  */
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
  gfc_traverse_expr (e, sym, forall_replace, f);
/* gfc_traverse_expr callback undoing forall_replace: any reference
   pointing at NEW_SYMTREE is pointed back at OLD_SYMTREE.  */
forall_restore (gfc_expr *expr,
                gfc_symbol *sym ATTRIBUTE_UNUSED,
                int *f ATTRIBUTE_UNUSED)
  /* Only variable references can carry a symtree.  */
  if (expr->expr_type != EXPR_VARIABLE)

  if (expr->symtree == new_symtree)
    expr->symtree = old_symtree;
/* Walk expression E restoring the original symtree everywhere the
   temporary's symtree had been substituted.  */
forall_restore_symtree (gfc_expr *e)
  gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the lvalue of a FORALL assignment whose
   variable expression references itself (e.g. p(p(i)) = i), then
   rewrite both sides of the assignment to use the copy.  Setup code is
   appended to PRE, cleanup to POST.  Sets the file-statics OLD_SYMTREE
   and NEW_SYMTREE consumed by the forall_replace/restore callbacks.  */
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;

  /* Build a copy of the lvalue.  */
  old_symtree = c->expr1->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  if (old_sym->attr.dimension)
      /* Array case: copy via a subreference array argument temp.  */
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);

      if (e->ts.type != BT_CHARACTER)
          /* Use the variable offset for the temporary.  */
          tmp = gfc_conv_array_offset (old_sym->backend_decl);
          gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
      /* Scalar case: evaluate the variable and assign it to a fresh
         temporary of the same type.  */
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
          /* Character scalars need a string temporary of matching
             length.  */
          tse.string_length = rse.string_length;
          tmp = gfc_get_character_type_len (gfc_default_character_kind,
          tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
          gfc_add_block_to_block (pre, &tse.pre);
          gfc_add_block_to_block (post, &tse.post);
          tmp = gfc_typenode_for_spec (&e->ts);
          tse.expr = gfc_create_var (tmp, "temp");

      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
                                     e->expr_type == EXPR_VARIABLE, true);
      gfc_add_expr_to_block (pre, tmp);

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.temporary = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     (flag 2: skip the outermost variable, rewrite only references).  */
  forall_replace_symtree (c->expr1, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
1942 /* Handles dependencies in forall assignments. */
/* Handle dependencies in FORALL assignments: decide whether a
   temporary is needed for C's assignment and, for self-referencing
   lvalues or overlapping substrings, materialize the lvalue copy via
   forall_make_variable_temp (setup into PRE, cleanup into POST).  */
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
  lsym = c->expr1->symtree->n.sym;
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  */
  if (lsym->ts.type == BT_DERIVED
      && lsym->ts.u.derived->attr.pointer_comp)

  /* An index of the lvalue appearing inside the lvalue itself forces
     the copy.  */
  if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
      forall_make_variable_temp (c, pre, post);

  /* Substrings with dependencies are treated in the same way.  */
  if (c->expr1->ts.type == BT_CHARACTER
      && c->expr2->expr_type == EXPR_VARIABLE
      && lsym == c->expr2->symtree->n.sym)
      for (lref = c->expr1->ref; lref; lref = lref->next)
        if (lref->type == REF_SUBSTRING)
      for (rref = c->expr2->ref; rref; rref = rref->next)
        if (rref->type == REF_SUBSTRING)

      /* Overlap only when the rhs substring starts before the lhs one.  */
          && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
          forall_make_variable_temp (c, pre, post);
/* Undo forall_make_variable_temp: restore the original symtrees in
   both sides of the assignment and release the fake symtree.  The
   symbol must be freed before its containing symtree node.  */
cleanup_forall_symtrees (gfc_code *c)
  forall_restore_symtree (c->expr1);
  forall_restore_symtree (c->expr2);
  gfc_free (new_symtree->n.sym);
  gfc_free (new_symtree);
2007 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2008 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2009 indicates whether we should generate code to test the FORALLs mask
2010 array. OUTER is the loop header to be used for initializing mask
2013 The generated loop format is:
2014 count = (end - start + step) / step
/* Wrap BODY in one counted loop per FORALL index of FORALL_TMP
   (innermost first).  Each loop runs (end - start + step) / step
   times, counting the counter down to zero so the bounds are only
   evaluated once.  MASK_FLAG selects whether mask bookkeeping is
   emitted; OUTER receives the mask-index initialization.  */
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
                       int mask_flag, stmtblock_t *outer)
  tree var, start, end, step;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);

  /* Walk the iterators innermost-out, wrapping BODY each time.  */
  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
      start = iter->start;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition: leave once the countdown reaches zero.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node,
                          count, build_int_cst (TREE_TYPE (count), 0));
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node,
                         cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
      gfc_add_modify (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
         innermost loop (n == 0), since the mask is indexed linearly.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
          tree maskindex = forall_tmp->maskindex;
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             maskindex, gfc_index_one_node);
          gfc_add_modify (&block, maskindex, tmp);

      /* Decrement the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
                         build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify (&block, var, start);

      /* Initialize the loop counter: count = (end - start + step) / step.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
      gfc_add_modify (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      /* The wrapped loop becomes the body of the next-outer loop.  */
      body = gfc_finish_block (&block);
2119 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2120 is nonzero, the body is controlled by all masks in the forall nest.
2121 Otherwise, the innermost loop is not controlled by it's mask. This
2122 is used for initializing that mask. */
/* Wrap BODY in the full nest of FORALL loops described by
   NESTED_FORALL_INFO (innermost first).  If MASK_FLAG is nonzero the
   body is guarded by every mask in the nest; otherwise the innermost
   loop runs unguarded (used when initializing that mask itself).  */
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
  forall_info *forall_tmp;
  tree mask, maskindex;

  gfc_start_block (&header);

  forall_tmp = nested_forall_info;
  while (forall_tmp != NULL)
      /* Generate body with masks' control.  */
          mask = forall_tmp->mask;
          maskindex = forall_tmp->maskindex;

          /* If a mask was specified make the assignment conditional.  */
              tmp = gfc_build_array_ref (mask, maskindex, NULL);
              body = build3_v (COND_EXPR, tmp, body,
                               build_empty_stmt (input_location));

      /* Wrap the (possibly mask-guarded) body in this level's loops,
         then move outward.  */
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
      forall_tmp = forall_tmp->prev_nest;

  gfc_add_expr_to_block (&header, body);
  return gfc_finish_block (&header);
2162 /* Allocate data for holding a temporary array. Returns either a local
2163 temporary array or a pointer variable. */
/* Allocate data for holding a temporary array of SIZE elements and
   BYTESIZE bytes.  Small constant-size temporaries go on the stack;
   otherwise malloc is called and *PDATA receives the pointer so the
   caller can free it.  Returns the array variable (or pointer).  */
gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
  if (INTEGER_CST_P (size))
      /* Upper bound of the index range is size - 1.  */
      tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
                         gfc_index_one_node);

  type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
  type = build_array_type (elem_type, type);
  if (gfc_can_put_var_on_stack (bytesize))
      /* Stack allocation requires a compile-time-constant size.  */
      gcc_assert (INTEGER_CST_P (size));
      tmpvar = gfc_create_var (type, "temp");
      /* Heap fallback: a pointer variable filled by malloc.  */
      tmpvar = gfc_create_var (build_pointer_type (type), "temp");
      *pdata = convert (pvoid_type_node, tmpvar);

      tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
      gfc_add_modify (pblock, tmpvar, tmp);
2201 /* Generate codes to copy the temporary to the actual lhs. */
/* Generate code to copy the temporary TMP1 back into the actual lhs
   EXPR.  COUNT1 indexes the temporary, COUNT3 the WHEREMASK (whose
   sense may be inverted by INVERT).  Handles both scalar and
   scalarized array lhs.  */
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
                               tree count1, tree wheremask, bool invert)
  stmtblock_t block, body;

  lss = gfc_walk_expr (expr);

  if (lss == gfc_ss_terminator)
      /* Scalar lhs: single assignment, no scalarization loop.  */
      gfc_start_block (&block);

      gfc_init_se (&lse, NULL);

      /* Translate the expression.  */
      gfc_conv_expr (&lse, expr);

      /* Form the expression for the temporary.  */
      tmp = gfc_build_array_ref (tmp1, count1, NULL);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_modify (&block, lse.expr, tmp);
      gfc_add_block_to_block (&block, &lse.post);

      /* Increment the count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
                         gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);

      tmp = gfc_finish_block (&block);
      /* Array lhs: build a scalarization loop over it.  */
      gfc_start_block (&block);

      gfc_init_loopinfo (&loop1);
      gfc_init_se (&rse, NULL);
      gfc_init_se (&lse, NULL);

      /* Associate the lss with the loop.  */
      gfc_add_ss_to_loop (&loop1, lss);

      /* Calculate the bounds of the scalarization.  */
      gfc_conv_ss_startstride (&loop1);
      /* Setup the scalarizing loops.  */
      gfc_conv_loop_setup (&loop1, &expr->where);

      gfc_mark_ss_chain_used (lss, 1);

      /* Start the scalarized loop body.  */
      gfc_start_scalarized_body (&loop1, &body);

      /* Setup the gfc_se structures.  */
      gfc_copy_loopinfo_to_se (&lse, &loop1);

      /* Form the expression of the temporary.  */
      if (lss != gfc_ss_terminator)
        rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
      /* Translate expr.  */
      gfc_conv_expr (&lse, expr);

      /* Use the scalar assignment.  */
      rse.string_length = lse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);

      /* Form the mask expression according to the mask tree list.  */
          wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
          /* INVERT flips the mask's sense.  */
            wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
                                         TREE_TYPE (wheremaskexpr),
          tmp = fold_build3 (COND_EXPR, void_type_node,
                             build_empty_stmt (input_location));

      gfc_add_expr_to_block (&body, tmp);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Increment count3 (the wheremask index, when present).  */
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count3, gfc_index_one_node);
          gfc_add_modify (&body, count3, tmp);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop1, &body);
      gfc_add_block_to_block (&block, &loop1.pre);
      gfc_add_block_to_block (&block, &loop1.post);
      gfc_cleanup_loop (&loop1);

      tmp = gfc_finish_block (&block);
2316 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2317 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2318 and should not be freed. WHEREMASK is the conditional execution mask
2319 whose sense may be inverted by INVERT. */
/* Generate code to copy rhs EXPR2 into the temporary TMP1.  COUNT1
   indexes the temporary, COUNT3 the WHEREMASK (sense inverted by
   INVERT).  LSS and RSS come from compute_inner_temp_size() and must
   not be freed here.  */
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
                               tree count1, gfc_ss *lss, gfc_ss *rss,
                               tree wheremask, bool invert)
  stmtblock_t block, body1;

  gfc_start_block (&block);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  if (lss == gfc_ss_terminator)
      /* Scalar assignment: evaluate rhs directly into the temp slot.  */
      gfc_init_block (&body1);
      gfc_conv_expr (&rse, expr2);
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop, &expr2->where);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);
      gfc_conv_expr (&rse, expr2);

      /* Form the expression of the temporary.  */
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);

  /* Use the scalar assignment.  */
  lse.string_length = rse.string_length;
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
                                 expr2->expr_type == EXPR_VARIABLE, true);

  /* Form the mask expression according to the mask tree list.  */
      wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
      /* INVERT flips the mask's sense.  */
        wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
                                     TREE_TYPE (wheremaskexpr),
      tmp = fold_build3 (COND_EXPR, void_type_node,
                         wheremaskexpr, tmp, build_empty_stmt (input_location));

  gfc_add_expr_to_block (&body1, tmp);

  if (lss == gfc_ss_terminator)
      gfc_add_block_to_block (&block, &body1);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
                         gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);
      /* Increment count1 inside the scalarized loop.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count1, gfc_index_one_node);
      gfc_add_modify (&body1, count1, tmp);

      /* Increment count3 (the wheremask index, when present).  */
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count3, gfc_index_one_node);
          gfc_add_modify (&body1, count3, tmp);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
         as tree nodes in SS may not be valid in different scope.  */

  tmp = gfc_finish_block (&block);
2428 /* Calculate the size of temporary needed in the assignment inside forall.
2429 LSS and RSS are filled in this function. */
2432 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2433 stmtblock_t * pblock,
2434 gfc_ss **lss, gfc_ss **rss)
2442 *lss = gfc_walk_expr (expr1);
2445 size = gfc_index_one_node;
2446 if (*lss != gfc_ss_terminator)
2448 gfc_init_loopinfo (&loop);
2450 /* Walk the RHS of the expression. */
2451 *rss = gfc_walk_expr (expr2);
2452 if (*rss == gfc_ss_terminator)
2454 /* The rhs is scalar. Add a ss for the expression. */
2455 *rss = gfc_get_ss ();
2456 (*rss)->next = gfc_ss_terminator;
2457 (*rss)->type = GFC_SS_SCALAR;
2458 (*rss)->expr = expr2;
2461 /* Associate the SS with the loop. */
2462 gfc_add_ss_to_loop (&loop, *lss);
2463 /* We don't actually need to add the rhs at this point, but it might
2464 make guessing the loop bounds a bit easier. */
2465 gfc_add_ss_to_loop (&loop, *rss);
2467 /* We only want the shape of the expression, not rest of the junk
2468 generated by the scalarizer. */
2469 loop.array_parameter = 1;
2471 /* Calculate the bounds of the scalarization. */
2472 save_flag = gfc_option.rtcheck;
2473 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2474 gfc_conv_ss_startstride (&loop);
2475 gfc_option.rtcheck = save_flag;
2476 gfc_conv_loop_setup (&loop, &expr2->where);
2478 /* Figure out how many elements we need. */
2479 for (i = 0; i < loop.dimen; i++)
2481 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2482 gfc_index_one_node, loop.from[i]);
2483 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2485 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2487 gfc_add_block_to_block (pblock, &loop.pre);
2488 size = gfc_evaluate_now (size, pblock);
2489 gfc_add_block_to_block (pblock, &loop.post);
2491 /* TODO: write a function that cleans up a loopinfo without freeing
2492 the SS chains. Currently a NOP. */
2499 /* Calculate the overall iterator number of the nested forall construct.
2500 This routine actually calculates the number of times the body of the
2501 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2502 that by the expression INNER_SIZE. The BLOCK argument specifies the
2503 block in which to calculate the result, and the optional INNER_SIZE_BODY
2504 argument contains any statements that need to executed (inside the loop)
2505 to initialize or calculate INNER_SIZE. */
/* Compute the total iteration count of the FORALL nest multiplied by
   INNER_SIZE.  Constant unmasked inner loops are folded at compile
   time; the remainder is accumulated at run time into "num" by
   looping over the rest of the nest.  */
compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
                             stmtblock_t *inner_size_body, stmtblock_t *block)
  forall_info *forall_tmp = nested_forall_info;

  /* We can eliminate the innermost unconditional loops with constant
     iteration count by folding the multiplication now.  */
  if (INTEGER_CST_P (inner_size))
          && !forall_tmp->mask
          && INTEGER_CST_P (forall_tmp->size))
          inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                    inner_size, forall_tmp->size);
          forall_tmp = forall_tmp->prev_nest;

  /* If there are no loops left, we have our constant result.  */

  /* Otherwise, create a temporary variable to compute the result.  */
  number = gfc_create_var (gfc_array_index_type, "num");
  gfc_add_modify (block, number, gfc_index_zero_node);

  /* Loop body: (re)compute INNER_SIZE if needed, then accumulate.  */
  gfc_start_block (&body);
  if (inner_size_body)
    gfc_add_block_to_block (&body, inner_size_body);
  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                     number, inner_size);
  gfc_add_modify (&body, number, tmp);
  tmp = gfc_finish_block (&body);

  /* Generate loops around the accumulation for any remaining nest.  */
  if (forall_tmp != NULL)
    tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);

  gfc_add_expr_to_block (block, tmp);
2558 /* Allocate temporary for forall construct. SIZE is the size of temporary
2559 needed. PTEMP1 is returned for space free. */
/* TYPE is the element type; SIZE is an element count which is scaled to a
   byte size unless the element occupies a single byte.  Returns the
   dereferenced allocation, i.e. an lvalue for the temporary array.  */
2562 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2569 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2570 if (!integer_onep (unit))
2571 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
/* NOTE(review): when UNIT is one the multiplication is skipped;
   presumably BYTESIZE falls back to SIZE in the branch not shown —
   confirm against the full source.  */
2576 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2579 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2584 /* Allocate temporary for forall construct according to the information in
2585 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2586 assignment inside forall. PTEMP1 is returned for space free. */
/* Convenience wrapper: the total element count is INNER_SIZE multiplied by
   the iteration count of every enclosing forall level (see
   compute_overall_iter_number), then allocation is delegated to
   allocate_temp_for_forall_nest_1.  */
2589 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2590 tree inner_size, stmtblock_t * inner_size_body,
2591 stmtblock_t * block, tree * ptemp1)
2595 /* Calculate the total size of temporary needed in forall construct. */
2596 size = compute_overall_iter_number (nested_forall_info, inner_size,
2597 inner_size_body, block);
2599 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2603 /* Handle assignments inside forall which need temporary.
2605 forall (i=start:end:stride; maskexpr)
2608 (where e,f<i> are arbitrary expressions possibly involving i
2609 and there is a dependency between e<i> and f<i>)
2611 masktmp(:) = maskexpr(:)
2616 for (i = start; i <= end; i += stride)
2620 for (i = start; i <= end; i += stride)
2622 if (masktmp[maskindex++])
2623 tmp[count1++] = f<i>
2627 for (i = start; i <= end; i += stride)
2629 if (masktmp[maskindex++])
2630 e<i> = tmp[count1++]
/* WHEREMASK/INVERT additionally restrict the assignment when the FORALL
   body is nested inside a WHERE construct.  Two passes are generated:
   first every RHS value is saved into a linear temporary, then the
   temporary is copied into the LHS, so LHS/RHS dependencies are safe.  */
2635 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2636 tree wheremask, bool invert,
2637 forall_info * nested_forall_info,
2638 stmtblock_t * block)
2646 stmtblock_t inner_size_body;
2648 /* Create vars. count1 is the current iterator number of the nested
2650 count1 = gfc_create_var (gfc_array_index_type, "count1");
2652 /* Count is the wheremask index. */
2655 count = gfc_create_var (gfc_array_index_type, "count");
2656 gfc_add_modify (block, count, gfc_index_zero_node);
2661 /* Initialize count1. */
2662 gfc_add_modify (block, count1, gfc_index_zero_node);
2664 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2665 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2666 gfc_init_block (&inner_size_body);
2667 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2670 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
/* For character LHS the temporary must carry the (possibly deferred)
   length; make sure the length's backend decl exists first.  */
2671 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2673 if (!expr1->ts.u.cl->backend_decl)
2676 gfc_init_se (&tse, NULL);
2677 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2678 expr1->ts.u.cl->backend_decl = tse.expr;
2680 type = gfc_get_character_type_len (gfc_default_character_kind,
2681 expr1->ts.u.cl->backend_decl);
2684 type = gfc_typenode_for_spec (&expr1->ts);
2686 /* Allocate temporary for nested forall construct according to the
2687 information in nested_forall_info and inner_size. */
2688 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2689 &inner_size_body, block, &ptemp1);
2691 /* Generate codes to copy rhs to the temporary . */
2692 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2695 /* Generate body and loops according to the information in
2696 nested_forall_info. */
2697 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2698 gfc_add_expr_to_block (block, tmp);
/* Reset the counters so the copy-back pass walks the temporary and the
   where-mask from the start again.  */
2701 gfc_add_modify (block, count1, gfc_index_zero_node);
2705 gfc_add_modify (block, count, gfc_index_zero_node);
2707 /* Generate codes to copy the temporary to lhs. */
2708 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2711 /* Generate body and loops according to the information in
2712 nested_forall_info. */
2713 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2714 gfc_add_expr_to_block (block, tmp);
2718 /* Free the temporary. */
2719 tmp = gfc_call_free (ptemp1);
2720 gfc_add_expr_to_block (block, tmp);
2725 /* Translate pointer assignment inside FORALL which need temporary. */
/* Two cases: for a scalar pointer LHS the temporary is a linear array of
   bare pointers; for an array pointer LHS it is an array of full array
   descriptors.  In both cases the RHS is captured for every forall
   iteration first, then copied into the LHS in a second pass.  */
2728 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2729 forall_info * nested_forall_info,
2730 stmtblock_t * block)
2744 tree tmp, tmp1, ptemp1;
2746 count = gfc_create_var (gfc_array_index_type, "count");
2747 gfc_add_modify (block, count, gfc_index_zero_node);
2749 inner_size = integer_one_node;
2750 lss = gfc_walk_expr (expr1);
2751 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: one pointer saved per forall iteration.  */
2752 if (lss == gfc_ss_terminator)
2754 type = gfc_typenode_for_spec (&expr1->ts);
2755 type = build_pointer_type (type);
2757 /* Allocate temporary for nested forall construct according to the
2758 information in nested_forall_info and inner_size. */
2759 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2760 inner_size, NULL, block, &ptemp1);
2761 gfc_start_block (&body);
2762 gfc_init_se (&lse, NULL);
2763 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2764 gfc_init_se (&rse, NULL);
2765 rse.want_pointer = 1;
2766 gfc_conv_expr (&rse, expr2);
2767 gfc_add_block_to_block (&body, &rse.pre);
2768 gfc_add_modify (&body, lse.expr,
2769 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2770 gfc_add_block_to_block (&body, &rse.post);
2772 /* Increment count. */
2773 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2774 count, gfc_index_one_node);
2775 gfc_add_modify (&body, count, tmp);
2777 tmp = gfc_finish_block (&body);
2779 /* Generate body and loops according to the information in
2780 nested_forall_info. */
2781 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2782 gfc_add_expr_to_block (block, tmp);
/* Second pass: replay the loops and copy the saved pointers to the
   LHS; COUNT is rewound first.  */
2785 gfc_add_modify (block, count, gfc_index_zero_node);
2787 gfc_start_block (&body);
2788 gfc_init_se (&lse, NULL);
2789 gfc_init_se (&rse, NULL);
2790 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2791 lse.want_pointer = 1;
2792 gfc_conv_expr (&lse, expr1);
2793 gfc_add_block_to_block (&body, &lse.pre);
2794 gfc_add_modify (&body, lse.expr, rse.expr);
2795 gfc_add_block_to_block (&body, &lse.post);
2796 /* Increment count. */
2797 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2798 count, gfc_index_one_node);
2799 gfc_add_modify (&body, count, tmp);
2800 tmp = gfc_finish_block (&body);
2802 /* Generate body and loops according to the information in
2803 nested_forall_info. */
2804 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2805 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: save one array descriptor per iteration
   instead of a bare pointer.  Set up a scalarizer loop just to obtain
   the bounds of the RHS section.  */
2809 gfc_init_loopinfo (&loop);
2811 /* Associate the SS with the loop. */
2812 gfc_add_ss_to_loop (&loop, rss);
2814 /* Setup the scalarizing loops and bounds. */
2815 gfc_conv_ss_startstride (&loop);
2817 gfc_conv_loop_setup (&loop, &expr2->where);
2819 info = &rss->data.info;
2820 desc = info->descriptor;
2822 /* Make a new descriptor. */
2823 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2824 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
2825 loop.from, loop.to, 1,
2826 GFC_ARRAY_UNKNOWN, true);
2828 /* Allocate temporary for nested forall construct. */
2829 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2830 inner_size, NULL, block, &ptemp1);
2831 gfc_start_block (&body);
2832 gfc_init_se (&lse, NULL);
2833 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
/* direct_byref makes gfc_conv_expr_descriptor fill the temporary's
   descriptor slot in place rather than creating its own.  */
2834 lse.direct_byref = 1;
2835 rss = gfc_walk_expr (expr2);
2836 gfc_conv_expr_descriptor (&lse, expr2, rss);
2838 gfc_add_block_to_block (&body, &lse.pre);
2839 gfc_add_block_to_block (&body, &lse.post);
2841 /* Increment count. */
2842 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2843 count, gfc_index_one_node);
2844 gfc_add_modify (&body, count, tmp);
2846 tmp = gfc_finish_block (&body);
2848 /* Generate body and loops according to the information in
2849 nested_forall_info. */
2850 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2851 gfc_add_expr_to_block (block, tmp);
/* Copy-back pass for the descriptor case: assign each saved descriptor
   to the LHS pointer's descriptor.  */
2854 gfc_add_modify (block, count, gfc_index_zero_node);
2856 parm = gfc_build_array_ref (tmp1, count, NULL);
2857 lss = gfc_walk_expr (expr1);
2858 gfc_init_se (&lse, NULL);
2859 gfc_conv_expr_descriptor (&lse, expr1, lss);
2860 gfc_add_modify (&lse.pre, lse.expr, parm);
2861 gfc_start_block (&body);
2862 gfc_add_block_to_block (&body, &lse.pre);
2863 gfc_add_block_to_block (&body, &lse.post);
2865 /* Increment count. */
2866 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2867 count, gfc_index_one_node);
2868 gfc_add_modify (&body, count, tmp);
2870 tmp = gfc_finish_block (&body);
2872 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2873 gfc_add_expr_to_block (block, tmp);
2875 /* Free the temporary. */
2878 tmp = gfc_call_free (ptemp1);
2879 gfc_add_expr_to_block (block, tmp);
2884 /* FORALL and WHERE statements are really nasty, especially when you nest
2885 them. All the rhs of a forall assignment must be evaluated before the
2886 actual assignments are performed. Presumably this also applies to all the
2887 assignments in an inner where statement. */
2889 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2890 linear array, relying on the fact that we process in the same order in all
2893 forall (i=start:end:stride; maskexpr)
2897 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2899 count = ((end + 1 - start) / stride)
2900 masktmp(:) = maskexpr(:)
2903 for (i = start; i <= end; i += stride)
2905 if (masktmp[maskindex++])
2909 for (i = start; i <= end; i += stride)
2911 if (masktmp[maskindex++])
2915 Note that this code only works when there are no dependencies.
2916 Forall loop with array assignments and data dependencies are a real pain,
2917 because the size of the temporary cannot always be determined before the
2918 loop is executed. This problem is compounded by the presence of nested
2923 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2943 gfc_forall_iterator *fa;
2946 gfc_saved_var *saved_vars;
2947 iter_info *this_forall;
2951 /* Do nothing if the mask is false. */
/* A mask that is a constant .false. means no iteration ever runs.  */
2953 && code->expr1->expr_type == EXPR_CONSTANT
2954 && !code->expr1->value.logical)
2955 return build_empty_stmt (input_location);
2958 /* Count the FORALL index number. */
2959 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2963 /* Allocate the space for var, start, end, step, varexpr. */
2964 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2965 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2966 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2967 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2968 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2969 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2971 /* Allocate the space for info. */
2972 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2974 gfc_start_block (&pre);
2975 gfc_init_block (&post);
2976 gfc_init_block (&block);
/* Translate each forall iterator: shadow the index symbol with a fresh
   backend variable and evaluate start, end and stride exactly once.  */
2979 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2981 gfc_symbol *sym = fa->var->symtree->n.sym;
2983 /* Allocate space for this_forall. */
2984 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2986 /* Create a temporary variable for the FORALL index. */
2987 tmp = gfc_typenode_for_spec (&sym->ts);
2988 var[n] = gfc_create_var (tmp, sym->name);
2989 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2991 /* Record it in this_forall. */
2992 this_forall->var = var[n];
2994 /* Replace the index symbol's backend_decl with the temporary decl. */
2995 sym->backend_decl = var[n];
2997 /* Work out the start, end and stride for the loop. */
2998 gfc_init_se (&se, NULL);
2999 gfc_conv_expr_val (&se, fa->start);
3000 /* Record it in this_forall. */
3001 this_forall->start = se.expr;
3002 gfc_add_block_to_block (&block, &se.pre);
3005 gfc_init_se (&se, NULL);
3006 gfc_conv_expr_val (&se, fa->end);
3007 /* Record it in this_forall. */
3008 this_forall->end = se.expr;
3009 gfc_make_safe_expr (&se);
3010 gfc_add_block_to_block (&block, &se.pre);
3013 gfc_init_se (&se, NULL);
3014 gfc_conv_expr_val (&se, fa->stride);
3015 /* Record it in this_forall. */
3016 this_forall->step = se.expr;
3017 gfc_make_safe_expr (&se);
3018 gfc_add_block_to_block (&block, &se.pre);
3021 /* Set the NEXT field of this_forall to NULL. */
3022 this_forall->next = NULL;
3023 /* Link this_forall to the info construct. */
/* Iterators are appended, preserving source order in the chain.  */
3024 if (info->this_loop)
3026 iter_info *iter_tmp = info->this_loop;
3027 while (iter_tmp->next != NULL)
3028 iter_tmp = iter_tmp->next;
3029 iter_tmp->next = this_forall;
3032 info->this_loop = this_forall;
3038 /* Calculate the size needed for the current forall level. */
3039 size = gfc_index_one_node;
3040 for (n = 0; n < nvar; n++)
3042 /* size = (end + step - start) / step. */
3043 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
3045 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
3047 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
3048 tmp = convert (gfc_array_index_type, tmp);
/* SIZE accumulates the product of all iterator trip counts.  */
3050 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
3053 /* Record the nvar and size of current forall level. */
3059 /* If the mask is .true., consider the FORALL unconditional. */
/* A constant .true. mask needs no mask temporary at all.  */
3060 if (code->expr1->expr_type == EXPR_CONSTANT
3061 && code->expr1->value.logical)
3069 /* First we need to allocate the mask. */
3072 /* As the mask array can be very big, prefer compact boolean types. */
3073 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3074 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3075 size, NULL, &block, &pmask);
3076 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3078 /* Record them in the info structure. */
3079 info->maskindex = maskindex;
3084 /* No mask was specified. */
3085 maskindex = NULL_TREE;
3086 mask = pmask = NULL_TREE;
3089 /* Link the current forall level to nested_forall_info. */
3090 info->prev_nest = nested_forall_info;
3091 nested_forall_info = info;
3093 /* Copy the mask into a temporary variable if required.
3094 For now we assume a mask temporary is needed. */
3097 /* As the mask array can be very big, prefer compact boolean types. */
3098 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3100 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3102 /* Start of mask assignment loop body. */
3103 gfc_start_block (&body);
3105 /* Evaluate the mask expression. */
3106 gfc_init_se (&se, NULL);
3107 gfc_conv_expr_val (&se, code->expr1);
3108 gfc_add_block_to_block (&body, &se.pre);
3110 /* Store the mask. */
3111 se.expr = convert (mask_type, se.expr);
3113 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3114 gfc_add_modify (&body, tmp, se.expr);
3116 /* Advance to the next mask element. */
3117 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3118 maskindex, gfc_index_one_node);
3119 gfc_add_modify (&body, maskindex, tmp);
3121 /* Generate the loops. */
3122 tmp = gfc_finish_block (&body);
3123 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3124 gfc_add_expr_to_block (&block, tmp);
/* Walk the statements of the FORALL body, dispatching on opcode; each
   statement is translated inside the nested loops set up above.  */
3127 c = code->block->next;
3129 /* TODO: loop merging in FORALL statements. */
3130 /* Now that we've got a copy of the mask, generate the assignment loops. */
3136 /* A scalar or array assignment. DO the simple check for
3137 lhs to rhs dependencies. These make a temporary for the
3138 rhs and form a second forall block to copy to variable. */
3139 need_temp = check_forall_dependencies(c, &pre, &post);
3141 /* Temporaries due to array assignment data dependencies introduce
3142 no end of problems. */
3144 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3145 nested_forall_info, &block);
3148 /* Use the normal assignment copying routines. */
3149 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3151 /* Generate body and loops. */
3152 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3154 gfc_add_expr_to_block (&block, tmp);
3157 /* Cleanup any temporary symtrees that have been made to deal
3158 with dependencies. */
3160 cleanup_forall_symtrees (c);
3165 /* Translate WHERE or WHERE construct nested in FORALL. */
3166 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3169 /* Pointer assignment inside FORALL. */
3170 case EXEC_POINTER_ASSIGN:
3171 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3173 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3174 nested_forall_info, &block);
3177 /* Use the normal assignment copying routines. */
3178 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3180 /* Generate body and loops. */
3181 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3183 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse, passing the current nesting chain down.  */
3188 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3189 gfc_add_expr_to_block (&block, tmp);
3192 /* Explicit subroutine calls are prevented by the frontend but interface
3193 assignments can legitimately produce them. */
3194 case EXEC_ASSIGN_CALL:
3195 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3196 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3197 gfc_add_expr_to_block (&block, tmp);
3207 /* Restore the original index variables. */
3208 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3209 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3211 /* Free the space for var, start, end, step, varexpr. */
3217 gfc_free (saved_vars);
3219 /* Free the space for this forall_info. */
3224 /* Free the temporary for the mask. */
3225 tmp = gfc_call_free (pmask);
3226 gfc_add_expr_to_block (&block, tmp);
3229 pushdecl (maskindex);
3231 gfc_add_block_to_block (&pre, &block);
3232 gfc_add_block_to_block (&pre, &post);
3234 return gfc_finish_block (&pre);
3238 /* Translate the FORALL statement or construct. */
3240 tree gfc_trans_forall (gfc_code * code)
3242 return gfc_trans_forall_1 (code, NULL);
3246 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3247 If the WHERE construct is nested in FORALL, compute the overall temporary
3248 needed by the WHERE mask expression multiplied by the iterator number of
3250 ME is the WHERE mask expression.
3251 MASK is the current execution mask upon input, whose sense may or may
3252 not be inverted as specified by the INVERT argument.
3253 CMASK is the updated execution mask on output, or NULL if not required.
3254 PMASK is the pending execution mask on output, or NULL if not required.
3255 BLOCK is the block in which to place the condition evaluation loops. */
3258 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3259 tree mask, bool invert, tree cmask, tree pmask,
3260 tree mask_type, stmtblock_t * block)
3265 stmtblock_t body, body1;
3266 tree count, cond, mtmp;
3269 gfc_init_loopinfo (&loop);
3271 lss = gfc_walk_expr (me);
3272 rss = gfc_walk_expr (me);
3274 /* Variable to index the temporary. */
3275 count = gfc_create_var (gfc_array_index_type, "count");
3276 /* Initialize count. */
3277 gfc_add_modify (block, count, gfc_index_zero_node);
3279 gfc_start_block (&body);
3281 gfc_init_se (&rse, NULL);
3282 gfc_init_se (&lse, NULL);
/* A scalar mask expression needs no scalarization loop.  */
3284 if (lss == gfc_ss_terminator)
3286 gfc_init_block (&body1);
3290 /* Initialize the loop. */
3291 gfc_init_loopinfo (&loop);
3293 /* We may need LSS to determine the shape of the expression. */
3294 gfc_add_ss_to_loop (&loop, lss);
3295 gfc_add_ss_to_loop (&loop, rss);
3297 gfc_conv_ss_startstride (&loop);
3298 gfc_conv_loop_setup (&loop, &me->where);
3300 gfc_mark_ss_chain_used (rss, 1);
3301 /* Start the loop body. */
3302 gfc_start_scalarized_body (&loop, &body1);
3304 /* Translate the expression. */
3305 gfc_copy_loopinfo_to_se (&rse, &loop);
3307 gfc_conv_expr (&rse, me);
3310 /* Variable to evaluate mask condition. */
3311 cond = gfc_create_var (mask_type, "cond");
3312 if (mask && (cmask || pmask))
3313 mtmp = gfc_create_var (mask_type, "mask");
3314 else mtmp = NULL_TREE;
3316 gfc_add_block_to_block (&body1, &lse.pre);
3317 gfc_add_block_to_block (&body1, &rse.pre);
3319 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
/* Combine the incoming execution mask (optionally inverted) with the
   freshly evaluated condition: CMASK = mask AND cond, while PMASK gets
   mask AND NOT cond (the "pending" mask for later ELSEWHERE clauses).  */
3321 if (mask && (cmask || pmask))
3323 tmp = gfc_build_array_ref (mask, count, NULL);
3325 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3326 gfc_add_modify (&body1, mtmp, tmp);
3331 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3334 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3335 gfc_add_modify (&body1, tmp1, tmp);
3340 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3341 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3343 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3344 gfc_add_modify (&body1, tmp1, tmp);
3347 gfc_add_block_to_block (&body1, &lse.post);
3348 gfc_add_block_to_block (&body1, &rse.post);
3350 if (lss == gfc_ss_terminator)
3352 gfc_add_block_to_block (&body, &body1);
3356 /* Increment count. */
3357 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3358 gfc_index_one_node);
3359 gfc_add_modify (&body1, count, tmp1);
3361 /* Generate the copying loops. */
3362 gfc_trans_scalarizing_loops (&loop, &body1);
3364 gfc_add_block_to_block (&body, &loop.pre);
3365 gfc_add_block_to_block (&body, &loop.post);
3367 gfc_cleanup_loop (&loop);
3368 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3369 as tree nodes in SS may not be valid in different scope. */
3372 tmp1 = gfc_finish_block (&body);
3373 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3374 if (nested_forall_info != NULL)
3375 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3377 gfc_add_expr_to_block (block, tmp1);
3381 /* Translate an assignment statement in a WHERE statement or construct
3382 statement. The MASK expression is used to control which elements
3383 of EXPR1 shall be assigned. The sense of MASK is specified by
/* COUNT1/COUNT2 index the linear mask temporary; COUNT2 is used by the
   extra temp->lhs copy loop when the scalarizer needs a temporary.
   CNEXT with a resolved_sym denotes a defined (user-overloaded)
   assignment, which is emitted as a masked call instead.  */
3387 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3388 tree mask, bool invert,
3389 tree count1, tree count2,
3395 gfc_ss *lss_section;
3402 tree index, maskexpr;
3404 /* A defined assignment. */
3405 if (cnext && cnext->resolved_sym)
3406 return gfc_trans_call (cnext, true, mask, count1, invert);
3409 /* TODO: handle this special case.
3410 Special case a single function returning an array. */
3411 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3413 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3419 /* Assignment of the form lhs = rhs. */
3420 gfc_start_block (&block);
3422 gfc_init_se (&lse, NULL);
3423 gfc_init_se (&rse, NULL);
3426 lss = gfc_walk_expr (expr1);
3429 /* In each where-assign-stmt, the mask-expr and the variable being
3430 defined shall be arrays of the same shape. */
3431 gcc_assert (lss != gfc_ss_terminator);
3433 /* The assignment needs scalarization. */
3436 /* Find a non-scalar SS from the lhs. */
3437 while (lss_section != gfc_ss_terminator
3438 && lss_section->type != GFC_SS_SECTION)
3439 lss_section = lss_section->next;
3441 gcc_assert (lss_section != gfc_ss_terminator);
3443 /* Initialize the scalarizer. */
3444 gfc_init_loopinfo (&loop);
3447 rss = gfc_walk_expr (expr2);
3448 if (rss == gfc_ss_terminator)
3450 /* The rhs is scalar. Add a ss for the expression. */
3451 rss = gfc_get_ss ();
3453 rss->next = gfc_ss_terminator;
3454 rss->type = GFC_SS_SCALAR;
3458 /* Associate the SS with the loop. */
3459 gfc_add_ss_to_loop (&loop, lss);
3460 gfc_add_ss_to_loop (&loop, rss);
3462 /* Calculate the bounds of the scalarization. */
3463 gfc_conv_ss_startstride (&loop);
3465 /* Resolve any data dependencies in the statement. */
/* This may create loop.temp_ss when lhs and rhs overlap, forcing the
   two-phase (rhs->temp, temp->lhs) code path below.  */
3466 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3468 /* Setup the scalarizing loops. */
3469 gfc_conv_loop_setup (&loop, &expr2->where);
3471 /* Setup the gfc_se structures. */
3472 gfc_copy_loopinfo_to_se (&lse, &loop);
3473 gfc_copy_loopinfo_to_se (&rse, &loop);
3476 gfc_mark_ss_chain_used (rss, 1);
3477 if (loop.temp_ss == NULL)
3480 gfc_mark_ss_chain_used (lss, 1);
3484 lse.ss = loop.temp_ss;
3485 gfc_mark_ss_chain_used (lss, 3);
3486 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3489 /* Start the scalarized loop body. */
3490 gfc_start_scalarized_body (&loop, &body);
3492 /* Translate the expression. */
3493 gfc_conv_expr (&rse, expr2);
3494 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3496 gfc_conv_tmp_array_ref (&lse);
3497 gfc_advance_se_ss_chain (&lse);
3500 gfc_conv_expr (&lse, expr1);
3502 /* Form the mask expression according to the mask. */
3504 maskexpr = gfc_build_array_ref (mask, index, NULL);
3506 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3508 /* Use the scalar assignment as is. */
/* The assignment is guarded: it only runs where the mask element (with
   the requested sense) is true.  */
3509 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3510 loop.temp_ss != NULL, false, true);
3512 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3514 gfc_add_expr_to_block (&body, tmp);
3516 if (lss == gfc_ss_terminator)
3518 /* Increment count1. */
3519 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3520 count1, gfc_index_one_node);
3521 gfc_add_modify (&body, count1, tmp);
3523 /* Use the scalar assignment as is. */
3524 gfc_add_block_to_block (&block, &body);
3528 gcc_assert (lse.ss == gfc_ss_terminator
3529 && rse.ss == gfc_ss_terminator);
3531 if (loop.temp_ss != NULL)
3533 /* Increment count1 before finish the main body of a scalarized
3535 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3536 count1, gfc_index_one_node);
3537 gfc_add_modify (&body, count1, tmp);
3538 gfc_trans_scalarized_loop_boundary (&loop, &body);
3540 /* We need to copy the temporary to the actual lhs. */
3541 gfc_init_se (&lse, NULL);
3542 gfc_init_se (&rse, NULL);
3543 gfc_copy_loopinfo_to_se (&lse, &loop);
3544 gfc_copy_loopinfo_to_se (&rse, &loop);
3546 rse.ss = loop.temp_ss;
3549 gfc_conv_tmp_array_ref (&rse);
3550 gfc_advance_se_ss_chain (&rse);
3551 gfc_conv_expr (&lse, expr1);
3553 gcc_assert (lse.ss == gfc_ss_terminator
3554 && rse.ss == gfc_ss_terminator);
3556 /* Form the mask expression according to the mask tree list. */
/* The copy-back loop is masked again, this time indexed by COUNT2.  */
3558 maskexpr = gfc_build_array_ref (mask, index, NULL);
3560 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3563 /* Use the scalar assignment as is. */
3564 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3566 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3567 build_empty_stmt (input_location));
3568 gfc_add_expr_to_block (&body, tmp);
3570 /* Increment count2. */
3571 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3572 count2, gfc_index_one_node);
3573 gfc_add_modify (&body, count2, tmp);
3577 /* Increment count1. */
3578 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3579 count1, gfc_index_one_node);
3580 gfc_add_modify (&body, count1, tmp);
3583 /* Generate the copying loops. */
3584 gfc_trans_scalarizing_loops (&loop, &body);
3586 /* Wrap the whole thing up. */
3587 gfc_add_block_to_block (&block, &loop.pre);
3588 gfc_add_block_to_block (&block, &loop.post);
3589 gfc_cleanup_loop (&loop);
3592 return gfc_finish_block (&block);
3596 /* Translate the WHERE construct or statement.
3597 This function can be called iteratively to translate the nested WHERE
3598 construct or statement.
3599 MASK is the control mask. */
/* INVERT gives the sense of MASK.  NESTED_FORALL_INFO is non-NULL when
   the WHERE is inside a FORALL, in which case all mask temporaries are
   sized for the full forall iteration space.  */
3602 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3603 forall_info * nested_forall_info, stmtblock_t * block)
3605 stmtblock_t inner_size_body;
3606 tree inner_size, size;
3615 tree count1, count2;
3619 tree pcmask = NULL_TREE;
3620 tree ppmask = NULL_TREE;
3621 tree cmask = NULL_TREE;
3622 tree pmask = NULL_TREE;
3623 gfc_actual_arglist *arg;
3625 /* the WHERE statement or the WHERE construct statement. */
3626 cblock = code->block;
3628 /* As the mask array can be very big, prefer compact boolean types. */
3629 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3631 /* Determine which temporary masks are needed. */
/* cmask controls the current clause's assignments; pmask carries the
   remaining ("pending") elements to later ELSEWHERE clauses.  Each case
   below avoids allocating a mask that no clause would ever read.  */
3634 /* One clause: No ELSEWHEREs. */
3635 need_cmask = (cblock->next != 0);
3638 else if (cblock->block->block)
3640 /* Three or more clauses: Conditional ELSEWHEREs. */
3644 else if (cblock->next)
3646 /* Two clauses, the first non-empty. */
3648 need_pmask = (mask != NULL_TREE
3649 && cblock->block->next != 0);
3651 else if (!cblock->block->next)
3653 /* Two clauses, both empty. */
3657 /* Two clauses, the first empty, the second non-empty. */
3660 need_cmask = (cblock->block->expr1 != 0);
3669 if (need_cmask || need_pmask)
3671 /* Calculate the size of temporary needed by the mask-expr. */
3672 gfc_init_block (&inner_size_body);
3673 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3674 &inner_size_body, &lss, &rss);
3676 /* Calculate the total size of temporary needed. */
3677 size = compute_overall_iter_number (nested_forall_info, inner_size,
3678 &inner_size_body, block);
3680 /* Check whether the size is negative. */
/* Clamp a negative extent (empty section) to zero before allocating.  */
3681 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3682 gfc_index_zero_node);
3683 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3684 gfc_index_zero_node, size);
3685 size = gfc_evaluate_now (size, block);
3687 /* Allocate temporary for WHERE mask if needed. */
3689 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3692 /* Allocate temporary for !mask if needed. */
3694 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3700 /* Each time around this loop, the where clause is conditional
3701 on the value of mask and invert, which are updated at the
3702 bottom of the loop. */
3704 /* Has mask-expr. */
3707 /* Ensure that the WHERE mask will be evaluated exactly once.
3708 If there are no statements in this WHERE/ELSEWHERE clause,
3709 then we don't need to update the control mask (cmask).
3710 If this is the last clause of the WHERE construct, then
3711 we don't need to update the pending control mask (pmask). */
3713 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3715 cblock->next ? cmask : NULL_TREE,
3716 cblock->block ? pmask : NULL_TREE,
3719 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3721 (cblock->next || cblock->block)
3722 ? cmask : NULL_TREE,
3723 NULL_TREE, mask_type, block);
3727 /* It's a final elsewhere-stmt. No mask-expr is present. */
3731 /* The body of this where clause are controlled by cmask with
3732 sense specified by invert. */
3734 /* Get the assignment statement of a WHERE statement, or the first
3735 statement in where-body-construct of a WHERE construct. */
3736 cnext = cblock->next;
3741 /* WHERE assignment statement. */
3742 case EXEC_ASSIGN_CALL:
/* Defined assignment: dig the lhs/rhs out of the actual arglist.  */
3744 arg = cnext->ext.actual;
3745 expr1 = expr2 = NULL;
3746 for (; arg; arg = arg->next)
3758 expr1 = cnext->expr1;
3759 expr2 = cnext->expr2;
3761 if (nested_forall_info != NULL)
3763 need_temp = gfc_check_dependency (expr1, expr2, 0);
3764 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3765 gfc_trans_assign_need_temp (expr1, expr2,
3767 nested_forall_info, block);
3770 /* Variables to control maskexpr. */
3771 count1 = gfc_create_var (gfc_array_index_type, "count1");
3772 count2 = gfc_create_var (gfc_array_index_type, "count2");
3773 gfc_add_modify (block, count1, gfc_index_zero_node);
3774 gfc_add_modify (block, count2, gfc_index_zero_node);
3776 tmp = gfc_trans_where_assign (expr1, expr2,
3781 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3783 gfc_add_expr_to_block (block, tmp);
3788 /* Variables to control maskexpr. */
3789 count1 = gfc_create_var (gfc_array_index_type, "count1");
3790 count2 = gfc_create_var (gfc_array_index_type, "count2");
3791 gfc_add_modify (block, count1, gfc_index_zero_node);
3792 gfc_add_modify (block, count2, gfc_index_zero_node);
3794 tmp = gfc_trans_where_assign (expr1, expr2,
3798 gfc_add_expr_to_block (block, tmp);
3803 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse with cmask as the new control mask.  */
3805 gfc_trans_where_2 (cnext, cmask, invert,
3806 nested_forall_info, block);
3813 /* The next statement within the same where-body-construct. */
3814 cnext = cnext->next;
3816 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3817 cblock = cblock->block;
3818 if (mask == NULL_TREE)
3820 /* If we're the initial WHERE, we can simply invert the sense
3821 of the current mask to obtain the "mask" for the remaining
3828 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3834 /* If we allocated a pending mask array, deallocate it now. */
3837 tmp = gfc_call_free (ppmask);
3838 gfc_add_expr_to_block (block, tmp);
3841 /* If we allocated a current mask array, deallocate it now. */
3844 tmp = gfc_call_free (pcmask);
3845 gfc_add_expr_to_block (block, tmp);
3849 /* Translate a simple WHERE construct or statement without dependencies.
3850 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3851 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3852 Currently both CBLOCK and EBLOCK are restricted to single assignments.
   Because there are no dependencies, the WHERE and ELSEWHERE assignments
   are fused into a single scalarized loop whose body is a COND_EXPR on
   the mask value -- no temporary mask arrays are needed.  */
3855 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3857 stmtblock_t block, body;
3858 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3859 tree tmp, cexpr, tstmt, estmt;
3860 gfc_ss *css, *tdss, *tsss;
3861 gfc_se cse, tdse, tsse, edse, esse;
3866 /* Allow the scalarizer to workshare simple where loops. */
3867 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3868 ompws_flags |= OMPWS_SCALARIZER_WS;
/* Pick apart the construct: the mask condition, the WHERE assignment's
   destination/source, and (if present) the ELSEWHERE assignment's.  */
3870 cond = cblock->expr1;
3871 tdst = cblock->next->expr1;
3872 tsrc = cblock->next->expr2;
3873 edst = eblock ? eblock->next->expr1 : NULL;
3874 esrc = eblock ? eblock->next->expr2 : NULL;
3876 gfc_start_block (&block);
3877 gfc_init_loopinfo (&loop);
3879 /* Handle the condition. */
3880 gfc_init_se (&cse, NULL);
3881 css = gfc_walk_expr (cond);
3882 gfc_add_ss_to_loop (&loop, css);
3884 /* Handle the then-clause. */
3885 gfc_init_se (&tdse, NULL);
3886 gfc_init_se (&tsse, NULL);
3887 tdss = gfc_walk_expr (tdst);
3888 tsss = gfc_walk_expr (tsrc);
/* A scalar RHS produces no scalarization chain; synthesize a
   GFC_SS_SCALAR entry so it can still ride in the common loop.  */
3889 if (tsss == gfc_ss_terminator)
3891 tsss = gfc_get_ss ();
3893 tsss->next = gfc_ss_terminator;
3894 tsss->type = GFC_SS_SCALAR;
3897 gfc_add_ss_to_loop (&loop, tdss);
3898 gfc_add_ss_to_loop (&loop, tsss);
3902 /* Handle the else clause.
   Same treatment as the then-clause, including the scalar-RHS case.  */
3903 gfc_init_se (&edse, NULL);
3904 gfc_init_se (&esse, NULL);
3905 edss = gfc_walk_expr (edst);
3906 esss = gfc_walk_expr (esrc);
3907 if (esss == gfc_ss_terminator)
3909 esss = gfc_get_ss ();
3911 esss->next = gfc_ss_terminator;
3912 esss->type = GFC_SS_SCALAR;
3915 gfc_add_ss_to_loop (&loop, edss);
3916 gfc_add_ss_to_loop (&loop, esss);
/* Resolve loop bounds/strides and set up the single fused loop over
   the destination's shape.  */
3919 gfc_conv_ss_startstride (&loop);
3920 gfc_conv_loop_setup (&loop, &tdst->where);
3922 gfc_mark_ss_chain_used (css, 1);
3923 gfc_mark_ss_chain_used (tdss, 1);
3924 gfc_mark_ss_chain_used (tsss, 1);
3927 gfc_mark_ss_chain_used (edss, 1);
3928 gfc_mark_ss_chain_used (esss, 1);
3931 gfc_start_scalarized_body (&loop, &body);
3933 gfc_copy_loopinfo_to_se (&cse, &loop);
3934 gfc_copy_loopinfo_to_se (&tdse, &loop);
3935 gfc_copy_loopinfo_to_se (&tsse, &loop);
3941 gfc_copy_loopinfo_to_se (&edse, &loop);
3942 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the mask element for this iteration.  */
3947 gfc_conv_expr (&cse, cond);
3948 gfc_add_block_to_block (&body, &cse.pre);
3951 gfc_conv_expr (&tsse, tsrc);
/* If the loop needed a temporary for the destination, reference the
   temporary array element instead of the real destination here.  */
3952 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3954 gfc_conv_tmp_array_ref (&tdse);
3955 gfc_advance_se_ss_chain (&tdse);
3958 gfc_conv_expr (&tdse, tdst);
3962 gfc_conv_expr (&esse, esrc);
3963 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3965 gfc_conv_tmp_array_ref (&edse);
3966 gfc_advance_se_ss_chain (&edse);
3969 gfc_conv_expr (&edse, edst);
/* Build the two scalar assignments and guard them with the mask:
   mask ? then-assignment : else-assignment (or empty when no ELSEWHERE).  */
3972 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
3973 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
3975 : build_empty_stmt (input_location);
3976 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3977 gfc_add_expr_to_block (&body, tmp);
3978 gfc_add_block_to_block (&body, &cse.post);
/* Generate the surrounding loops and splice pre/post code into the
   result block.  */
3980 gfc_trans_scalarizing_loops (&loop, &body);
3981 gfc_add_block_to_block (&block, &loop.pre);
3982 gfc_add_block_to_block (&block, &loop.post);
3983 gfc_cleanup_loop (&loop);
3985 return gfc_finish_block (&block);
3988 /* As the WHERE or WHERE construct statement can be nested, we call
3989 gfc_trans_where_2 to do the translation, and pass the initial
3990 NULL values for both the control mask and the pending control mask.
   Before falling back to the general (mask-array based) translation in
   gfc_trans_where_2, try to prove the construct dependence-free so the
   much cheaper fused-loop translation gfc_trans_where_3 can be used.  */
3993 gfc_trans_where (gfc_code * code)
3999 cblock = code->block;
/* Fast path 1: the construct is a single "WHERE (cond) x = y" with no
   further statements.  */
4001 && cblock->next->op == EXEC_ASSIGN
4002 && !cblock->next->next)
4004 eblock = cblock->block;
4007 /* A simple "WHERE (cond) x = y" statement or block is
4008 dependence free if cond is not dependent upon writing x,
4009 and the source y is unaffected by the destination x. */
4010 if (!gfc_check_dependency (cblock->next->expr1,
4012 && !gfc_check_dependency (cblock->next->expr1,
4013 cblock->next->expr2, 0))
4014 return gfc_trans_where_3 (cblock, NULL);
/* Fast path 2: WHERE/ELSEWHERE pair, each clause a single assignment
   and the ELSEWHERE carrying no mask expression of its own.  */
4016 else if (!eblock->expr1
4019 && eblock->next->op == EXEC_ASSIGN
4020 && !eblock->next->next)
4022 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4023 block is dependence free if cond is not dependent on writes
4024 to x1 and x2, y1 is not dependent on writes to x2, and y2
4025 is not dependent on writes to x1, and both y's are not
4026 dependent upon their own x's. In addition to this, the
4027 final two dependency checks below exclude all but the same
4028 array reference if the where and elswhere destinations
4029 are the same. In short, this is VERY conservative and this
4030 is needed because the two loops, required by the standard
4031 are coalesced in gfc_trans_where_3. */
4032 if (!gfc_check_dependency(cblock->next->expr1,
4034 && !gfc_check_dependency(eblock->next->expr1,
4036 && !gfc_check_dependency(cblock->next->expr1,
4037 eblock->next->expr2, 1)
4038 && !gfc_check_dependency(eblock->next->expr1,
4039 cblock->next->expr2, 1)
4040 && !gfc_check_dependency(cblock->next->expr1,
4041 cblock->next->expr2, 1)
4042 && !gfc_check_dependency(eblock->next->expr1,
4043 eblock->next->expr2, 1)
4044 && !gfc_check_dependency(cblock->next->expr1,
4045 eblock->next->expr1, 0)
4046 && !gfc_check_dependency(eblock->next->expr1,
4047 cblock->next->expr1, 0))
4048 return gfc_trans_where_3 (cblock, eblock);
/* General case: translate recursively with NULL control masks.  */
4052 gfc_start_block (&block);
4054 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4056 return gfc_finish_block (&block);
4060 /* CYCLE a DO loop. The label decl has already been created by
4061 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4062 node at the head of the loop. We must mark the label as used. */
4065 gfc_trans_cycle (gfc_code * code)
/* Fetch the cycle label stashed on the target loop's backend_decl.  */
4069 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
/* Mark it used so later passes know the label is reachable.  */
4070 TREE_USED (cycle_label) = 1;
4071 return build1_v (GOTO_EXPR, cycle_label);
4075 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4076 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
   loop.  Returns a GOTO_EXPR branching past the end of the loop.  */
4080 gfc_trans_exit (gfc_code * code)
/* TREE_VALUE holds the exit label (TREE_PURPOSE holds cycle's).  */
4084 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
/* Mark it used so later passes know the label is reachable.  */
4085 TREE_USED (exit_label) = 1;
4086 return build1_v (GOTO_EXPR, exit_label);
4090 /* Translate the ALLOCATE statement.
   code->ext.alloc.list is the list of objects to allocate;
   code->expr1 is the STAT= variable, code->expr2 the ERRMSG= variable,
   and code->expr3 the SOURCE= expression, when present.  */
4093 gfc_trans_allocate (gfc_code * code)
/* Nothing to allocate.  */
4106 if (!code->ext.alloc.list)
4109 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4111 gfc_start_block (&block);
4113 /* Either STAT= and/or ERRMSG is present.  Create the status variable
4114 and the label that each failed allocation branches to. */
4114 if (code->expr1 || code->expr2)
4116 tree gfc_int4_type_node = gfc_get_int_type (4);
4118 stat = gfc_create_var (gfc_int4_type_node, "stat");
4119 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4121 error_label = gfc_build_label_decl (NULL_TREE);
4122 TREE_USED (error_label) = 1;
/* Translate each object in the allocation list in turn.  */
4125 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4127 expr = gfc_copy_expr (al->expr);
/* For CLASS objects, the actual allocation acts on the "$data" field.  */
4129 if (expr->ts.type == BT_CLASS)
4130 gfc_add_component_ref (expr, "$data");
4132 gfc_init_se (&se, NULL);
4133 gfc_start_block (&se.pre);
4135 se.want_pointer = 1;
4136 se.descriptor_only = 1;
4137 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects and returns nonzero if it
   did; otherwise fall through to the scalar/derived-type path.  */
4139 if (!gfc_array_allocate (&se, expr, pstat))
4141 /* A scalar or derived type. */
4143 /* Determine allocate size. */
/* CLASS SOURCE=: size must come from the dynamic type's vtab entry.  */
4144 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4148 sz = gfc_copy_expr (code->expr3);
4149 gfc_add_component_ref (sz, "$vptr");
4150 gfc_add_component_ref (sz, "$size");
4151 gfc_init_se (&se_sz, NULL);
4152 gfc_conv_expr (&se_sz, sz);
/* Non-CLASS SOURCE=, typespec on the ALLOCATE, or the object's own
   type, in that order of preference.  */
4156 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4157 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4158 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4159 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4161 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
/* Deferred-length character: use the run-time string length.  */
4163 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4164 memsz = se.string_length;
4166 /* Allocate - for non-pointers with re-alloc checking. */
4173 /* Find the last reference in the chain. */
4174 while (ref && ref->next != NULL)
4176 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
/* Whether the allocated entity itself (symbol or ultimate component)
   has the ALLOCATABLE attribute decides which runtime call is used.  */
4181 allocatable = expr->symtree->n.sym->attr.allocatable;
4183 allocatable = ref->u.c.component->attr.allocatable;
4186 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4189 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
/* Store the returned pointer into the object.  */
4192 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4193 fold_convert (TREE_TYPE (se.expr), tmp));
4194 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, a nonzero status branches to error_label so the
   remaining objects are not allocated.  */
4196 if (code->expr1 || code->expr2)
4198 tmp = build1_v (GOTO_EXPR, error_label);
4199 parm = fold_build2 (NE_EXPR, boolean_type_node,
4200 stat, build_int_cst (TREE_TYPE (stat), 0));
4201 tmp = fold_build3 (COND_EXPR, void_type_node,
4202 parm, tmp, build_empty_stmt (input_location));
4203 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components: nullify them so later
   (re)allocation/deallocation sees a defined state.  */
4206 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4208 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4209 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4210 gfc_add_expr_to_block (&se.pre, tmp);
4215 tmp = gfc_finish_block (&se.pre);
4216 gfc_add_expr_to_block (&block, tmp);
4218 /* Initialization via SOURCE block
   (copy the SOURCE= value into the freshly allocated object).  */
4221 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4222 if (al->expr->ts.type == BT_CLASS)
4225 if (rhs->ts.type == BT_CLASS)
4226 gfc_add_component_ref (rhs, "$data");
4227 gfc_init_se (&dst, NULL);
4228 gfc_init_se (&src, NULL);
4229 gfc_conv_expr (&dst, expr);
4230 gfc_conv_expr (&src, rhs);
4231 gfc_add_block_to_block (&block, &src.pre);
/* CLASS targets are initialized with a raw memcpy of memsz bytes;
   otherwise a regular assignment is generated.  */
4232 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4235 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4237 gfc_free_expr (rhs);
4238 gfc_add_expr_to_block (&block, tmp);
4241 /* Allocation of CLASS entities. */
4242 gfc_free_expr (expr);
4244 if (expr->ts.type == BT_CLASS)
4249 /* Initialize VPTR for CLASS objects. */
4250 lhs = gfc_expr_to_initialize (expr);
4251 gfc_add_component_ref (lhs, "$vptr");
4253 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4255 /* VPTR must be determined at run time
   (dynamic type of the SOURCE= expression): copy its "$vptr".  */
4256 rhs = gfc_copy_expr (code->expr3);
4257 gfc_add_component_ref (rhs, "$vptr");
4258 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4259 gfc_add_expr_to_block (&block, tmp);
4260 gfc_free_expr (rhs);
4264 /* VPTR is fixed at compile time.
   Determine the declared type whose vtab should be used.  */
4268 ts = &code->expr3->ts;
4269 else if (expr->ts.type == BT_DERIVED)
4271 else if (code->ext.alloc.ts.type == BT_DERIVED)
4272 ts = &code->ext.alloc.ts;
4273 else if (expr->ts.type == BT_CLASS)
4274 ts = &expr->ts.u.derived->components->ts;
4278 if (ts->type == BT_DERIVED)
/* Look up (or create) the vtab for the type and point $vptr at it.  */
4280 vtab = gfc_find_derived_vtab (ts->u.derived, true);
4282 gfc_trans_assign_vtab_procs (&block, ts->u.derived, vtab);
4283 gfc_init_se (&lse, NULL);
4284 lse.want_pointer = 1;
4285 gfc_conv_expr (&lse, lhs);
4286 tmp = gfc_build_addr_expr (NULL_TREE,
4287 gfc_get_symbol_decl (vtab));
4288 gfc_add_modify (&block, lse.expr,
4289 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error landing pad for STAT=/ERRMSG=: assign the status value to the
   user's STAT= variable.  */
4299 tmp = build1_v (LABEL_EXPR, error_label);
4300 gfc_add_expr_to_block (&block, tmp);
4302 gfc_init_se (&se, NULL);
4303 gfc_conv_expr_lhs (&se, code->expr1);
4304 tmp = convert (TREE_TYPE (se.expr), stat);
4305 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= handling: on failure, copy a fixed message (truncated to the
   user variable's length) into the ERRMSG= character variable.  */
4311 /* A better error message may be possible, but not required. */
4312 const char *msg = "Attempt to allocate an allocated object";
4313 tree errmsg, slen, dlen;
4315 gfc_init_se (&se, NULL);
4316 gfc_conv_expr_lhs (&se, code->expr2);
4318 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4320 gfc_add_modify (&block, errmsg,
4321 gfc_build_addr_expr (pchar_type_node,
4322 gfc_build_localized_cstring_const (msg)));
4324 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4325 dlen = gfc_get_expr_charlen (code->expr2);
/* Copy at most min (message length, ERRMSG variable length) bytes.  */
4326 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4328 dlen = build_call_expr_loc (input_location,
4329 built_in_decls[BUILT_IN_MEMCPY], 3,
4330 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* Only perform the copy when stat is nonzero (an error occurred).  */
4332 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4333 build_int_cst (TREE_TYPE (stat), 0));
4335 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4337 gfc_add_expr_to_block (&block, tmp);
4340 return gfc_finish_block (&block);
4344 /* Translate a DEALLOCATE statement.
   code->ext.alloc.list is the list of objects to deallocate;
   code->expr1 is the STAT= variable and code->expr2 the ERRMSG=
   variable, when present.  Unlike ALLOCATE, failures do not branch out:
   they are counted and reported at the end.  */
4347 gfc_trans_deallocate (gfc_code *code)
4352 tree apstat, astat, pstat, stat, tmp;
4355 pstat = apstat = stat = astat = tmp = NULL_TREE;
4357 gfc_start_block (&block);
4359 /* Count the number of failed deallocations. If deallocate() was
4360 called with STAT= , then set STAT to the count. If deallocate
4361 was called with ERRMSG, then set ERRMG to a string. */
4362 if (code->expr1 || code->expr2)
4364 tree gfc_int4_type_node = gfc_get_int_type (4);
/* "stat" receives the status of each individual deallocation.  */
4366 stat = gfc_create_var (gfc_int4_type_node, "stat");
4367 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4369 /* Running total of possible deallocation failures. */
4370 astat = gfc_create_var (gfc_int4_type_node, "astat");
4371 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4373 /* Initialize astat to 0. */
4374 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Translate each object in the deallocation list.  */
4377 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4380 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4382 gfc_init_se (&se, NULL);
4383 gfc_start_block (&se.pre);
4385 se.want_pointer = 1;
4386 se.descriptor_only = 1;
4387 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components need those components
   deallocated first.  */
4389 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4392 gfc_ref *last = NULL;
/* Remember the last component reference in the chain.  */
4393 for (ref = expr->ref; ref; ref = ref->next)
4394 if (ref->type == REF_COMPONENT)
4397 /* Do not deallocate the components of a derived type
4398 ultimate pointer component. */
4399 if (!(last && last->u.c.component->attr.pointer)
4400 && !(!last && expr->symtree->n.sym->attr.pointer))
4402 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4404 gfc_add_expr_to_block (&se.pre, tmp);
/* Arrays go through the descriptor-based deallocation; scalars through
   the status-reporting runtime free.  */
4409 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4412 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4413 gfc_add_expr_to_block (&se.pre, tmp);
/* Null out the pointer after freeing it.  */
4415 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4416 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4419 gfc_add_expr_to_block (&se.pre, tmp);
4421 /* Keep track of the number of failed deallocations by adding stat
4422 of the last deallocation to the running total. */
4423 if (code->expr1 || code->expr2)
4425 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4426 gfc_add_modify (&se.pre, astat, apstat);
4429 tmp = gfc_finish_block (&se.pre);
4430 gfc_add_expr_to_block (&block, tmp);
/* STAT= : store the accumulated failure count in the user's variable.  */
4437 gfc_init_se (&se, NULL);
4438 gfc_conv_expr_lhs (&se, code->expr1);
4439 tmp = convert (TREE_TYPE (se.expr), astat);
4440 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= : on any failure, copy a fixed message (truncated to the
   user variable's length) into the ERRMSG= character variable.  */
4446 /* A better error message may be possible, but not required. */
4447 const char *msg = "Attempt to deallocate an unallocated object";
4448 tree errmsg, slen, dlen;
4450 gfc_init_se (&se, NULL);
4451 gfc_conv_expr_lhs (&se, code->expr2);
4453 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4455 gfc_add_modify (&block, errmsg,
4456 gfc_build_addr_expr (pchar_type_node,
4457 gfc_build_localized_cstring_const (msg)));
4459 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4460 dlen = gfc_get_expr_charlen (code->expr2);
/* Copy at most min (message length, ERRMSG variable length) bytes.  */
4461 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4463 dlen = build_call_expr_loc (input_location,
4464 built_in_decls[BUILT_IN_MEMCPY], 3,
4465 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* Only perform the copy when the failure count astat is nonzero.  */
4467 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4468 build_int_cst (TREE_TYPE (astat), 0));
4470 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4472 gfc_add_expr_to_block (&block, tmp);
4475 return gfc_finish_block (&block);