1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
42 typedef struct iter_info
48 struct iter_info *next;
52 typedef struct forall_info
59 struct forall_info *prev_nest;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
69 gfc_trans_label_here (gfc_code * code)
/* Emit a LABEL_EXPR so that jumps to the statement label attached to
   this code node (code->here) have a landing point.  */
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Convert an ASSIGN-ed variable expression and strip away any wrapper
   tree so that SE->expr ends up as the bare decl carrying the
   GFC_DECL_STRING_LEN / GFC_DECL_ASSIGN_ADDR auxiliaries.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
95 gfc_trans_label_assign (gfc_code * code)
/* Translate "ASSIGN label TO var".  Stores into the variable's auxiliary
   decls either the address of a branch-target label (length flag -1) or
   the text of a FORMAT label (length flag = string length).  The -1
   sentinel is what gfc_trans_goto later tests for.  */
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
/* Auxiliary decls attached to an ASSIGN-ed variable: length flag and
   address slot.  */
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
114 if (code->label1->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
/* -1 marks the stored value as a branch target, not a format string.  */
117 len_tree = integer_minus_one_node;
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
/* FORMAT label: store the format text itself (NUL-terminated, hence
   label_len + 1) instead of a code address.  */
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
139 gfc_trans_goto (gfc_code * code)
/* Translate GOTO.  A plain GOTO with an explicit label becomes a direct
   GOTO_EXPR; otherwise this is an assigned GOTO through the variable's
   ASSIGN address slot, guarded by a runtime check that the stored length
   flag is -1 (i.e. a target label was ASSIGNed, not a FORMAT -- see
   gfc_trans_label_assign).  */
141 locus loc = code->loc;
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO.  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 /* We're going to ignore a label list. It does not really change the
163 statement's semantics (because it is just a further restriction on
164 what's legal code); before, we were comparing label addresses here, but
165 that's a very fragile business and may break with optimization. So
168 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
169 gfc_add_expr_to_block (&se.pre, target);
170 return gfc_finish_block (&se.pre);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
176 gfc_trans_entry (gfc_code * code)
/* Emit the label marking this ENTRY point; the entry dispatch logic
   elsewhere jumps here.  */
178 return build1_v (LABEL_EXPR, code->ext.entry->label);
182 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
183 elemental subroutines. Make temporaries for output arguments if any such
184 dependencies are found. Output arguments are chosen because internal_unpack
185 can be used, as is, to copy the result back to the variable. */
187 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
188 gfc_symbol * sym, gfc_actual_arglist * arg,
189 gfc_dep_check check_variable)
/* For an elemental subroutine call, scan the actual arguments for
   dependencies between INTENT(IN) inputs and writable (non-INTENT(IN))
   outputs.  Where a dependency is found, redirect the argument through
   an array temporary; the result is copied back with internal_unpack in
   SE->post.  SE collects setup/teardown code, LOOPSE is the scalarizer
   state of the surrounding call loop.  */
191 gfc_actual_arglist *arg0;
193 gfc_formal_arglist *formal;
194 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call is being scalarized.  */
206 if (loopse->ss == NULL)
211 formal = sym->formal;
213 /* Loop over all the arguments testing for dependencies. */
214 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
220 /* Obtain the info structure for the current argument. */
222 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
226 info = &ss->data.info;
230 /* If there is a dependency, create a temporary and use it
231 instead of the variable. */
232 fsym = formal ? formal->sym : NULL;
233 if (e->expr_type == EXPR_VARIABLE
235 && fsym->attr.intent != INTENT_IN
236 && gfc_check_fncall_dependency (e, fsym->attr.intent,
237 sym, arg0, check_variable))
239 tree initial, temptype;
240 stmtblock_t temp_post;
242 /* Make a local loopinfo for the temporary creation, so that
243 none of the other ss->info's have to be renormalized. */
244 gfc_init_loopinfo (&tmp_loop);
245 for (n = 0; n < info->dimen; n++)
247 tmp_loop.to[n] = loopse->loop->to[n];
248 tmp_loop.from[n] = loopse->loop->from[n];
249 tmp_loop.order[n] = loopse->loop->order[n];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse, NULL);
254 parmse.want_pointer = 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref = e->ref; ref; ref = ref->next)
263 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
268 bool seen_range = false;
269 for (n = 0; n < ref->u.ar.dimen; n++)
271 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
275 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Promote a trailing element subscript to a degenerate range
   (start:start) so the descriptor keeps full rank.  */
278 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
279 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
283 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
284 gfc_add_block_to_block (&se->pre, &parmse.pre);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym->attr.intent == INTENT_INOUT
289 || (fsym->ts.type ==BT_DERIVED
290 && fsym->attr.intent == INTENT_OUT))
291 initial = parmse.expr;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
303 temptype = TREE_TYPE (temptype);
304 temptype = gfc_get_element_type (temptype);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size = gfc_create_var (gfc_array_index_type, NULL);
310 data = gfc_create_var (pvoid_type_node, NULL);
311 gfc_init_block (&temp_post);
312 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
313 &tmp_loop, info, temptype,
317 gfc_add_modify (&se->pre, size, tmp);
318 tmp = fold_convert (pvoid_type_node, info->data);
319 gfc_add_modify (&se->pre, data, tmp);
321 /* Calculate the offset for the temporary. */
322 offset = gfc_index_zero_node;
323 for (n = 0; n < info->dimen; n++)
325 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
327 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
328 loopse->loop->from[n], tmp);
329 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
332 info->offset = gfc_create_var (gfc_array_index_type, NULL);
333 gfc_add_modify (&se->pre, info->offset, offset);
335 /* Copy the result back using unpack. */
336 tmp = build_call_expr_loc (input_location,
337 gfor_fndecl_in_unpack, 2, parmse.expr, data);
338 gfc_add_expr_to_block (&se->post, tmp);
340 /* parmse.pre is already added above. */
341 gfc_add_block_to_block (&se->post, &parmse.post);
/* temp_post (temporary cleanup) must run last of all.  */
342 gfc_add_block_to_block (&se->post, &temp_post);
348 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
351 gfc_trans_call (gfc_code * code, bool dependency_check,
352 tree mask, tree count1, bool invert)
/* Translate a CALL statement.  Two paths: a plain call (possibly with an
   alternate-return specifier feeding a following SELECT), or an
   elemental call with array arguments, which is scalarized into a loop.
   MASK/COUNT1/INVERT apply only in the masked (scalarized) path --
   NOTE(review): presumably supplied by WHERE/FORALL translation; confirm
   against callers.  */
356 int has_alternate_specifier;
357 gfc_dep_check check_variable;
358 tree index = NULL_TREE;
359 tree maskexpr = NULL_TREE;
362 /* A CALL starts a new block because the actual arguments may have to
363 be evaluated first. */
364 gfc_init_se (&se, NULL);
365 gfc_start_block (&se.pre);
367 gcc_assert (code->resolved_sym);
369 ss = gfc_ss_terminator;
370 if (code->resolved_sym->attr.elemental)
371 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
373 /* Is not an elemental subroutine call with array valued arguments. */
374 if (ss == gfc_ss_terminator)
377 /* Translate the call. */
378 has_alternate_specifier
379 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
380 code->expr1, NULL_TREE);
382 /* A subroutine without side-effect, by definition, does nothing! */
383 TREE_SIDE_EFFECTS (se.expr) = 1;
385 /* Chain the pieces together and return the block. */
386 if (has_alternate_specifier)
388 gfc_code *select_code;
/* The alternate-return value selects a branch of the SELECT that the
   front end placed immediately after the CALL.  */
390 select_code = code->next;
391 gcc_assert(select_code->op == EXEC_SELECT);
392 sym = select_code->expr1->symtree->n.sym;
393 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
394 if (sym->backend_decl == NULL)
395 sym->backend_decl = gfc_get_symbol_decl (sym);
396 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
399 gfc_add_expr_to_block (&se.pre, se.expr);
401 gfc_add_block_to_block (&se.pre, &se.post);
406 /* An elemental subroutine call with array valued arguments has
414 /* gfc_walk_elemental_function_args renders the ss chain in the
415 reverse order to the actual argument order. */
416 ss = gfc_reverse_ss (ss);
418 /* Initialize the loop. */
419 gfc_init_se (&loopse, NULL);
420 gfc_init_loopinfo (&loop);
421 gfc_add_ss_to_loop (&loop, ss);
423 gfc_conv_ss_startstride (&loop);
424 /* TODO: gfc_conv_loop_setup generates a temporary for vector
425 subscripts. This could be prevented in the elemental case
426 as temporaries are handled separatedly
427 (below in gfc_conv_elemental_dependencies). */
428 gfc_conv_loop_setup (&loop, &code->expr1->where);
429 gfc_mark_ss_chain_used (ss, 1);
431 /* Convert the arguments, checking for dependencies. */
432 gfc_copy_loopinfo_to_se (&loopse, &loop);
435 /* For operator assignment, do dependency checking. */
436 if (dependency_check)
437 check_variable = ELEM_CHECK_VARIABLE;
439 check_variable = ELEM_DONT_CHECK_VARIABLE;
441 gfc_init_se (&depse, NULL);
442 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
443 code->ext.actual, check_variable);
445 gfc_add_block_to_block (&loop.pre, &depse.pre);
446 gfc_add_block_to_block (&loop.post, &depse.post);
448 /* Generate the loop body. */
449 gfc_start_scalarized_body (&loop, &body);
450 gfc_init_block (&block);
454 /* Form the mask expression according to the mask. */
456 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* With INVERT, the mask is negated before use.  */
458 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
462 /* Add the subroutine call to the block. */
463 gfc_conv_procedure_call (&loopse, code->resolved_sym,
464 code->ext.actual, code->expr1,
/* Masked case: only call when the mask element is true, and advance
   the mask counter COUNT1.  */
469 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
470 build_empty_stmt (input_location));
471 gfc_add_expr_to_block (&loopse.pre, tmp);
472 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
473 count1, gfc_index_one_node);
474 gfc_add_modify (&loopse.pre, count1, tmp);
477 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
479 gfc_add_block_to_block (&block, &loopse.pre);
480 gfc_add_block_to_block (&block, &loopse.post);
482 /* Finish up the loop block and the loop. */
483 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
484 gfc_trans_scalarizing_loops (&loop, &body);
485 gfc_add_block_to_block (&se.pre, &loop.pre);
486 gfc_add_block_to_block (&se.pre, &loop.post);
487 gfc_add_block_to_block (&se.pre, &se.post);
488 gfc_cleanup_loop (&loop);
491 return gfc_finish_block (&se.pre);
495 /* Translate the RETURN statement. */
498 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
/* Translate RETURN.  An alternate return (RETURN expr) stores the
   expression into the fake result decl before jumping to the common
   return label; a plain RETURN is just a GOTO to that label.  */
506 /* If code->expr is not NULL, this return statement must appear
507 in a subroutine and current_fake_result_decl has already
510 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl means there was no * dummy argument to select --
   warn and fall back to a plain return.  */
513 gfc_warning ("An alternate return at %L without a * dummy argument",
514 &code->expr1->where);
515 return build1_v (GOTO_EXPR, gfc_get_return_label ());
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
522 gfc_conv_expr (&se, code->expr1);
524 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
525 fold_convert (TREE_TYPE (result), se.expr));
526 gfc_add_expr_to_block (&se.pre, tmp);
528 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
529 gfc_add_expr_to_block (&se.pre, tmp);
530 gfc_add_block_to_block (&se.pre, &se.post);
531 return gfc_finish_block (&se.pre);
534 return build1_v (GOTO_EXPR, gfc_get_return_label ());
538 /* Translate the PAUSE statement. We have to translate this statement
539 to a runtime library call. */
542 gfc_trans_pause (gfc_code * code)
/* Translate PAUSE into a libgfortran call: _gfortran_pause_numeric for
   "PAUSE n" (or bare PAUSE, using ext.stop_code), _gfortran_pause_string
   for "PAUSE 'message'".  */
544 tree gfc_int4_type_node = gfc_get_int_type (4);
548 /* Start a new block for this statement. */
549 gfc_init_se (&se, NULL);
550 gfc_start_block (&se.pre);
553 if (code->expr1 == NULL)
555 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
556 tmp = build_call_expr_loc (input_location,
557 gfor_fndecl_pause_numeric, 1, tmp);
/* Character argument: pass address and length to the string variant.  */
561 gfc_conv_expr_reference (&se, code->expr1);
562 tmp = build_call_expr_loc (input_location,
563 gfor_fndecl_pause_string, 2,
564 se.expr, se.string_length);
567 gfc_add_expr_to_block (&se.pre, tmp);
569 gfc_add_block_to_block (&se.pre, &se.post);
571 return gfc_finish_block (&se.pre);
575 /* Translate the STOP statement. We have to translate this statement
576 to a runtime library call. */
579 gfc_trans_stop (gfc_code *code, bool error_stop)
/* Translate STOP (or, when ERROR_STOP is set, ERROR STOP) into a
   libgfortran call.  Numeric codes use stop_numeric; character messages
   use stop_string / error_stop_string.  */
581 tree gfc_int4_type_node = gfc_get_int_type (4);
585 /* Start a new block for this statement. */
586 gfc_init_se (&se, NULL);
587 gfc_start_block (&se.pre);
589 if (code->expr1 == NULL)
591 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
592 tmp = build_call_expr_loc (input_location,
593 gfor_fndecl_stop_numeric, 1, tmp);
/* Character argument: choose the ERROR STOP or STOP string entry point.  */
597 gfc_conv_expr_reference (&se, code->expr1);
598 tmp = build_call_expr_loc (input_location,
599 error_stop ? gfor_fndecl_error_stop_string
600 : gfor_fndecl_stop_string,
601 2, se.expr, se.string_length);
604 gfc_add_expr_to_block (&se.pre, tmp);
606 gfc_add_block_to_block (&se.pre, &se.post);
608 return gfc_finish_block (&se.pre);
613 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
/* Translate SYNC ALL / SYNC IMAGES / SYNC MEMORY.  With a single image,
   only two things are generated here: an optional bounds check on a
   scalar SYNC IMAGES image index (must equal 1), and, if STAT= is
   present (expr2), setting the STAT variable to zero.  */
617 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
619 gfc_init_se (&se, NULL);
620 gfc_start_block (&se.pre);
623 /* Check SYNC IMAGES(imageset) for valid image index.
624 FIXME: Add a check for image-set arrays. */
625 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
626 && code->expr1->rank == 0)
629 gfc_conv_expr (&se, code->expr1);
/* Single-image execution: the only valid image number is 1.  */
630 cond = fold_build2 (NE_EXPR, boolean_type_node, se.expr,
631 build_int_cst (TREE_TYPE (se.expr), 1));
632 gfc_trans_runtime_check (true, false, cond, &se.pre,
633 &code->expr1->where, "Invalid image number "
635 fold_convert (integer_type_node, se.expr));
638 /* If STAT is present, set it to zero. */
641 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
642 gfc_conv_expr (&se, code->expr2);
643 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
646 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
647 return gfc_finish_block (&se.pre);
653 /* Generate GENERIC for the IF construct. This function also deals with
654 the simple IF statement, because the front end translates the IF
655 statement into an IF construct.
687 where COND_S is the simplified version of the predicate. PRE_COND_S
688 are the pre side-effects produced by the translation of the
690 We need to build the chain recursively otherwise we run into
691 problems with folding incomplete statements. */
694 gfc_trans_if_1 (gfc_code * code)
/* Translate one arm of an IF construct, recursing down code->block for
   the ELSE/ELSE IF chain so the COND_EXPRs nest right-to-left.  */
699 /* Check for an unconditional ELSE clause. */
701 return gfc_trans_code (code->next);
703 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
704 gfc_init_se (&if_se, NULL);
705 gfc_start_block (&if_se.pre);
707 /* Calculate the IF condition expression. */
708 gfc_conv_expr_val (&if_se, code->expr1);
710 /* Translate the THEN clause. */
711 stmt = gfc_trans_code (code->next);
713 /* Translate the ELSE clause. */
715 elsestmt = gfc_trans_if_1 (code->block);
717 elsestmt = build_empty_stmt (input_location);
719 /* Build the condition expression and add it to the condition block. */
720 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
722 gfc_add_expr_to_block (&if_se.pre, stmt);
724 /* Finish off this statement. */
725 return gfc_finish_block (&if_se.pre);
729 gfc_trans_if (gfc_code * code)
/* Entry point for IF: the outer EXEC_IF node is only an announcement;
   the real chain of conditions lives in code->block.  */
731 /* Ignore the top EXEC_IF, it only announces an IF construct. The
732 actual code we must translate is in code->block. */
734 return gfc_trans_if_1 (code->block);
738 /* Translate an arithmetic IF expression.
740 IF (cond) label1, label2, label3 translates to
752 An optimized version can be generated in case of equal labels.
753 E.g., if label1 is equal to label2, we can translate it to
762 gfc_trans_arithmetic_if (gfc_code * code)
/* Translate "IF (cond) l1, l2, l3": branch to l1 if cond<0, l2 if
   cond==0, l3 if cond>0.  When some labels coincide the comparison chain
   collapses (e.g. l1==l2 needs only one <= test).  */
770 /* Start a new block. */
771 gfc_init_se (&se, NULL);
772 gfc_start_block (&se.pre);
774 /* Pre-evaluate COND. */
775 gfc_conv_expr_val (&se, code->expr1);
/* COND may be used in up to two comparisons; evaluate it once.  */
776 se.expr = gfc_evaluate_now (se.expr, &se.pre);
778 /* Build something to compare with. */
779 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
781 if (code->label1->value != code->label2->value)
783 /* If (cond < 0) take branch1 else take branch2.
784 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
785 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
786 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
788 if (code->label1->value != code->label3->value)
789 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
791 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
793 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: a single jump covers both negative and zero.  */
796 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
798 if (code->label1->value != code->label3->value
799 && code->label2->value != code->label3->value)
801 /* if (cond <= 0) take branch1 else take branch2. */
802 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
803 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
804 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
807 /* Append the COND_EXPR to the evaluation of COND, and return. */
808 gfc_add_expr_to_block (&se.pre, branch1);
809 return gfc_finish_block (&se.pre);
813 /* Translate a CRITICAL block. */
815 gfc_trans_critical (gfc_code *code)
/* Translate a CRITICAL block.  In this view only the body translation is
   visible -- NOTE(review): with single-image coarray execution no actual
   locking is emitted here; confirm against the full source.  */
820 gfc_start_block (&block);
821 tmp = gfc_trans_code (code->block->next);
822 gfc_add_expr_to_block (&block, tmp);
824 return gfc_finish_block (&block);
828 /* Translate a BLOCK construct. This is basically what we would do for a
832 gfc_trans_block_construct (gfc_code* code)
/* Translate a BLOCK construct: set up its local namespace's variables,
   translate the contained code, and wrap deferred-variable handling
   (initialization/cleanup) around it, much like a contained procedure
   body.  */
844 gcc_assert (!sym->tlink);
847 gfc_start_block (&body);
848 gfc_process_block_locals (ns);
850 tmp = gfc_trans_code (ns->code);
/* Wrap init/cleanup of deferred (allocatable etc.) block locals.  */
851 tmp = gfc_trans_deferred_vars (sym, tmp);
853 gfc_add_expr_to_block (&body, tmp);
854 return gfc_finish_block (&body);
858 /* Translate the simple DO construct. This is where the loop variable has
859 integer type and step +-1. We can't use this in the general case
860 because integer overflow and floating point errors could give incorrect
862 We translate a do loop from:
864 DO dovar = from, to, step
870 [Evaluate loop bounds and step]
872 if ((step > 0) ? (dovar <= to) : (dovar => to))
878 cond = (dovar == to);
880 if (cond) goto end_label;
885 This helps the optimizers by avoiding the extra induction variable
886 used in the general case. */
889 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
890 tree from, tree to, tree step, tree exit_cond)
/* Translate a DO loop whose control variable is integer with step +-1.
   The termination test "dovar == to" is evaluated BEFORE the increment,
   so the increment that would overflow past TO is never relied upon for
   the comparison.  EXIT_COND, if non-null, is an extra condition (e.g.
   from I/O error handling) that exits the loop.  */
896 tree saved_dovar = NULL;
900 type = TREE_TYPE (dovar);
902 /* Initialize the DO variable: dovar = from. */
903 gfc_add_modify (pblock, dovar, from);
905 /* Save value for do-tinkering checking. */
906 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
908 saved_dovar = gfc_create_var (type, ".saved_dovar");
909 gfc_add_modify (pblock, saved_dovar, dovar);
912 /* Cycle and exit statements are implemented with gotos. */
913 cycle_label = gfc_build_label_decl (NULL_TREE);
914 exit_label = gfc_build_label_decl (NULL_TREE);
916 /* Put the labels where they can be found later. See gfc_trans_do(). */
917 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
920 gfc_start_block (&body);
922 /* Main loop body. */
923 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
924 gfc_add_expr_to_block (&body, tmp);
926 /* Label for cycle statements (if needed). */
927 if (TREE_USED (cycle_label))
929 tmp = build1_v (LABEL_EXPR, cycle_label);
930 gfc_add_expr_to_block (&body, tmp);
933 /* Check whether someone has modified the loop variable. */
934 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
936 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
937 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
938 "Loop variable has been modified");
941 /* Exit the loop if there is an I/O result condition or error. */
944 tmp = build1_v (GOTO_EXPR, exit_label);
945 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
946 build_empty_stmt (input_location));
947 gfc_add_expr_to_block (&body, tmp);
950 /* Evaluate the loop condition. */
/* Test BEFORE incrementing: when dovar == to this was the last
   iteration, avoiding overflow of the final increment mattering.  */
951 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
952 cond = gfc_evaluate_now (cond, &body);
954 /* Increment the loop variable. */
955 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
956 gfc_add_modify (&body, dovar, tmp);
958 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
959 gfc_add_modify (&body, saved_dovar, dovar);
962 tmp = build1_v (GOTO_EXPR, exit_label);
963 TREE_USED (exit_label) = 1;
964 tmp = fold_build3 (COND_EXPR, void_type_node,
965 cond, tmp, build_empty_stmt (input_location));
966 gfc_add_expr_to_block (&body, tmp);
968 /* Finish the loop body. */
969 tmp = gfc_finish_block (&body);
970 tmp = build1_v (LOOP_EXPR, tmp);
972 /* Only execute the loop if the number of iterations is positive. */
/* STEP is a compile-time constant (+1 or -1) here, so its sign picks
   the guard comparison statically.  */
973 if (tree_int_cst_sgn (step) > 0)
974 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
976 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
977 tmp = fold_build3 (COND_EXPR, void_type_node,
978 cond, tmp, build_empty_stmt (input_location));
979 gfc_add_expr_to_block (pblock, tmp);
981 /* Add the exit label. */
982 tmp = build1_v (LABEL_EXPR, exit_label);
983 gfc_add_expr_to_block (pblock, tmp);
985 return gfc_finish_block (pblock);
988 /* Translate the DO construct. This obviously is one of the most
989 important ones to get right with any compiler, but especially
992 We special case some loop forms as described in gfc_trans_simple_do.
993 For other cases we implement them with a separate loop count,
994 as described in the standard.
996 We translate a do loop from:
998 DO dovar = from, to, step
1004 [evaluate loop bounds and step]
1005 empty = (step > 0 ? to < from : to > from);
1006 countm1 = (to - from) / step;
1008 if (empty) goto exit_label;
1014 if (countm1 ==0) goto exit_label;
1019 countm1 is an unsigned integer. It is equal to the loop count minus one,
1020 because the loop count itself can overflow. */
1023 gfc_trans_do (gfc_code * code, tree exit_cond)
/* Translate a general DO loop.  Simple integer loops with step +-1 are
   delegated to gfc_trans_simple_do; otherwise an unsigned iteration
   counter COUNTM1 (trip count minus one, so the count itself cannot
   overflow) drives the loop.  EXIT_COND is an optional extra exit test
   (e.g. I/O error) threaded into the body.  */
1027 tree saved_dovar = NULL;
1042 gfc_start_block (&block);
1044 /* Evaluate all the expressions in the iterator. */
1045 gfc_init_se (&se, NULL);
1046 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1047 gfc_add_block_to_block (&block, &se.pre);
1049 type = TREE_TYPE (dovar);
1051 gfc_init_se (&se, NULL);
1052 gfc_conv_expr_val (&se, code->ext.iterator->start);
1053 gfc_add_block_to_block (&block, &se.pre);
/* FROM/TO/STEP are evaluated once, up front, per the standard.  */
1054 from = gfc_evaluate_now (se.expr, &block);
1056 gfc_init_se (&se, NULL);
1057 gfc_conv_expr_val (&se, code->ext.iterator->end);
1058 gfc_add_block_to_block (&block, &se.pre);
1059 to = gfc_evaluate_now (se.expr, &block);
1061 gfc_init_se (&se, NULL);
1062 gfc_conv_expr_val (&se, code->ext.iterator->step);
1063 gfc_add_block_to_block (&block, &se.pre);
1064 step = gfc_evaluate_now (se.expr, &block);
1066 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1068 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1069 fold_convert (type, integer_zero_node));
1070 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1071 "DO step value is zero");
1074 /* Special case simple loops. */
1075 if (TREE_CODE (type) == INTEGER_TYPE
1076 && (integer_onep (step)
1077 || tree_int_cst_equal (step, integer_minus_one_node)))
1078 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1080 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1081 fold_convert (type, integer_zero_node));
/* The trip counter lives in an unsigned type wide enough for TYPE.  */
1083 if (TREE_CODE (type) == INTEGER_TYPE)
1084 utype = unsigned_type_for (type);
1086 utype = unsigned_type_for (gfc_array_index_type);
1087 countm1 = gfc_create_var (utype, "countm1");
1089 /* Cycle and exit statements are implemented with gotos. */
1090 cycle_label = gfc_build_label_decl (NULL_TREE);
1091 exit_label = gfc_build_label_decl (NULL_TREE);
1092 TREE_USED (exit_label) = 1;
1094 /* Initialize the DO variable: dovar = from. */
1095 gfc_add_modify (&block, dovar, from);
1097 /* Save value for do-tinkering checking. */
1098 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1100 saved_dovar = gfc_create_var (type, ".saved_dovar");
1101 gfc_add_modify (&block, saved_dovar, dovar);
1104 /* Initialize loop count and jump to exit label if the loop is empty.
1105 This code is executed before we enter the loop body. We generate:
1106 step_sign = sign(1,step);
1117 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1121 if (TREE_CODE (type) == INTEGER_TYPE)
1123 tree pos, neg, step_sign, to2, from2, step2;
1125 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1127 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1128 build_int_cst (TREE_TYPE (step), 0));
1129 step_sign = fold_build3 (COND_EXPR, type, tmp,
1130 build_int_cst (type, -1),
1131 build_int_cst (type, 1));
/* Empty-loop check, selected on the step's sign: positive step is
   empty when to < from, negative step when to > from.  */
1133 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1134 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1135 build1_v (GOTO_EXPR, exit_label),
1136 build_empty_stmt (input_location));
1138 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1139 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1140 build1_v (GOTO_EXPR, exit_label),
1141 build_empty_stmt (input_location));
1142 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1144 gfc_add_expr_to_block (&block, tmp);
1146 /* Calculate the loop count. to-from can overflow, so
1147 we cast to unsigned. */
/* Multiplying by step_sign normalizes the range so the subtraction
   and division are done with nonnegative magnitudes.  */
1149 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1150 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1151 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1152 step2 = fold_convert (utype, step2);
1153 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1154 tmp = fold_convert (utype, tmp);
1155 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1156 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1157 gfc_add_expr_to_block (&block, tmp);
1161 /* TODO: We could use the same width as the real type.
1162 This would probably cause more problems that it solves
1163 when we implement "long double" types. */
/* Real-typed DO variable: compute the trip count in floating point
   and truncate to the unsigned counter type.  */
1165 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1166 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1167 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1168 gfc_add_modify (&block, countm1, tmp);
1170 /* We need a special check for empty loops:
1171 empty = (step > 0 ? to < from : to > from); */
1172 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1173 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1174 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1175 /* If the loop is empty, go directly to the exit label. */
1176 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1177 build1_v (GOTO_EXPR, exit_label),
1178 build_empty_stmt (input_location));
1179 gfc_add_expr_to_block (&block, tmp);
1183 gfc_start_block (&body);
1185 /* Put these labels where they can be found later. We put the
1186 labels in a TREE_LIST node (because TREE_CHAIN is already
1187 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1188 label in TREE_VALUE (backend_decl). */
1190 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1192 /* Main loop body. */
1193 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1194 gfc_add_expr_to_block (&body, tmp);
1196 /* Label for cycle statements (if needed). */
1197 if (TREE_USED (cycle_label))
1199 tmp = build1_v (LABEL_EXPR, cycle_label);
1200 gfc_add_expr_to_block (&body, tmp);
1203 /* Check whether someone has modified the loop variable. */
1204 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1206 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1207 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1208 "Loop variable has been modified");
1211 /* Exit the loop if there is an I/O result condition or error. */
1214 tmp = build1_v (GOTO_EXPR, exit_label);
1215 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1216 build_empty_stmt (input_location));
1217 gfc_add_expr_to_block (&body, tmp);
1220 /* Increment the loop variable. */
1221 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1222 gfc_add_modify (&body, dovar, tmp);
1224 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1225 gfc_add_modify (&body, saved_dovar, dovar);
1227 /* End with the loop condition. Loop until countm1 == 0. */
1228 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1229 build_int_cst (utype, 0));
1230 tmp = build1_v (GOTO_EXPR, exit_label);
1231 tmp = fold_build3 (COND_EXPR, void_type_node,
1232 cond, tmp, build_empty_stmt (input_location));
1233 gfc_add_expr_to_block (&body, tmp);
1235 /* Decrement the loop count. */
1236 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1237 gfc_add_modify (&body, countm1, tmp);
1239 /* End of loop body. */
1240 tmp = gfc_finish_block (&body);
1242 /* The for loop itself. */
1243 tmp = build1_v (LOOP_EXPR, tmp);
1244 gfc_add_expr_to_block (&block, tmp);
1246 /* Add the exit label. */
1247 tmp = build1_v (LABEL_EXPR, exit_label);
1248 gfc_add_expr_to_block (&block, tmp);
1250 return gfc_finish_block (&block);
1254 /* Translate the DO WHILE construct.
1267    if (! cond) goto exit_label;
1273 Because the evaluation of the exit condition `cond' may have side
1274 effects, we can't do much for empty loop bodies. The backend optimizers
1275 should be smart enough to eliminate any dead loops. */
/* Returns a GENERIC tree for the whole loop: a LOOP_EXPR whose body
   tests the negated condition first and jumps to exit_label on failure,
   followed by the exit label itself.
   NOTE(review): this listing is elided; declarations and braces are
   omitted between the visible statements.  */
1278 gfc_trans_do_while (gfc_code * code)
1286 /* Everything we build here is part of the loop body. */
1287 gfc_start_block (&block);
1289 /* Cycle and exit statements are implemented with gotos. */
1290 cycle_label = gfc_build_label_decl (NULL_TREE);
1291 exit_label = gfc_build_label_decl (NULL_TREE);
1293 /* Put the labels where they can be found later. See gfc_trans_do(). */
1294 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1296 /* Create a GIMPLE version of the exit condition. */
1297 gfc_init_se (&cond, NULL);
1298 gfc_conv_expr_val (&cond, code->expr1);
1299 gfc_add_block_to_block (&block, &cond.pre);
/* Negate once here so the COND_EXPR below can take the "exit" branch
   when the DO WHILE condition is false.  */
1300 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1302 /* Build "IF (! cond) GOTO exit_label". */
1303 tmp = build1_v (GOTO_EXPR, exit_label);
/* Force the exit label to be emitted even if nothing else uses it.  */
1304 TREE_USED (exit_label) = 1;
1305 tmp = fold_build3 (COND_EXPR, void_type_node,
1306 cond.expr, tmp, build_empty_stmt (input_location));
1307 gfc_add_expr_to_block (&block, tmp);
1309 /* The main body of the loop. */
1310 tmp = gfc_trans_code (code->block->next);
1311 gfc_add_expr_to_block (&block, tmp);
1313 /* Label for cycle statements (if needed). CYCLE branches here, i.e.
1314    to the end of the body, just before the back edge.  */
1314 if (TREE_USED (cycle_label))
1316 tmp = build1_v (LABEL_EXPR, cycle_label);
1317 gfc_add_expr_to_block (&block, tmp);
1320 /* End of loop body. */
1321 tmp = gfc_finish_block (&block);
1323 gfc_init_block (&block);
1324 /* Build the loop. */
1325 tmp = build1_v (LOOP_EXPR, tmp);
1326 gfc_add_expr_to_block (&block, tmp);
1328 /* Add the exit label. */
1329 tmp = build1_v (LABEL_EXPR, exit_label);
1330 gfc_add_expr_to_block (&block, tmp);
1332 return gfc_finish_block (&block);
1336 /* Translate the SELECT CASE construct for INTEGER case expressions,
1337 without killing all potential optimizations. The problem is that
1338 Fortran allows unbounded cases, but the back-end does not, so we
1339 need to intercept those before we enter the equivalent SWITCH_EXPR
1342 For example, we translate this,
1345 CASE (:100,101,105:115)
1355 to the GENERIC equivalent,
1359 case (minimum value for typeof(expr) ... 100:
1365 case 200 ... (maximum value for typeof(expr):
/* Returns the finished block: the selector evaluation, a SWITCH_EXPR
   over all case labels, and a common end label used as the "break"
   target after each case body.
   NOTE(review): elided listing; declarations/braces are omitted.  */
1382 gfc_trans_integer_select (gfc_code * code)
1392 gfc_start_block (&block);
1394 /* Calculate the switch expression. */
1395 gfc_init_se (&se, NULL);
1396 gfc_conv_expr_val (&se, code->expr1);
1397 gfc_add_block_to_block (&block, &se.pre);
1399 end_label = gfc_build_label_decl (NULL_TREE);
1401 gfc_init_block (&body);
1403 for (c = code->block; c; c = c->block)
1405 for (cp = c->ext.case_list; cp; cp = cp->next)
1410 /* Assume it's the default case: low == high == NULL_TREE is how
1411    the middle end represents CASE DEFAULT.  */
1411 low = high = NULL_TREE;
1415 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1418 /* If there's only a lower bound, set the high bound to the
1419 maximum value of the case expression. */
1421 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1426 /* Three cases are possible here:
1428 1) There is no lower bound, e.g. CASE (:N).
1429 2) There is a lower bound .NE. high bound, that is
1430 a case range, e.g. CASE (N:M) where M>N (we make
1431 sure that M>N during type resolution).
1432 3) There is a lower bound, and it has the same value
1433 as the high bound, e.g. CASE (N:N). This is our
1434 internal representation of CASE(N).
1436 In the first and second case, we need to set a value for
1437 high. In the third case, we don't because the GCC middle
1438 end represents a single case value by just letting high be
1439 a NULL_TREE. We can't do that because we need to be able
1440 to represent unbounded cases. */
1444 && mpz_cmp (cp->low->value.integer,
1445 cp->high->value.integer) != 0))
1446 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1449 /* Unbounded case (CASE (:N)): substitute the type's minimum so
1450    the back end sees a bounded range.  */
1451 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1454 /* Build a label. */
1455 label = gfc_build_label_decl (NULL_TREE);
1457 /* Add this case label.
1458 Add parameter 'label', make it match GCC backend. */
1459 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1461 gfc_add_expr_to_block (&body, tmp);
1464 /* Add the statements for this case. */
1465 tmp = gfc_trans_code (c->next);
1466 gfc_add_expr_to_block (&body, tmp);
1468 /* Break to the end of the construct. Fortran has no fallthrough,
1469    so every case body ends with this goto.  */
1469 tmp = build1_v (GOTO_EXPR, end_label);
1470 gfc_add_expr_to_block (&body, tmp);
1473 tmp = gfc_finish_block (&body);
1474 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1475 gfc_add_expr_to_block (&block, tmp);
1477 tmp = build1_v (LABEL_EXPR, end_label);
1478 gfc_add_expr_to_block (&block, tmp);
1480 return gfc_finish_block (&block);
1484 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1486 There are only two cases possible here, even though the standard
1487 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1488 .FALSE., and DEFAULT.
1490 We never generate more than two blocks here. Instead, we always
1491 try to eliminate the DEFAULT case. This way, we can translate this
1492 kind of SELECT construct to a simple
1496 expression in GENERIC. */
/* t, f and d track the gfc_code blocks for the .TRUE., .FALSE. and
   DEFAULT cases respectively (see declarations below); the construct
   is lowered to a single COND_EXPR on the selector.
   NOTE(review): elided listing; the statements assigning t/f/d inside
   the scan loop are not visible here.  */
1499 gfc_trans_logical_select (gfc_code * code)
1502 gfc_code *t, *f, *d;
1507 /* Assume we don't have any cases at all. */
1510 /* Now see which ones we actually do have. We can have at most two
1511 cases in a single case list: one for .TRUE. and one for .FALSE.
1512 The default case is always separate. If the cases for .TRUE. and
1513 .FALSE. are in the same case list, the block for that case list
1514 always executed, and we don't generate code a COND_EXPR. */
1515 for (c = code->block; c; c = c->block)
1517 for (cp = c->ext.case_list; cp; cp = cp->next)
1521 if (cp->low->value.logical == 0) /* .FALSE. */
1523 else /* if (cp->value.logical != 0), thus .TRUE. */
1531 /* Start a new block. */
1532 gfc_start_block (&block);
1534 /* Calculate the switch expression. We always need to do this
1535 because it may have side effects. */
1536 gfc_init_se (&se, NULL);
1537 gfc_conv_expr_val (&se, code->expr1);
1538 gfc_add_block_to_block (&block, &se.pre);
1540 if (t == f && t != NULL)
1542 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1543 translate the code for these cases, append it to the current
1545 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1549 tree true_tree, false_tree, stmt;
/* Default both branches to empty so a missing case (with no default
   to substitute) simply does nothing.  */
1551 true_tree = build_empty_stmt (input_location);
1552 false_tree = build_empty_stmt (input_location);
1554 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1555 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1556 make the missing case the default case. */
1557 if (t != NULL && f != NULL)
1567 /* Translate the code for each of these blocks, and append it to
1568 the current block. */
1570 true_tree = gfc_trans_code (t->next);
1573 false_tree = gfc_trans_code (f->next);
1575 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1576 true_tree, false_tree);
1577 gfc_add_expr_to_block (&block, stmt);
1580 return gfc_finish_block (&block);
1584 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1585 Instead of generating compares and jumps, it is far simpler to
1586 generate a data structure describing the cases in order and call a
1587 library subroutine that locates the right case.
1588 This is particularly true because this is the only case where we
1589 might have to dispose of a temporary.
1590 The library subroutine returns a pointer to jump to or NULL if no
1591 branches are to be taken. */
/* Lowering strategy: build a static "jumptable" array of
   {string1, string1_len, string2, string2_len, target} records (one
   per case range), call _gfortran_select_string[_char4] at run time to
   get the matching case number, then SWITCH_EXPR on that number.
   NOTE(review): elided listing; k is presumably the 0/1 index into the
   per-kind static type caches (kind=1 vs kind=4) — the assignments to
   k are not visible here; confirm against the full source.  */
1594 gfc_trans_character_select (gfc_code *code)
1596 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1597 stmtblock_t block, body;
1603 /* The jump table types are stored in static variables to avoid
1604 constructing them from scratch every single time. */
1605 static tree select_struct[2];
1606 static tree ss_string1[2], ss_string1_len[2];
1607 static tree ss_string2[2], ss_string2_len[2];
1608 static tree ss_target[2];
1610 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1612 if (code->expr1->ts.kind == 1)
1614 else if (code->expr1->ts.kind == 4)
/* Lazily build the RECORD_TYPE for this character kind on first use.  */
1619 if (select_struct[k] == NULL)
1621 select_struct[k] = make_node (RECORD_TYPE);
1623 if (code->expr1->ts.kind == 1)
1624 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1625 else if (code->expr1->ts.kind == 4)
1626 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
/* Helper: append field NAME of TYPE to select_struct[k] and remember
   its FIELD_DECL in ss_NAME[k] for the constructor lists below.  */
1631 #define ADD_FIELD(NAME, TYPE) \
1632 ss_##NAME[k] = gfc_add_field_to_struct \
1633 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1634 get_identifier (stringize(NAME)), TYPE)
1636 ADD_FIELD (string1, pchartype);
1637 ADD_FIELD (string1_len, gfc_charlen_type_node);
1639 ADD_FIELD (string2, pchartype);
1640 ADD_FIELD (string2_len, gfc_charlen_type_node);
1642 ADD_FIELD (target, integer_type_node);
1645 gfc_finish_type (select_struct[k]);
/* Walk to the leftmost case, then count the cases via the right
   links; the list appears to be kept sorted for the runtime's search.  */
1648 cp = code->block->ext.case_list;
1649 while (cp->left != NULL)
1653 for (d = cp; d; d = d->right)
1656 end_label = gfc_build_label_decl (NULL_TREE);
1658 /* Generate the body */
1659 gfc_start_block (&block);
1660 gfc_init_block (&body);
1662 for (c = code->block; c; c = c->block)
1664 for (d = c->ext.case_list; d; d = d->next)
1666 label = gfc_build_label_decl (NULL_TREE);
/* Each case's label value is its ordinal number d->n, matching the
   target field stored in the jump table below.  */
1667 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1668 build_int_cst (NULL_TREE, d->n),
1669 build_int_cst (NULL_TREE, d->n), label);
1670 gfc_add_expr_to_block (&body, tmp);
1673 tmp = gfc_trans_code (c->next);
1674 gfc_add_expr_to_block (&body, tmp);
1676 tmp = build1_v (GOTO_EXPR, end_label);
1677 gfc_add_expr_to_block (&body, tmp);
1680 /* Generate the structure describing the branches */
1683 for(d = cp; d; d = d->right)
1687 gfc_init_se (&se, NULL);
/* Unbounded lower end: NULL string pointer with zero length.  */
1691 node = tree_cons (ss_string1[k], null_pointer_node, node);
1692 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1696 gfc_conv_expr_reference (&se, d->low);
1698 node = tree_cons (ss_string1[k], se.expr, node);
1699 node = tree_cons (ss_string1_len[k], se.string_length, node);
1702 if (d->high == NULL)
1704 node = tree_cons (ss_string2[k], null_pointer_node, node);
1705 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1709 gfc_init_se (&se, NULL);
1710 gfc_conv_expr_reference (&se, d->high);
1712 node = tree_cons (ss_string2[k], se.expr, node);
1713 node = tree_cons (ss_string2_len[k], se.string_length, node);
1716 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1719 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1720 init = tree_cons (NULL_TREE, tmp, init);
1723 type = build_array_type (select_struct[k],
1724 build_index_type (build_int_cst (NULL_TREE, n-1)));
1726 init = build_constructor_from_list (type, nreverse(init));
1727 TREE_CONSTANT (init) = 1;
1728 TREE_STATIC (init) = 1;
1729 /* Create a static variable to hold the jump table. */
1730 tmp = gfc_create_var (type, "jumptable");
1731 TREE_CONSTANT (tmp) = 1;
1732 TREE_STATIC (tmp) = 1;
1733 TREE_READONLY (tmp) = 1;
1734 DECL_INITIAL (tmp) = init;
1737 /* Build the library call */
1738 init = gfc_build_addr_expr (pvoid_type_node, init);
1740 gfc_init_se (&se, NULL);
1741 gfc_conv_expr_reference (&se, code->expr1);
1743 gfc_add_block_to_block (&block, &se.pre);
/* Pick the runtime helper matching the character kind.  */
1745 if (code->expr1->ts.kind == 1)
1746 fndecl = gfor_fndecl_select_string;
1747 else if (code->expr1->ts.kind == 4)
1748 fndecl = gfor_fndecl_select_string_char4;
1752 tmp = build_call_expr_loc (input_location,
1753 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1754 se.expr, se.string_length);
1755 case_num = gfc_create_var (integer_type_node, "case_num");
1756 gfc_add_modify (&block, case_num, tmp);
/* se.post may free a temporary holding the selector expression.  */
1758 gfc_add_block_to_block (&block, &se.post);
1760 tmp = gfc_finish_block (&body);
1761 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1762 gfc_add_expr_to_block (&block, tmp);
1764 tmp = build1_v (LABEL_EXPR, end_label);
1765 gfc_add_expr_to_block (&block, tmp);
1767 return gfc_finish_block (&block);
1771 /* Translate the three variants of the SELECT CASE construct.
1773 SELECT CASEs with INTEGER case expressions can be translated to an
1774 equivalent GENERIC switch statement, and for LOGICAL case
1775 expressions we build one or two if-else compares.
1777 SELECT CASEs with CHARACTER case expressions are a whole different
1778 story, because they don't exist in GENERIC. So we sort them and
1779 do a binary search at runtime.
1781 Fortran has no BREAK statement, and it does not allow jumps from
1782 one case block to another. That makes things a lot easier for
/* Dispatcher: picks the type-specific translator based on the
   selector expression's basic type.  */
1786 gfc_trans_select (gfc_code * code)
1788 gcc_assert (code && code->expr1);
1790 /* Empty SELECT constructs are legal. */
1791 if (code->block == NULL)
1792 return build_empty_stmt (input_location);
1794 /* Select the correct translation function. */
1795 switch (code->expr1->ts.type)
1797 case BT_LOGICAL: return gfc_trans_logical_select (code);
1798 case BT_INTEGER: return gfc_trans_integer_select (code);
1799 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type is a front-end bug, not user error.  */
1801 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1807 /* Traversal function to substitute a replacement symtree if the symbol
1808 in the expression is the same as that passed. f == 2 signals that
1809 that variable itself is not to be checked - only the references.
1810 This group of functions is used when the variable expression in a
1811 FORALL assignment has internal references. For example:
1812 FORALL (i = 1:4) p(p(i)) = i
1813 The only recourse here is to store a copy of 'p' for the index
/* Shared state for the replace/restore pair below: the substitute
   symtree built by forall_make_variable_temp and the symtree it
   replaced.  */
1816 static gfc_symtree *new_symtree;
1817 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: rewrite references to SYM so they
   point at new_symtree instead.  */
1820 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1822 if (expr->expr_type != EXPR_VARIABLE)
1827 else if (expr->symtree->n.sym == sym)
1828 expr->symtree = new_symtree;
/* Replace every reference to SYM in expression E with new_symtree,
   using forall_replace as the traversal callback (F is forwarded;
   f == 2 skips the top-level variable itself, per the comment above).  */
1834 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1836 gfc_traverse_expr (e, sym, forall_replace, f);
/* Traversal callback that undoes forall_replace: any reference that
   was redirected to new_symtree is pointed back at old_symtree.  */
1840 forall_restore (gfc_expr *expr,
1841 gfc_symbol *sym ATTRIBUTE_UNUSED,
1842 int *f ATTRIBUTE_UNUSED)
1844 if (expr->expr_type != EXPR_VARIABLE)
1847 if (expr->symtree == new_symtree)
1848 expr->symtree = old_symtree;
/* Restore all replaced symtree references in E to the original symbol
   (see forall_restore).  */
1854 forall_restore_symtree (gfc_expr *e)
1856 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the FORALL assignment's lvalue variable and
   redirect C->expr1 (and C->expr2) to it, to break dependencies like
   FORALL (i=1:4) p(p(i)) = i.  Initialization code is appended to PRE,
   cleanup to POST.  The substitute symtree is left in the file-static
   new_symtree/old_symtree pair; cleanup_forall_symtrees undoes this.
   NOTE(review): elided listing; declarations and some statements
   between the visible lines are not shown.  */
1860 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1865 gfc_symbol *new_sym;
1866 gfc_symbol *old_sym;
1870 /* Build a copy of the lvalue. */
1871 old_symtree = c->expr1->symtree;
1872 old_sym = old_symtree->n.sym;
1873 e = gfc_lval_expr_from_sym (old_sym);
1874 if (old_sym->attr.dimension)
1876 gfc_init_se (&tse, NULL);
1877 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
1878 gfc_add_block_to_block (pre, &tse.pre);
1879 gfc_add_block_to_block (post, &tse.post);
1880 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1882 if (e->ts.type != BT_CHARACTER)
1884 /* Use the variable offset for the temporary. */
1885 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1886 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar path: copy the value into a fresh temporary.  */
1891 gfc_init_se (&tse, NULL);
1892 gfc_init_se (&rse, NULL);
1893 gfc_conv_expr (&rse, e);
1894 if (e->ts.type == BT_CHARACTER)
1896 tse.string_length = rse.string_length;
1897 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1899 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1901 gfc_add_block_to_block (pre, &tse.pre);
1902 gfc_add_block_to_block (post, &tse.post);
1906 tmp = gfc_typenode_for_spec (&e->ts);
1907 tse.expr = gfc_create_var (tmp, "temp");
1910 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1911 e->expr_type == EXPR_VARIABLE, true);
1912 gfc_add_expr_to_block (pre, tmp);
1916 /* Create a new symbol to represent the lvalue. */
1917 new_sym = gfc_new_symbol (old_sym->name, NULL);
1918 new_sym->ts = old_sym->ts;
1919 new_sym->attr.referenced = 1;
1920 new_sym->attr.temporary = 1;
1921 new_sym->attr.dimension = old_sym->attr.dimension;
1922 new_sym->attr.flavor = old_sym->attr.flavor;
1924 /* Use the temporary as the backend_decl. */
1925 new_sym->backend_decl = tse.expr;
1927 /* Create a fake symtree for it. */
1929 new_symtree = gfc_new_symtree (&root, old_sym->name);
1930 new_symtree->n.sym = new_sym;
1931 gcc_assert (new_symtree == root);
1933 /* Go through the expression reference replacing the old_symtree
1935 forall_replace_symtree (c->expr1, old_sym, 2);
1937 /* Now we have made this temporary, we might as well use it for
1938 the right hand side. */
1939 forall_replace_symtree (c->expr2, old_sym, 1);
1943 /* Handles dependencies in forall assignments. */
/* Decides whether the FORALL assignment C needs a temporary for its
   lvalue and, for dependencies internal to the lvalue expression
   itself, creates that temporary via forall_make_variable_temp
   (appending setup/cleanup to PRE/POST).
   NOTE(review): elided listing; the function's return statements and
   some control flow are not visible here.  */
1945 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1952 lsym = c->expr1->symtree->n.sym;
1953 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1955 /* Now check for dependencies within the 'variable'
1956 expression itself. These are treated by making a complete
1957 copy of variable and changing all the references to it
1958 point to the copy instead. Note that the shallow copy of
1959 the variable will not suffice for derived types with
1960 pointer components. We therefore leave these to their
1962 if (lsym->ts.type == BT_DERIVED
1963 && lsym->ts.u.derived->attr.pointer_comp)
1967 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1969 forall_make_variable_temp (c, pre, post);
1973 /* Substrings with dependencies are treated in the same
1975 if (c->expr1->ts.type == BT_CHARACTER
1977 && c->expr2->expr_type == EXPR_VARIABLE
1978 && lsym == c->expr2->symtree->n.sym)
1980 for (lref = c->expr1->ref; lref; lref = lref->next)
1981 if (lref->type == REF_SUBSTRING)
1983 for (rref = c->expr2->ref; rref; rref = rref->next)
1984 if (rref->type == REF_SUBSTRING)
/* Overlapping substrings of the same variable: only a rhs start
   before the lhs start forces the copy.  */
1988 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1990 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution performed by forall_make_variable_temp:
   restore original references in both sides of the assignment, then
   free the fake symtree and its temporary symbol.  */
1999 cleanup_forall_symtrees (gfc_code *c)
2001 forall_restore_symtree (c->expr1);
2002 forall_restore_symtree (c->expr2);
2003 gfc_free (new_symtree->n.sym);
2004 gfc_free (new_symtree);
2008 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2009 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2010 indicates whether we should generate code to test the FORALLs mask
2011 array. OUTER is the loop header to be used for initializing mask
2014 The generated loop format is:
2015 count = (end - start + step) / step
/* Wraps BODY in one countdown loop per FORALL index variable (innermost
   first); returns the resulting tree.  Each loop runs its index from
   start by step while a precomputed trip count decrements to zero.  */
2028 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2029 int mask_flag, stmtblock_t *outer)
2037 tree var, start, end, step;
2040 /* Initialize the mask index outside the FORALL nest. */
2041 if (mask_flag && forall_tmp->mask)
2042 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2044 iter = forall_tmp->this_loop;
2045 nvar = forall_tmp->nvar;
2046 for (n = 0; n < nvar; n++)
2049 start = iter->start;
2053 exit_label = gfc_build_label_decl (NULL_TREE);
2054 TREE_USED (exit_label) = 1;
2056 /* The loop counter. */
2057 count = gfc_create_var (TREE_TYPE (var), "count");
2059 /* The body of the loop. */
2060 gfc_init_block (&block);
2062 /* The exit condition: leave once the remaining trip count reaches
2063    zero (tested at the top, so a non-positive count skips the body).  */
2063 cond = fold_build2 (LE_EXPR, boolean_type_node,
2064 count, build_int_cst (TREE_TYPE (count), 0));
2065 tmp = build1_v (GOTO_EXPR, exit_label);
2066 tmp = fold_build3 (COND_EXPR, void_type_node,
2067 cond, tmp, build_empty_stmt (input_location));
2068 gfc_add_expr_to_block (&block, tmp);
2070 /* The main loop body. */
2071 gfc_add_expr_to_block (&block, body);
2073 /* Increment the loop variable. */
2074 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2075 gfc_add_modify (&block, var, tmp);
2077 /* Advance to the next mask element. Only do this for the
2079    innermost loop (n == 0), which walks the flattened mask array.  */
2079 if (n == 0 && mask_flag && forall_tmp->mask)
2081 tree maskindex = forall_tmp->maskindex;
2082 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2083 maskindex, gfc_index_one_node);
2084 gfc_add_modify (&block, maskindex, tmp);
2087 /* Decrement the loop counter. */
2088 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2089 build_int_cst (TREE_TYPE (var), 1));
2090 gfc_add_modify (&block, count, tmp);
2092 body = gfc_finish_block (&block);
2094 /* Loop var initialization. */
2095 gfc_init_block (&block);
2096 gfc_add_modify (&block, var, start);
2099 /* Initialize the loop counter: count = (end - start + step) / step,
2100    the classic Fortran trip count.  */
2100 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2101 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2102 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2103 gfc_add_modify (&block, count, tmp);
2105 /* The loop expression. */
2106 tmp = build1_v (LOOP_EXPR, body);
2107 gfc_add_expr_to_block (&block, tmp);
2109 /* The exit label. */
2110 tmp = build1_v (LABEL_EXPR, exit_label);
2111 gfc_add_expr_to_block (&block, tmp);
/* The finished loop becomes the body of the next-outer index loop.  */
2113 body = gfc_finish_block (&block);
2120 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2121 is nonzero, the body is controlled by all masks in the forall nest.
2122 Otherwise, the innermost loop is not controlled by it's mask. This
2123 is used for initializing that mask. */
/* Walks the forall_info chain from innermost (NESTED_FORALL_INFO)
   outwards via prev_nest, at each level guarding BODY with the mask
   (if any) and wrapping it in that level's loops.  */
2126 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2131 forall_info *forall_tmp;
2132 tree mask, maskindex;
2134 gfc_start_block (&header);
2136 forall_tmp = nested_forall_info;
2137 while (forall_tmp != NULL)
2139 /* Generate body with masks' control. */
2142 mask = forall_tmp->mask;
2143 maskindex = forall_tmp->maskindex;
2145 /* If a mask was specified make the assignment conditional. */
2148 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2149 body = build3_v (COND_EXPR, tmp, body,
2150 build_empty_stmt (input_location));
2153 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2154 forall_tmp = forall_tmp->prev_nest;
/* header collects any per-level initialization (e.g. mask index
   resets) emitted outside the loops.  */
2158 gfc_add_expr_to_block (&header, body);
2159 return gfc_finish_block (&header);
2163 /* Allocate data for holding a temporary array. Returns either a local
2164 temporary array or a pointer variable. */
/* If BYTESIZE is small/constant enough for the stack, return a stack
   array variable and leave *PDATA untouched by malloc; otherwise
   malloc the space into PBLOCK and return the pointer variable, with
   *PDATA set so the caller can free it later.  */
2167 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2174 if (INTEGER_CST_P (size))
2176 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2177 gfc_index_one_node);
2182 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2183 type = build_array_type (elem_type, type);
2184 if (gfc_can_put_var_on_stack (bytesize))
2186 gcc_assert (INTEGER_CST_P (size));
2187 tmpvar = gfc_create_var (type, "temp");
2192 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2193 *pdata = convert (pvoid_type_node, tmpvar);
2195 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2196 gfc_add_modify (pblock, tmpvar, tmp);
2202 /* Generate codes to copy the temporary to the actual lhs. */
/* Copies the temporary TMP1 (indexed by COUNT1) back into the lhs
   EXPR, either as a single scalar assignment or via a scalarized loop.
   WHEREMASK/INVERT optionally gate each element; COUNT3 walks the
   mask.  COUNT1 and COUNT3 are incremented as a side effect so nested
   FORALL iterations continue where this copy left off.  */
2205 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2206 tree count1, tree wheremask, bool invert)
2210 stmtblock_t block, body;
2216 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single element is copied out of the temporary.  */
2218 if (lss == gfc_ss_terminator)
2220 gfc_start_block (&block);
2222 gfc_init_se (&lse, NULL);
2224 /* Translate the expression. */
2225 gfc_conv_expr (&lse, expr);
2227 /* Form the expression for the temporary. */
2228 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2230 /* Use the scalar assignment as is. */
2231 gfc_add_block_to_block (&block, &lse.pre);
2232 gfc_add_modify (&block, lse.expr, tmp);
2233 gfc_add_block_to_block (&block, &lse.post);
2235 /* Increment the count1. */
2236 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2237 gfc_index_one_node);
2238 gfc_add_modify (&block, count1, tmp);
2240 tmp = gfc_finish_block (&block);
/* Array lhs: drive the scalarizer over the lhs shape.  */
2244 gfc_start_block (&block);
2246 gfc_init_loopinfo (&loop1);
2247 gfc_init_se (&rse, NULL);
2248 gfc_init_se (&lse, NULL);
2250 /* Associate the lss with the loop. */
2251 gfc_add_ss_to_loop (&loop1, lss);
2253 /* Calculate the bounds of the scalarization. */
2254 gfc_conv_ss_startstride (&loop1);
2255 /* Setup the scalarizing loops. */
2256 gfc_conv_loop_setup (&loop1, &expr->where);
2258 gfc_mark_ss_chain_used (lss, 1);
2260 /* Start the scalarized loop body. */
2261 gfc_start_scalarized_body (&loop1, &body);
2263 /* Setup the gfc_se structures. */
2264 gfc_copy_loopinfo_to_se (&lse, &loop1);
2267 /* Form the expression of the temporary: the rhs of this copy is the
2268    linearly-indexed temp, not a walked array.  */
2268 if (lss != gfc_ss_terminator)
2269 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2270 /* Translate expr. */
2271 gfc_conv_expr (&lse, expr);
2273 /* Use the scalar assignment. */
2274 rse.string_length = lse.string_length;
2275 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2277 /* Form the mask expression according to the mask tree list. */
2280 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2282 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2283 TREE_TYPE (wheremaskexpr),
2285 tmp = fold_build3 (COND_EXPR, void_type_node,
2287 build_empty_stmt (input_location));
2290 gfc_add_expr_to_block (&body, tmp);
2292 /* Increment count1. */
2293 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2294 count1, gfc_index_one_node);
2295 gfc_add_modify (&body, count1, tmp);
2297 /* Increment count3. */
2300 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2301 count3, gfc_index_one_node);
2302 gfc_add_modify (&body, count3, tmp);
2305 /* Generate the copying loops. */
2306 gfc_trans_scalarizing_loops (&loop1, &body);
2307 gfc_add_block_to_block (&block, &loop1.pre);
2308 gfc_add_block_to_block (&block, &loop1.post);
2309 gfc_cleanup_loop (&loop1);
2311 tmp = gfc_finish_block (&block);
2317 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2318 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2319 and should not be freed. WHEREMASK is the conditional execution mask
2320 whose sense may be inverted by INVERT. */
/* Mirror of generate_loop_for_temp_to_lhs: evaluates EXPR2 element by
   element into the linear temporary TMP1[COUNT1], scalar or scalarized
   depending on LSS, and bumps COUNT1/COUNT3 as it goes.  */
2323 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2324 tree count1, gfc_ss *lss, gfc_ss *rss,
2325 tree wheremask, bool invert)
2327 stmtblock_t block, body1;
2334 gfc_start_block (&block);
2336 gfc_init_se (&rse, NULL);
2337 gfc_init_se (&lse, NULL);
/* Scalar case: no scalarizer loop needed.  */
2339 if (lss == gfc_ss_terminator)
2341 gfc_init_block (&body1);
2342 gfc_conv_expr (&rse, expr2);
2343 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2347 /* Initialize the loop. */
2348 gfc_init_loopinfo (&loop);
2350 /* We may need LSS to determine the shape of the expression. */
2351 gfc_add_ss_to_loop (&loop, lss);
2352 gfc_add_ss_to_loop (&loop, rss);
2354 gfc_conv_ss_startstride (&loop);
2355 gfc_conv_loop_setup (&loop, &expr2->where);
2357 gfc_mark_ss_chain_used (rss, 1);
2358 /* Start the loop body. */
2359 gfc_start_scalarized_body (&loop, &body1);
2361 /* Translate the expression. */
2362 gfc_copy_loopinfo_to_se (&rse, &loop);
2364 gfc_conv_expr (&rse, expr2);
2366 /* Form the expression of the temporary. */
2367 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2370 /* Use the scalar assignment. */
2371 lse.string_length = rse.string_length;
2372 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2373 expr2->expr_type == EXPR_VARIABLE, true);
2375 /* Form the mask expression according to the mask tree list. */
2378 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2380 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2381 TREE_TYPE (wheremaskexpr),
2383 tmp = fold_build3 (COND_EXPR, void_type_node,
2384 wheremaskexpr, tmp, build_empty_stmt (input_location));
2387 gfc_add_expr_to_block (&body1, tmp);
2389 if (lss == gfc_ss_terminator)
2391 gfc_add_block_to_block (&block, &body1);
2393 /* Increment count1. */
2394 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2395 gfc_index_one_node);
2396 gfc_add_modify (&block, count1, tmp);
2400 /* Increment count1 inside the scalarized loop body.  */
2401 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2402 count1, gfc_index_one_node);
2403 gfc_add_modify (&body1, count1, tmp);
2405 /* Increment count3. */
2408 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2409 count3, gfc_index_one_node);
2410 gfc_add_modify (&body1, count3, tmp);
2413 /* Generate the copying loops. */
2414 gfc_trans_scalarizing_loops (&loop, &body1);
2416 gfc_add_block_to_block (&block, &loop.pre);
2417 gfc_add_block_to_block (&block, &loop.post);
2419 gfc_cleanup_loop (&loop);
2420 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2421 as tree nodes in SS may not be valid in different scope. */
2424 tmp = gfc_finish_block (&block);
2429 /* Calculate the size of temporary needed in the assignment inside forall.
2430 LSS and RSS are filled in this function. */
/* Returns the element count of one assignment EXPR1 = EXPR2 (one for a
   scalar lhs, otherwise the product of the scalarizer's loop extents).
   The walked SS chains are returned via *LSS/*RSS for reuse by the
   copy loops; any setup code goes into PBLOCK.  */
2433 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2434 stmtblock_t * pblock,
2435 gfc_ss **lss, gfc_ss **rss)
2443 *lss = gfc_walk_expr (expr1);
2446 size = gfc_index_one_node;
2447 if (*lss != gfc_ss_terminator)
2449 gfc_init_loopinfo (&loop);
2451 /* Walk the RHS of the expression. */
2452 *rss = gfc_walk_expr (expr2);
2453 if (*rss == gfc_ss_terminator)
2455 /* The rhs is scalar. Add a ss for the expression. */
2456 *rss = gfc_get_ss ();
2457 (*rss)->next = gfc_ss_terminator;
2458 (*rss)->type = GFC_SS_SCALAR;
2459 (*rss)->expr = expr2;
2462 /* Associate the SS with the loop. */
2463 gfc_add_ss_to_loop (&loop, *lss);
2464 /* We don't actually need to add the rhs at this point, but it might
2465 make guessing the loop bounds a bit easier. */
2466 gfc_add_ss_to_loop (&loop, *rss);
2468 /* We only want the shape of the expression, not rest of the junk
2469 generated by the scalarizer. */
2470 loop.array_parameter = 1;
2472 /* Calculate the bounds of the scalarization. Bounds checking is
2473    suppressed here since we only need the shape, not checked code.  */
2473 save_flag = gfc_option.rtcheck;
2474 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2475 gfc_conv_ss_startstride (&loop);
2476 gfc_option.rtcheck = save_flag;
2477 gfc_conv_loop_setup (&loop, &expr2->where);
2479 /* Figure out how many elements we need: product over dimensions of
2480    (to - from + 1).  */
2480 for (i = 0; i < loop.dimen; i++)
2482 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2483 gfc_index_one_node, loop.from[i]);
2484 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2486 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2488 gfc_add_block_to_block (pblock, &loop.pre);
2489 size = gfc_evaluate_now (size, pblock);
2490 gfc_add_block_to_block (pblock, &loop.post);
2492 /* TODO: write a function that cleans up a loopinfo without freeing
2493 the SS chains. Currently a NOP. */
2500 /* Calculate the overall iterator number of the nested forall construct.
2501 This routine actually calculates the number of times the body of the
2502 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2503 that by the expression INNER_SIZE. The BLOCK argument specifies the
2504 block in which to calculate the result, and the optional INNER_SIZE_BODY
2505 argument contains any statements that need to executed (inside the loop)
2506 to initialize or calculate INNER_SIZE. */
2509 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2510 stmtblock_t *inner_size_body, stmtblock_t *block)
2512 forall_info *forall_tmp = nested_forall_info;
2516 /* We can eliminate the innermost unconditional loops with constant
2518    sizes by folding the multiplication at compile time.  */
2518 if (INTEGER_CST_P (inner_size))
2521 && !forall_tmp->mask
2522 && INTEGER_CST_P (forall_tmp->size))
2524 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2525 inner_size, forall_tmp->size);
2526 forall_tmp = forall_tmp->prev_nest;
2529 /* If there are no loops left, we have our constant result. */
2534 /* Otherwise, create a temporary variable to compute the result
2535    at run time: num starts at 0 and accumulates inner_size once per
2536    (masked) iteration of the remaining forall loops.  */
2535 number = gfc_create_var (gfc_array_index_type, "num");
2536 gfc_add_modify (block, number, gfc_index_zero_node);
2538 gfc_start_block (&body);
2539 if (inner_size_body)
2540 gfc_add_block_to_block (&body, inner_size_body);
2542 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2543 number, inner_size);
2546 gfc_add_modify (&body, number, tmp);
2547 tmp = gfc_finish_block (&body);
2549 /* Generate loops. */
2550 if (forall_tmp != NULL)
2551 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2553 gfc_add_expr_to_block (block, tmp);
2559 /* Allocate temporary for forall construct. SIZE is the size of temporary
2560 needed. PTEMP1 is returned for space free. */
/* Converts the element count SIZE into a byte size (skipping the
   multiply when the element size is one byte) and delegates the actual
   allocation to gfc_do_allocate; heap pointers are dereferenced so the
   caller always gets an array lvalue.  */
2563 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2570 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2571 if (!integer_onep (unit))
2572 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2577 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2580 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2585 /* Allocate temporary for forall construct according to the information in
2586 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2587 assignment inside forall. PTEMP1 is returned so the caller can free the
space later. */
2590 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2591 tree inner_size, stmtblock_t * inner_size_body,
2592 stmtblock_t * block, tree * ptemp1)
2596 /* Calculate the total size of temporary needed in forall construct. */
2597 size = compute_overall_iter_number (nested_forall_info, inner_size,
2598 inner_size_body, block);
/* Delegate the actual allocation to the size-based helper.  */
2600 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2604 /* Handle assignments inside forall which need temporary.
2606 forall (i=start:end:stride; maskexpr)
2609 (where e,f<i> are arbitrary expressions possibly involving i
2610 and there is a dependency between e<i> and f<i>)
2612 masktmp(:) = maskexpr(:)
2617 for (i = start; i <= end; i += stride)
2621 for (i = start; i <= end; i += stride)
2623 if (masktmp[maskindex++])
2624 tmp[count1++] = f<i>
2628 for (i = start; i <= end; i += stride)
2630 if (masktmp[maskindex++])
2631 e<i> = tmp[count1++]
2636 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2637 tree wheremask, bool invert,
2638 forall_info * nested_forall_info,
2639 stmtblock_t * block)
2647 stmtblock_t inner_size_body;
2649 /* Create vars. count1 is the current iterator number of the nested
2651 count1 = gfc_create_var (gfc_array_index_type, "count1");
2653 /* Count is the wheremask index. */
2656 count = gfc_create_var (gfc_array_index_type, "count");
2657 gfc_add_modify (block, count, gfc_index_zero_node);
2662 /* Initialize count1. */
2663 gfc_add_modify (block, count1, gfc_index_zero_node);
2665 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2666 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2667 gfc_init_block (&inner_size_body);
2668 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2671 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2672 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
/* Character lhs: lazily translate the length expression the first time
   it is needed, then build a character type of that length.  */
2674 if (!expr1->ts.u.cl->backend_decl)
2677 gfc_init_se (&tse, NULL);
2678 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2679 expr1->ts.u.cl->backend_decl = tse.expr;
2681 type = gfc_get_character_type_len (gfc_default_character_kind,
2682 expr1->ts.u.cl->backend_decl);
2685 type = gfc_typenode_for_spec (&expr1->ts);
2687 /* Allocate temporary for nested forall construct according to the
2688 information in nested_forall_info and inner_size. */
2689 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2690 &inner_size_body, block, &ptemp1);
2692 /* First pass: generate code to copy the rhs into the temporary. */
2693 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2696 /* Generate body and loops according to the information in
2697 nested_forall_info. */
2698 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2699 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the copy-back pass so both passes walk the
   temporary (and the wheremask) in the same order.  */
2702 gfc_add_modify (block, count1, gfc_index_zero_node);
2706 gfc_add_modify (block, count, gfc_index_zero_node);
2708 /* Second pass: generate code to copy the temporary to the lhs. */
2709 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2712 /* Generate body and loops according to the information in
2713 nested_forall_info. */
2714 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2715 gfc_add_expr_to_block (block, tmp);
2719 /* Free the temporary. */
2720 tmp = gfc_call_free (ptemp1);
2721 gfc_add_expr_to_block (block, tmp);
2726 /* Translate pointer assignment inside FORALL which needs a temporary.
   Like gfc_trans_assign_need_temp, this does two passes: first stash the
   rhs targets in a flat temporary, then reset the index and assign them
   to the lhs pointers.  */
2729 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2730 forall_info * nested_forall_info,
2731 stmtblock_t * block)
2745 tree tmp, tmp1, ptemp1;
2747 count = gfc_create_var (gfc_array_index_type, "count");
2748 gfc_add_modify (block, count, gfc_index_zero_node);
/* One temporary slot per forall iteration: each iteration stores a single
   pointer (scalar case) or a single descriptor (array case).  */
2750 inner_size = integer_one_node;
2751 lss = gfc_walk_expr (expr1);
2752 rss = gfc_walk_expr (expr2);
2753 if (lss == gfc_ss_terminator)
/* Scalar pointer assignment: the temporary is an array of bare
   pointers to the lhs type.  */
2755 type = gfc_typenode_for_spec (&expr1->ts);
2756 type = build_pointer_type (type);
2758 /* Allocate temporary for nested forall construct according to the
2759 information in nested_forall_info and inner_size. */
2760 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2761 inner_size, NULL, block, &ptemp1);
2762 gfc_start_block (&body);
2763 gfc_init_se (&lse, NULL);
2764 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2765 gfc_init_se (&rse, NULL);
/* Evaluate expr2 as a pointer (the target address), not its value.  */
2766 rse.want_pointer = 1;
2767 gfc_conv_expr (&rse, expr2);
2768 gfc_add_block_to_block (&body, &rse.pre);
2769 gfc_add_modify (&body, lse.expr,
2770 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2771 gfc_add_block_to_block (&body, &rse.post);
2773 /* Increment count. */
2774 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2775 count, gfc_index_one_node);
2776 gfc_add_modify (&body, count, tmp);
2778 tmp = gfc_finish_block (&body);
2780 /* Generate body and loops according to the information in
2781 nested_forall_info. */
2782 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2783 gfc_add_expr_to_block (block, tmp);
/* Second pass: rewind the index and assign the saved targets to the
   lhs pointers, walking the temporary in the same order.  */
2786 gfc_add_modify (block, count, gfc_index_zero_node);
2788 gfc_start_block (&body);
2789 gfc_init_se (&lse, NULL);
2790 gfc_init_se (&rse, NULL);
2791 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2792 lse.want_pointer = 1;
2793 gfc_conv_expr (&lse, expr1);
2794 gfc_add_block_to_block (&body, &lse.pre);
2795 gfc_add_modify (&body, lse.expr, rse.expr);
2796 gfc_add_block_to_block (&body, &lse.post);
2797 /* Increment count. */
2798 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2799 count, gfc_index_one_node);
2800 gfc_add_modify (&body, count, tmp);
2801 tmp = gfc_finish_block (&body);
2803 /* Generate body and loops according to the information in
2804 nested_forall_info. */
2805 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2806 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: the temporary holds one array descriptor
   per iteration, built from the scalarizer's loop bounds.  */
2810 gfc_init_loopinfo (&loop);
2812 /* Associate the SS with the loop. */
2813 gfc_add_ss_to_loop (&loop, rss);
2815 /* Setup the scalarizing loops and bounds. */
2816 gfc_conv_ss_startstride (&loop);
2818 gfc_conv_loop_setup (&loop, &expr2->where);
2820 info = &rss->data.info;
2821 desc = info->descriptor;
2823 /* Make a new descriptor. */
2824 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2825 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2826 loop.from, loop.to, 1,
2827 GFC_ARRAY_UNKNOWN, true);
2829 /* Allocate temporary for nested forall construct. */
2830 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2831 inner_size, NULL, block, &ptemp1);
2832 gfc_start_block (&body);
2833 gfc_init_se (&lse, NULL);
2834 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
/* Have the descriptor for expr2 written straight into the temporary
   slot rather than into a fresh variable.  */
2835 lse.direct_byref = 1;
2836 rss = gfc_walk_expr (expr2);
2837 gfc_conv_expr_descriptor (&lse, expr2, rss);
2839 gfc_add_block_to_block (&body, &lse.pre);
2840 gfc_add_block_to_block (&body, &lse.post);
2842 /* Increment count. */
2843 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2844 count, gfc_index_one_node);
2845 gfc_add_modify (&body, count, tmp);
2847 tmp = gfc_finish_block (&body);
2849 /* Generate body and loops according to the information in
2850 nested_forall_info. */
2851 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2852 gfc_add_expr_to_block (block, tmp);
/* Second pass for the descriptor case: rewind and copy each saved
   descriptor into the lhs pointer's descriptor.  */
2855 gfc_add_modify (block, count, gfc_index_zero_node);
2857 parm = gfc_build_array_ref (tmp1, count, NULL);
2858 lss = gfc_walk_expr (expr1);
2859 gfc_init_se (&lse, NULL);
2860 gfc_conv_expr_descriptor (&lse, expr1, lss);
2861 gfc_add_modify (&lse.pre, lse.expr, parm);
2862 gfc_start_block (&body);
2863 gfc_add_block_to_block (&body, &lse.pre);
2864 gfc_add_block_to_block (&body, &lse.post);
2866 /* Increment count. */
2867 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2868 count, gfc_index_one_node);
2869 gfc_add_modify (&body, count, tmp);
2871 tmp = gfc_finish_block (&body);
2873 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2874 gfc_add_expr_to_block (block, tmp);
2876 /* Free the temporary. */
2879 tmp = gfc_call_free (ptemp1);
2880 gfc_add_expr_to_block (block, tmp);
2885 /* FORALL and WHERE statements are really nasty, especially when you nest
2886 them. All the rhs of a forall assignment must be evaluated before the
2887 actual assignments are performed. Presumably this also applies to all the
2888 assignments in an inner where statement. */
2890 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2891 linear array, relying on the fact that we process in the same order in all
2894 forall (i=start:end:stride; maskexpr)
2898 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2900 count = ((end + 1 - start) / stride)
2901 masktmp(:) = maskexpr(:)
2904 for (i = start; i <= end; i += stride)
2906 if (masktmp[maskindex++])
2910 for (i = start; i <= end; i += stride)
2912 if (masktmp[maskindex++])
2916 Note that this code only works when there are no dependencies.
2917 Forall loop with array assignments and data dependencies are a real pain,
2918 because the size of the temporary cannot always be determined before the
2919 loop is executed. This problem is compounded by the presence of nested
2924 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2944 gfc_forall_iterator *fa;
2947 gfc_saved_var *saved_vars;
2948 iter_info *this_forall;
2952 /* Do nothing if the mask is false. */
2954 && code->expr1->expr_type == EXPR_CONSTANT
2955 && !code->expr1->value.logical)
2956 return build_empty_stmt (input_location);
2959 /* Count the FORALL index number. */
2960 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2964 /* Allocate the space for var, start, end, step, varexpr. */
2965 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2966 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2967 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2968 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2969 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2970 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2972 /* Allocate the space for info. */
2973 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2975 gfc_start_block (&pre);
2976 gfc_init_block (&post);
2977 gfc_init_block (&block);
/* Translate every forall iterator: shadow the index symbol with a
   temporary, and evaluate start/end/stride once up front.  */
2980 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2982 gfc_symbol *sym = fa->var->symtree->n.sym;
2984 /* Allocate space for this_forall. */
2985 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2987 /* Create a temporary variable for the FORALL index. */
2988 tmp = gfc_typenode_for_spec (&sym->ts);
2989 var[n] = gfc_create_var (tmp, sym->name);
2990 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2992 /* Record it in this_forall. */
2993 this_forall->var = var[n];
2995 /* Replace the index symbol's backend_decl with the temporary decl. */
2996 sym->backend_decl = var[n];
2998 /* Work out the start, end and stride for the loop. */
2999 gfc_init_se (&se, NULL);
3000 gfc_conv_expr_val (&se, fa->start);
3001 /* Record it in this_forall. */
3002 this_forall->start = se.expr;
3003 gfc_add_block_to_block (&block, &se.pre);
3006 gfc_init_se (&se, NULL);
3007 gfc_conv_expr_val (&se, fa->end);
3008 /* Record it in this_forall. */
3009 this_forall->end = se.expr;
3010 gfc_make_safe_expr (&se);
3011 gfc_add_block_to_block (&block, &se.pre);
3014 gfc_init_se (&se, NULL);
3015 gfc_conv_expr_val (&se, fa->stride);
3016 /* Record it in this_forall. */
3017 this_forall->step = se.expr;
3018 gfc_make_safe_expr (&se);
3019 gfc_add_block_to_block (&block, &se.pre);
3022 /* Set the NEXT field of this_forall to NULL. */
3023 this_forall->next = NULL;
3024 /* Link this_forall to the info construct (append at the tail to keep
3025 iterator order). */
3025 if (info->this_loop)
3027 iter_info *iter_tmp = info->this_loop;
3028 while (iter_tmp->next != NULL)
3029 iter_tmp = iter_tmp->next;
3030 iter_tmp->next = this_forall;
3033 info->this_loop = this_forall;
3039 /* Calculate the size needed for the current forall level. */
3040 size = gfc_index_one_node;
3041 for (n = 0; n < nvar; n++)
3043 /* size = (end + step - start) / step. */
3044 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
3046 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
3048 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
3049 tmp = convert (gfc_array_index_type, tmp);
/* Overall size is the product of the trip counts of all indices.  */
3051 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
3054 /* Record the nvar and size of current forall level. */
3060 /* If the mask is .true., consider the FORALL unconditional. */
3061 if (code->expr1->expr_type == EXPR_CONSTANT
3062 && code->expr1->value.logical)
3070 /* First we need to allocate the mask. */
3073 /* As the mask array can be very big, prefer compact boolean types. */
3074 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3075 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3076 size, NULL, &block, &pmask);
3077 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3079 /* Record them in the info structure. */
3080 info->maskindex = maskindex;
3085 /* No mask was specified. */
3086 maskindex = NULL_TREE;
3087 mask = pmask = NULL_TREE;
3090 /* Link the current forall level to nested_forall_info. */
3091 info->prev_nest = nested_forall_info;
3092 nested_forall_info = info;
3094 /* Copy the mask into a temporary variable if required.
3095 For now we assume a mask temporary is needed. */
3098 /* As the mask array can be very big, prefer compact boolean types. */
3099 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3101 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3103 /* Start of mask assignment loop body. */
3104 gfc_start_block (&body);
3106 /* Evaluate the mask expression. */
3107 gfc_init_se (&se, NULL);
3108 gfc_conv_expr_val (&se, code->expr1);
3109 gfc_add_block_to_block (&body, &se.pre);
3111 /* Store the mask. */
3112 se.expr = convert (mask_type, se.expr);
3114 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3115 gfc_add_modify (&body, tmp, se.expr);
3117 /* Advance to the next mask element. */
3118 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3119 maskindex, gfc_index_one_node);
3120 gfc_add_modify (&body, maskindex, tmp);
3122 /* Generate the loops. */
3123 tmp = gfc_finish_block (&body);
3124 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3125 gfc_add_expr_to_block (&block, tmp);
/* Translate the forall body, dispatching on the kind of each nested
   statement.  */
3128 c = code->block->next;
3130 /* TODO: loop merging in FORALL statements. */
3131 /* Now that we've got a copy of the mask, generate the assignment loops. */
3137 /* A scalar or array assignment. Do the simple check for
3138 lhs to rhs dependencies. These make a temporary for the
3139 rhs and form a second forall block to copy to variable. */
3140 need_temp = check_forall_dependencies(c, &pre, &post);
3142 /* Temporaries due to array assignment data dependencies introduce
3143 no end of problems. */
3145 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3146 nested_forall_info, &block);
3149 /* Use the normal assignment copying routines. */
3150 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3152 /* Generate body and loops. */
3153 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3155 gfc_add_expr_to_block (&block, tmp);
3158 /* Cleanup any temporary symtrees that have been made to deal
3159 with dependencies. */
3161 cleanup_forall_symtrees (c);
3166 /* Translate WHERE or WHERE construct nested in FORALL. */
3167 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3170 /* Pointer assignment inside FORALL. */
3171 case EXEC_POINTER_ASSIGN:
3172 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3174 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3175 nested_forall_info, &block);
3178 /* Use the normal assignment copying routines. */
3179 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3181 /* Generate body and loops. */
3182 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3184 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse, with this level prepended to the nest info.  */
3189 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3190 gfc_add_expr_to_block (&block, tmp);
3193 /* Explicit subroutine calls are prevented by the frontend but interface
3194 assignments can legitimately produce them. */
3195 case EXEC_ASSIGN_CALL:
3196 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3197 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3198 gfc_add_expr_to_block (&block, tmp);
3208 /* Restore the original index variables. */
3209 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3210 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3212 /* Free the space for var, start, end, step, varexpr. */
3218 gfc_free (saved_vars);
3220 /* Free the space for this forall_info. */
3225 /* Free the temporary for the mask. */
3226 tmp = gfc_call_free (pmask);
3227 gfc_add_expr_to_block (&block, tmp);
3230 pushdecl (maskindex);
3232 gfc_add_block_to_block (&pre, &block);
3233 gfc_add_block_to_block (&pre, &post);
3235 return gfc_finish_block (&pre);
3239 /* Translate the FORALL statement or construct. */
3241 tree gfc_trans_forall (gfc_code * code)
/* Outermost entry point: NULL means there is no enclosing FORALL nest.  */
3243 return gfc_trans_forall_1 (code, NULL);
3247 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3248 If the WHERE construct is nested in FORALL, compute the overall temporary
3249 needed by the WHERE mask expression multiplied by the iterator number of
3251 ME is the WHERE mask expression.
3252 MASK is the current execution mask upon input, whose sense may or may
3253 not be inverted as specified by the INVERT argument.
3254 CMASK is the updated execution mask on output, or NULL if not required.
3255 PMASK is the pending execution mask on output, or NULL if not required.
3256 BLOCK is the block in which to place the condition evaluation loops. */
3259 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3260 tree mask, bool invert, tree cmask, tree pmask,
3261 tree mask_type, stmtblock_t * block)
3266 stmtblock_t body, body1;
3267 tree count, cond, mtmp;
3270 gfc_init_loopinfo (&loop)
3272 lss = gfc_walk_expr (me);
3273 rss = gfc_walk_expr (me);
3275 /* Variable to index the temporary. */
3276 count = gfc_create_var (gfc_array_index_type, "count");
3277 /* Initialize count. */
3278 gfc_add_modify (block, count, gfc_index_zero_node);
3280 gfc_start_block (&body);
3282 gfc_init_se (&rse, NULL);
3283 gfc_init_se (&lse, NULL);
/* Scalar mask expressions need no scalarizer; array ones do.  */
3285 if (lss == gfc_ss_terminator)
3287 gfc_init_block (&body1);
3291 /* Initialize the loop. */
3292 gfc_init_loopinfo (&loop);
3294 /* We may need LSS to determine the shape of the expression. */
3295 gfc_add_ss_to_loop (&loop, lss);
3296 gfc_add_ss_to_loop (&loop, rss);
3298 gfc_conv_ss_startstride (&loop);
3299 gfc_conv_loop_setup (&loop, &me->where);
3301 gfc_mark_ss_chain_used (rss, 1);
3302 /* Start the loop body. */
3303 gfc_start_scalarized_body (&loop, &body1);
3305 /* Translate the expression. */
3306 gfc_copy_loopinfo_to_se (&rse, &loop);
3308 gfc_conv_expr (&rse, me);
3311 /* Variable to evaluate mask condition. */
3312 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the (possibly inverted) incoming mask element; only needed
   when there is an incoming mask and at least one output mask.  */
3313 if (mask && (cmask || pmask))
3314 mtmp = gfc_create_var (mask_type, "mask");
3315 else mtmp = NULL_TREE;
3317 gfc_add_block_to_block (&body1, &lse.pre);
3318 gfc_add_block_to_block (&body1, &rse.pre);
3320 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3322 if (mask && (cmask || pmask))
3324 tmp = gfc_build_array_ref (mask, count, NULL);
3326 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3327 gfc_add_modify (&body1, mtmp, tmp);
/* Control mask: elements where both the incoming mask and the WHERE
   condition select this clause.  */
3332 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3335 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3336 gfc_add_modify (&body1, tmp1, tmp);
/* Pending mask: elements still active but NOT taken by this clause,
   to be consumed by subsequent ELSEWHERE clauses.  */
3341 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3342 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3344 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3345 gfc_add_modify (&body1, tmp1, tmp);
3348 gfc_add_block_to_block (&body1, &lse.post);
3349 gfc_add_block_to_block (&body1, &rse.post);
3351 if (lss == gfc_ss_terminator)
3353 gfc_add_block_to_block (&body, &body1);
3357 /* Increment count. */
3358 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3359 gfc_index_one_node);
3360 gfc_add_modify (&body1, count, tmp1);
3362 /* Generate the copying loops. */
3363 gfc_trans_scalarizing_loops (&loop, &body1);
3365 gfc_add_block_to_block (&body, &loop.pre);
3366 gfc_add_block_to_block (&body, &loop.post);
3368 gfc_cleanup_loop (&loop);
3369 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3370 as tree nodes in SS may not be valid in different scope. */
3373 tmp1 = gfc_finish_block (&body);
3374 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3375 if (nested_forall_info != NULL)
3376 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3378 gfc_add_expr_to_block (block, tmp1);
3382 /* Translate an assignment statement in a WHERE statement or construct
3383 statement. The MASK expression is used to control which elements
3384 of EXPR1 shall be assigned. The sense of MASK is specified by
3388 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3389 tree mask, bool invert,
3390 tree count1, tree count2,
3396 gfc_ss *lss_section;
3403 tree index, maskexpr;
3405 /* A defined assignment. */
3406 if (cnext && cnext->resolved_sym)
3407 return gfc_trans_call (cnext, true, mask, count1, invert);
3410 /* TODO: handle this special case.
3411 Special case a single function returning an array. */
3412 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3414 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3420 /* Assignment of the form lhs = rhs. */
3421 gfc_start_block (&block);
3423 gfc_init_se (&lse, NULL);
3424 gfc_init_se (&rse, NULL);
3427 lss = gfc_walk_expr (expr1);
3430 /* In each where-assign-stmt, the mask-expr and the variable being
3431 defined shall be arrays of the same shape. */
3432 gcc_assert (lss != gfc_ss_terminator);
3434 /* The assignment needs scalarization. */
3437 /* Find a non-scalar SS from the lhs. */
3438 while (lss_section != gfc_ss_terminator
3439 && lss_section->type != GFC_SS_SECTION)
3440 lss_section = lss_section->next;
3442 gcc_assert (lss_section != gfc_ss_terminator);
3444 /* Initialize the scalarizer. */
3445 gfc_init_loopinfo (&loop);
3448 rss = gfc_walk_expr (expr2);
3449 if (rss == gfc_ss_terminator)
3451 /* The rhs is scalar. Add a ss for the expression. */
3452 rss = gfc_get_ss ();
3454 rss->next = gfc_ss_terminator;
3455 rss->type = GFC_SS_SCALAR;
3459 /* Associate the SS with the loop. */
3460 gfc_add_ss_to_loop (&loop, lss);
3461 gfc_add_ss_to_loop (&loop, rss);
3463 /* Calculate the bounds of the scalarization. */
3464 gfc_conv_ss_startstride (&loop);
3466 /* Resolve any data dependencies in the statement. */
3467 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3469 /* Setup the scalarizing loops. */
3470 gfc_conv_loop_setup (&loop, &expr2->where);
3472 /* Setup the gfc_se structures. */
3473 gfc_copy_loopinfo_to_se (&lse, &loop);
3474 gfc_copy_loopinfo_to_se (&rse, &loop);
3477 gfc_mark_ss_chain_used (rss, 1);
/* temp_ss non-NULL means the dependency resolver inserted an rhs
   temporary; route the store through it.  */
3478 if (loop.temp_ss == NULL)
3481 gfc_mark_ss_chain_used (lss, 1);
3485 lse.ss = loop.temp_ss;
3486 gfc_mark_ss_chain_used (lss, 3);
3487 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3490 /* Start the scalarized loop body. */
3491 gfc_start_scalarized_body (&loop, &body);
3493 /* Translate the expression. */
3494 gfc_conv_expr (&rse, expr2);
3495 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3497 gfc_conv_tmp_array_ref (&lse);
3498 gfc_advance_se_ss_chain (&lse);
3501 gfc_conv_expr (&lse, expr1);
3503 /* Form the mask expression according to the mask. */
3505 maskexpr = gfc_build_array_ref (mask, index, NULL);
3507 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3509 /* Use the scalar assignment as is, guarded by the mask element.  */
3510 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3511 loop.temp_ss != NULL, false, true);
3513 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3515 gfc_add_expr_to_block (&body, tmp);
3517 if (lss == gfc_ss_terminator)
3519 /* Increment count1. */
3520 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3521 count1, gfc_index_one_node);
3522 gfc_add_modify (&body, count1, tmp);
3524 /* Use the scalar assignment as is. */
3525 gfc_add_block_to_block (&block, &body);
3529 gcc_assert (lse.ss == gfc_ss_terminator
3530 && rse.ss == gfc_ss_terminator);
3532 if (loop.temp_ss != NULL)
3534 /* Increment count1 before finishing the main body of a scalarized
3536 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3537 count1, gfc_index_one_node);
3538 gfc_add_modify (&body, count1, tmp);
3539 gfc_trans_scalarized_loop_boundary (&loop, &body);
3541 /* We need to copy the temporary to the actual lhs. */
3542 gfc_init_se (&lse, NULL);
3543 gfc_init_se (&rse, NULL);
3544 gfc_copy_loopinfo_to_se (&lse, &loop);
3545 gfc_copy_loopinfo_to_se (&rse, &loop);
3547 rse.ss = loop.temp_ss;
3550 gfc_conv_tmp_array_ref (&rse);
3551 gfc_advance_se_ss_chain (&rse);
3552 gfc_conv_expr (&lse, expr1);
3554 gcc_assert (lse.ss == gfc_ss_terminator
3555 && rse.ss == gfc_ss_terminator);
3557 /* Form the mask expression according to the mask tree list. */
3559 maskexpr = gfc_build_array_ref (mask, index, NULL);
3561 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3564 /* Use the scalar assignment as is. */
3565 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3567 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3568 build_empty_stmt (input_location));
3569 gfc_add_expr_to_block (&body, tmp);
3571 /* Increment count2. */
3572 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3573 count2, gfc_index_one_node);
3574 gfc_add_modify (&body, count2, tmp);
3578 /* Increment count1. */
3579 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3580 count1, gfc_index_one_node);
3581 gfc_add_modify (&body, count1, tmp);
3584 /* Generate the copying loops. */
3585 gfc_trans_scalarizing_loops (&loop, &body);
3587 /* Wrap the whole thing up. */
3588 gfc_add_block_to_block (&block, &loop.pre);
3589 gfc_add_block_to_block (&block, &loop.post);
3590 gfc_cleanup_loop (&loop);
3593 return gfc_finish_block (&block);
3597 /* Translate the WHERE construct or statement.
3598 This function can be called iteratively to translate the nested WHERE
3599 construct or statement.
3600 MASK is the control mask, with sense given by INVERT; NULL_TREE for the
outermost WHERE. NESTED_FORALL_INFO describes any enclosing FORALL nest,
and BLOCK receives the generated code. */
3603 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3604 forall_info * nested_forall_info, stmtblock_t * block)
3606 stmtblock_t inner_size_body;
3607 tree inner_size, size;
3616 tree count1, count2;
3620 tree pcmask = NULL_TREE;
3621 tree ppmask = NULL_TREE;
3622 tree cmask = NULL_TREE;
3623 tree pmask = NULL_TREE;
3624 gfc_actual_arglist *arg;
3626 /* the WHERE statement or the WHERE construct statement. */
3627 cblock = code->block;
3629 /* As the mask array can be very big, prefer compact boolean types. */
3630 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3632 /* Determine which temporary masks are needed. */
3635 /* One clause: No ELSEWHEREs. */
3636 need_cmask = (cblock->next != 0);
3639 else if (cblock->block->block)
3641 /* Three or more clauses: Conditional ELSEWHEREs. */
3645 else if (cblock->next)
3647 /* Two clauses, the first non-empty. */
3649 need_pmask = (mask != NULL_TREE
3650 && cblock->block->next != 0);
3652 else if (!cblock->block->next)
3654 /* Two clauses, both empty. */
3658 /* Two clauses, the first empty, the second non-empty. */
3661 need_cmask = (cblock->block->expr1 != 0);
3670 if (need_cmask || need_pmask)
3672 /* Calculate the size of temporary needed by the mask-expr. */
3673 gfc_init_block (&inner_size_body);
3674 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3675 &inner_size_body, &lss, &rss);
3677 /* Calculate the total size of temporary needed. */
3678 size = compute_overall_iter_number (nested_forall_info, inner_size,
3679 &inner_size_body, block);
3681 /* Check whether the size is negative, clamping it to zero. */
3682 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3683 gfc_index_zero_node);
3684 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3685 gfc_index_zero_node, size);
3686 size = gfc_evaluate_now (size, block);
3688 /* Allocate temporary for WHERE mask if needed. */
3690 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3693 /* Allocate temporary for !mask if needed. */
3695 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3701 /* Each time around this loop, the where clause is conditional
3702 on the value of mask and invert, which are updated at the
3703 bottom of the loop. */
3705 /* Has mask-expr. */
3708 /* Ensure that the WHERE mask will be evaluated exactly once.
3709 If there are no statements in this WHERE/ELSEWHERE clause,
3710 then we don't need to update the control mask (cmask).
3711 If this is the last clause of the WHERE construct, then
3712 we don't need to update the pending control mask (pmask). */
3714 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3716 cblock->next ? cmask : NULL_TREE,
3717 cblock->block ? pmask : NULL_TREE,
3720 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3722 (cblock->next || cblock->block)
3723 ? cmask : NULL_TREE,
3724 NULL_TREE, mask_type, block);
3728 /* It's a final elsewhere-stmt. No mask-expr is present. */
3732 /* The body of this where clause is controlled by cmask with
3733 sense specified by invert. */
3735 /* Get the assignment statement of a WHERE statement, or the first
3736 statement in where-body-construct of a WHERE construct. */
3737 cnext = cblock->next;
3742 /* WHERE assignment statement. */
3743 case EXEC_ASSIGN_CALL:
3745 arg = cnext->ext.actual;
3746 expr1 = expr2 = NULL;
3747 for (; arg; arg = arg->next)
3759 expr1 = cnext->expr1;
3760 expr2 = cnext->expr2;
/* Inside a FORALL nest a data dependency forces an rhs temporary
   (defined-assignment calls excluded).  */
3762 if (nested_forall_info != NULL)
3764 need_temp = gfc_check_dependency (expr1, expr2, 0);
3765 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3766 gfc_trans_assign_need_temp (expr1, expr2,
3768 nested_forall_info, block);
3771 /* Variables to control maskexpr. */
3772 count1 = gfc_create_var (gfc_array_index_type, "count1");
3773 count2 = gfc_create_var (gfc_array_index_type, "count2");
3774 gfc_add_modify (block, count1, gfc_index_zero_node);
3775 gfc_add_modify (block, count2, gfc_index_zero_node);
3777 tmp = gfc_trans_where_assign (expr1, expr2,
3782 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3784 gfc_add_expr_to_block (block, tmp);
3789 /* Variables to control maskexpr. */
3790 count1 = gfc_create_var (gfc_array_index_type, "count1");
3791 count2 = gfc_create_var (gfc_array_index_type, "count2");
3792 gfc_add_modify (block, count1, gfc_index_zero_node);
3793 gfc_add_modify (block, count2, gfc_index_zero_node);
3795 tmp = gfc_trans_where_assign (expr1, expr2,
3799 gfc_add_expr_to_block (block, tmp);
3804 /* WHERE or WHERE construct is part of a where-body-construct:
recurse with this clause's control mask. */
3806 gfc_trans_where_2 (cnext, cmask, invert,
3807 nested_forall_info, block);
3814 /* The next statement within the same where-body-construct. */
3815 cnext = cnext->next;
3817 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3818 cblock = cblock->block;
3819 if (mask == NULL_TREE)
3821 /* If we're the initial WHERE, we can simply invert the sense
3822 of the current mask to obtain the "mask" for the remaining
3829 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3835 /* If we allocated a pending mask array, deallocate it now. */
3838 tmp = gfc_call_free (ppmask);
3839 gfc_add_expr_to_block (block, tmp);
3842 /* If we allocated a current mask array, deallocate it now. */
3845 tmp = gfc_call_free (pcmask);
3846 gfc_add_expr_to_block (block, tmp);
3850 /* Translate a simple WHERE construct or statement without dependencies.
3851 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3852 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3853 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3856 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3858 stmtblock_t block, body;
3859 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3860 tree tmp, cexpr, tstmt, estmt;
3861 gfc_ss *css, *tdss, *tsss;
3862 gfc_se cse, tdse, tsse, edse, esse;
3867 /* Allow the scalarizer to workshare simple where loops. */
3868 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3869 ompws_flags |= OMPWS_SCALARIZER_WS;
/* Pick apart the single assignment of the WHERE body and, when an
   ELSEWHERE block is present, its single assignment too (edst/esrc
   stay NULL otherwise).  The caller guarantees both bodies are single
   assignments.  */
3871 cond = cblock->expr1;
3872 tdst = cblock->next->expr1;
3873 tsrc = cblock->next->expr2;
3874 edst = eblock ? eblock->next->expr1 : NULL;
3875 esrc = eblock ? eblock->next->expr2 : NULL;
3877 gfc_start_block (&block);
3878 gfc_init_loopinfo (&loop);
3880 /* Handle the condition. */
3881 gfc_init_se (&cse, NULL);
3882 css = gfc_walk_expr (cond);
3883 gfc_add_ss_to_loop (&loop, css);
3885 /* Handle the then-clause. */
3886 gfc_init_se (&tdse, NULL);
3887 gfc_init_se (&tsse, NULL);
3888 tdss = gfc_walk_expr (tdst);
3889 tsss = gfc_walk_expr (tsrc);
/* A scalar RHS produces no scalarization chain; give it a
   GFC_SS_SCALAR entry so the scalarizer still evaluates it.  */
3890 if (tsss == gfc_ss_terminator)
3892 tsss = gfc_get_ss ();
3894 tsss->next = gfc_ss_terminator;
3895 tsss->type = GFC_SS_SCALAR;
3898 gfc_add_ss_to_loop (&loop, tdss);
3899 gfc_add_ss_to_loop (&loop, tsss);
3903 /* Handle the else clause. */
3904 gfc_init_se (&edse, NULL);
3905 gfc_init_se (&esse, NULL);
3906 edss = gfc_walk_expr (edst);
3907 esss = gfc_walk_expr (esrc);
/* Same scalar-RHS treatment for the ELSEWHERE assignment.  */
3908 if (esss == gfc_ss_terminator)
3910 esss = gfc_get_ss ();
3912 esss->next = gfc_ss_terminator;
3913 esss->type = GFC_SS_SCALAR;
3916 gfc_add_ss_to_loop (&loop, edss);
3917 gfc_add_ss_to_loop (&loop, esss);
/* Set up a single scalarized loop covering mask, then- and
   else-assignments; this coalesces the two loops the standard
   describes into one.  */
3920 gfc_conv_ss_startstride (&loop);
3921 gfc_conv_loop_setup (&loop, &tdst->where);
3923 gfc_mark_ss_chain_used (css, 1);
3924 gfc_mark_ss_chain_used (tdss, 1);
3925 gfc_mark_ss_chain_used (tsss, 1);
3928 gfc_mark_ss_chain_used (edss, 1);
3929 gfc_mark_ss_chain_used (esss, 1);
3932 gfc_start_scalarized_body (&loop, &body);
3934 gfc_copy_loopinfo_to_se (&cse, &loop);
3935 gfc_copy_loopinfo_to_se (&tdse, &loop);
3936 gfc_copy_loopinfo_to_se (&tsse, &loop);
3942 gfc_copy_loopinfo_to_se (&edse, &loop);
3943 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the mask expression once per iteration.  */
3948 gfc_conv_expr (&cse, cond);
3949 gfc_add_block_to_block (&body, &cse.pre);
/* Then-assignment: if the loop required a temporary, reference it
   instead of the destination directly.  */
3952 gfc_conv_expr (&tsse, tsrc);
3953 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3955 gfc_conv_tmp_array_ref (&tdse);
3956 gfc_advance_se_ss_chain (&tdse);
3959 gfc_conv_expr (&tdse, tdst);
/* Else-assignment, handled the same way.  */
3963 gfc_conv_expr (&esse, esrc);
3964 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3966 gfc_conv_tmp_array_ref (&edse);
3967 gfc_advance_se_ss_chain (&edse);
3970 gfc_conv_expr (&edse, edst);
/* Guard both scalar assignments with the mask:
   cond ? then-assign : else-assign (empty when no ELSEWHERE).  */
3973 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
3974 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
3976 : build_empty_stmt (input_location);
3977 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3978 gfc_add_expr_to_block (&body, tmp);
3979 gfc_add_block_to_block (&body, &cse.post);
/* Generate the surrounding loops and clean up.  */
3981 gfc_trans_scalarizing_loops (&loop, &body);
3982 gfc_add_block_to_block (&block, &loop.pre);
3983 gfc_add_block_to_block (&block, &loop.post);
3984 gfc_cleanup_loop (&loop);
3986 return gfc_finish_block (&block);
3989 /* As the WHERE or WHERE construct statement can be nested, we call
3990 gfc_trans_where_2 to do the translation, and pass the initial
3991 NULL values for both the control mask and the pending control mask. */
3994 gfc_trans_where (gfc_code * code)
/* Fast path detection: a WHERE whose body is exactly one assignment
   (and optionally one ELSEWHERE assignment) may be translated without
   temporaries by gfc_trans_where_3, provided the dependency checks
   below all succeed.  */
4000 cblock = code->block;
4002 && cblock->next->op == EXEC_ASSIGN
4003 && !cblock->next->next)
4005 eblock = cblock->block;
4008 /* A simple "WHERE (cond) x = y" statement or block is
4009 dependence free if cond is not dependent upon writing x,
4010 and the source y is unaffected by the destination x. */
4011 if (!gfc_check_dependency (cblock->next->expr1,
4013 && !gfc_check_dependency (cblock->next->expr1,
4014 cblock->next->expr2, 0))
4015 return gfc_trans_where_3 (cblock, NULL);
4017 else if (!eblock->expr1
4020 && eblock->next->op == EXEC_ASSIGN
4021 && !eblock->next->next)
4023 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4024 block is dependence free if cond is not dependent on writes
4025 to x1 and x2, y1 is not dependent on writes to x2, and y2
4026 is not dependent on writes to x1, and both y's are not
4027 dependent upon their own x's. In addition to this, the
4028 final two dependency checks below exclude all but the same
4029 array reference if the where and elswhere destinations
4030 are the same. In short, this is VERY conservative and this
4031 is needed because the two loops, required by the standard
4032 are coalesced in gfc_trans_where_3. */
4033 if (!gfc_check_dependency(cblock->next->expr1,
4035 && !gfc_check_dependency(eblock->next->expr1,
4037 && !gfc_check_dependency(cblock->next->expr1,
4038 eblock->next->expr2, 1)
4039 && !gfc_check_dependency(eblock->next->expr1,
4040 cblock->next->expr2, 1)
4041 && !gfc_check_dependency(cblock->next->expr1,
4042 cblock->next->expr2, 1)
4043 && !gfc_check_dependency(eblock->next->expr1,
4044 eblock->next->expr2, 1)
4045 && !gfc_check_dependency(cblock->next->expr1,
4046 eblock->next->expr1, 0)
4047 && !gfc_check_dependency(eblock->next->expr1,
4048 cblock->next->expr1, 0))
4049 return gfc_trans_where_3 (cblock, eblock);
/* General case: fall back to the mask-based translation, starting
   with NULL control and pending-control masks.  */
4053 gfc_start_block (&block);
4055 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4057 return gfc_finish_block (&block);
4061 /* CYCLE a DO loop. The label decl has already been created by
4062 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4063 node at the head of the loop. We must mark the label as used. */
4066 gfc_trans_cycle (gfc_code * code)
/* The enclosing DO loop stores its labels in backend_decl of the loop
   head: TREE_PURPOSE holds the cycle label (TREE_VALUE holds the exit
   label, see gfc_trans_exit).  Mark it used to suppress warnings,
   then emit a plain goto.  */
4070 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
4071 TREE_USED (cycle_label) = 1;
4072 return build1_v (GOTO_EXPR, cycle_label);
4076 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4077 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4081 gfc_trans_exit (gfc_code * code)
/* Counterpart of gfc_trans_cycle: the exit label is stored in
   TREE_VALUE of the loop head's backend_decl.  Mark it used and emit
   the goto.  */
4085 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
4086 TREE_USED (exit_label) = 1;
4087 return build1_v (GOTO_EXPR, exit_label);
4091 /* Translate the ALLOCATE statement. */
4094 gfc_trans_allocate (gfc_code * code)
/* code->expr1 is STAT=, code->expr2 is ERRMSG=, code->expr3 is
   SOURCE=; code->ext.alloc.list is the list of objects to allocate.  */
4107 if (!code->ext.alloc.list)
4110 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4112 gfc_start_block (&block);
4114 /* Either STAT= and/or ERRMSG is present. */
4115 if (code->expr1 || code->expr2)
4117 tree gfc_int4_type_node = gfc_get_int_type (4);
/* "stat" receives each allocation's status; on failure we jump to
   error_label instead of continuing with the remaining objects.  */
4119 stat = gfc_create_var (gfc_int4_type_node, "stat");
4120 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4122 error_label = gfc_build_label_decl (NULL_TREE);
4123 TREE_USED (error_label) = 1;
/* Translate each object in the allocation list in turn.  */
4126 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4128 expr = gfc_copy_expr (al->expr);
/* For CLASS objects, allocate the $data component, not the
   container.  */
4130 if (expr->ts.type == BT_CLASS)
4131 gfc_add_component_ref (expr, "$data");
4133 gfc_init_se (&se, NULL);
4134 gfc_start_block (&se.pre);
4136 se.want_pointer = 1;
4137 se.descriptor_only = 1;
4138 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array objects and returns nonzero; the
   fallback below handles scalars and derived types.  */
4140 if (!gfc_array_allocate (&se, expr, pstat))
4142 /* A scalar or derived type. */
4144 /* Determine allocate size. */
/* With a CLASS SOURCE=, the size is dynamic: read it from the
   source's $vptr->$size at run time.  */
4145 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4149 sz = gfc_copy_expr (code->expr3);
4150 gfc_add_component_ref (sz, "$vptr");
4151 gfc_add_component_ref (sz, "$size");
4152 gfc_init_se (&se_sz, NULL);
4153 gfc_conv_expr (&se_sz, sz);
/* Otherwise the size is known at compile time, taken (in order of
   preference) from the SOURCE= type, the type-spec of the ALLOCATE,
   or the object's own type.  */
4157 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4158 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4159 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4160 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4162 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4164 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4165 memsz = se.string_length;
4167 /* Allocate - for non-pointers with re-alloc checking. */
4174 /* Find the last reference in the chain. */
4175 while (ref && ref->next != NULL)
4177 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
/* Whether re-allocation checking applies depends on the allocatable
   attribute of the object (or its ultimate component).  */
4182 allocatable = expr->symtree->n.sym->attr.allocatable;
4184 allocatable = ref->u.c.component->attr.allocatable;
4187 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4190 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
/* Store the returned pointer into the object.  */
4193 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4194 fold_convert (TREE_TYPE (se.expr), tmp));
4195 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, branch to the error label when stat != 0.  */
4197 if (code->expr1 || code->expr2)
4199 tmp = build1_v (GOTO_EXPR, error_label);
4200 parm = fold_build2 (NE_EXPR, boolean_type_node,
4201 stat, build_int_cst (TREE_TYPE (stat), 0));
4202 tmp = fold_build3 (COND_EXPR, void_type_node,
4203 parm, tmp, build_empty_stmt (input_location))
4204 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components need those components
   nullified so later (re)allocation/deallocation behaves.  */
4207 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4209 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4210 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4211 gfc_add_expr_to_block (&se.pre, tmp);
4216 tmp = gfc_finish_block (&se.pre);
4217 gfc_add_expr_to_block (&block, tmp);
4219 /* Initialization via SOURCE block. */
4222 gfc_expr *rhs = gfc_copy_expr (code->expr3);
/* CLASS destination: copy the payload ($data of a CLASS source)
   byte-wise with memcpy.  */
4223 if (al->expr->ts.type == BT_CLASS)
4226 if (rhs->ts.type == BT_CLASS)
4227 gfc_add_component_ref (rhs, "$data");
4228 gfc_init_se (&dst, NULL);
4229 gfc_init_se (&src, NULL);
4230 gfc_conv_expr (&dst, expr);
4231 gfc_conv_expr (&src, rhs);
4232 gfc_add_block_to_block (&block, &src.pre);
4233 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
/* Non-CLASS destination: translate as an ordinary assignment.  */
4236 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4238 gfc_free_expr (rhs);
4239 gfc_add_expr_to_block (&block, tmp);
4242 /* Allocation of CLASS entities. */
/* NOTE(review): in this view expr is still referenced below after
   gfc_free_expr (expr); confirm against the full source whether
   intervening (elided) lines make that safe.  */
4243 gfc_free_expr (expr);
4245 if (expr->ts.type == BT_CLASS)
4250 /* Initialize VPTR for CLASS objects. */
4251 lhs = gfc_expr_to_initialize (expr);
4252 gfc_add_component_ref (lhs, "$vptr");
4254 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4256 /* VPTR must be determined at run time. */
4257 rhs = gfc_copy_expr (code->expr3);
4258 gfc_add_component_ref (rhs, "$vptr");
4259 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4260 gfc_add_expr_to_block (&block, tmp);
4261 gfc_free_expr (rhs);
4265 /* VPTR is fixed at compile time. */
/* Pick the declared type whose vtab should be stored: from SOURCE=,
   the object itself, the ALLOCATE type-spec, or the CLASS declared
   type, in that order.  */
4269 ts = &code->expr3->ts;
4270 else if (expr->ts.type == BT_DERIVED)
4272 else if (code->ext.alloc.ts.type == BT_DERIVED)
4273 ts = &code->ext.alloc.ts;
4274 else if (expr->ts.type == BT_CLASS)
4275 ts = &expr->ts.u.derived->components->ts;
4279 if (ts->type == BT_DERIVED)
4281 vtab = gfc_find_derived_vtab (ts->u.derived);
4283 gfc_init_se (&lse, NULL);
4284 lse.want_pointer = 1;
4285 gfc_conv_expr (&lse, lhs);
4286 tmp = gfc_build_addr_expr (NULL_TREE,
4287 gfc_get_symbol_decl (vtab));
4288 gfc_add_modify (&block, lse.expr,
4289 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error label: failed allocations jump here.  Assign stat to the
   STAT= variable if present.  */
4299 tmp = build1_v (LABEL_EXPR, error_label);
4300 gfc_add_expr_to_block (&block, tmp);
4302 gfc_init_se (&se, NULL);
4303 gfc_conv_expr_lhs (&se, code->expr1);
4304 tmp = convert (TREE_TYPE (se.expr), stat);
4305 gfc_add_modify (&block, se.expr, tmp);
4311 /* A better error message may be possible, but not required. */
4312 const char *msg = "Attempt to allocate an allocated object";
4313 tree errmsg, slen, dlen;
4315 gfc_init_se (&se, NULL);
4316 gfc_conv_expr_lhs (&se, code->expr2);
4318 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4320 gfc_add_modify (&block, errmsg,
4321 gfc_build_addr_expr (pchar_type_node,
4322 gfc_build_localized_cstring_const (msg)));
/* Copy min (strlen (msg), len (ERRMSG)) bytes into the user's
   ERRMSG variable, but only when stat != 0.  */
4324 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4325 dlen = gfc_get_expr_charlen (code->expr2);
4326 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4328 dlen = build_call_expr_loc (input_location,
4329 built_in_decls[BUILT_IN_MEMCPY], 3,
4330 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4332 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4333 build_int_cst (TREE_TYPE (stat), 0));
4335 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4337 gfc_add_expr_to_block (&block, tmp);
4340 return gfc_finish_block (&block);
4344 /* Translate a DEALLOCATE statement. */
4347 gfc_trans_deallocate (gfc_code *code)
/* code->expr1 is STAT=, code->expr2 is ERRMSG=; code->ext.alloc.list
   is the list of objects to deallocate.  */
4352 tree apstat, astat, pstat, stat, tmp;
4355 pstat = apstat = stat = astat = tmp = NULL_TREE;
4357 gfc_start_block (&block);
4359 /* Count the number of failed deallocations. If deallocate() was
4360 called with STAT= , then set STAT to the count. If deallocate
4361 was called with ERRMSG, then set ERRMG to a string. */
4362 if (code->expr1 || code->expr2)
4364 tree gfc_int4_type_node = gfc_get_int_type (4);
/* "stat" holds the status of the most recent deallocation; "astat"
   accumulates failures across the whole list.  */
4366 stat = gfc_create_var (gfc_int4_type_node, "stat");
4367 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4369 /* Running total of possible deallocation failures. */
4370 astat = gfc_create_var (gfc_int4_type_node, "astat");
4371 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4373 /* Initialize astat to 0. */
4374 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Translate each object in the deallocation list.  */
4377 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4380 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4382 gfc_init_se (&se, NULL);
4383 gfc_start_block (&se.pre);
4385 se.want_pointer = 1;
4386 se.descriptor_only = 1;
4387 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: free the components
   first, unless the object is reached through a pointer (whose
   target's components must not be freed here).  */
4389 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4392 gfc_ref *last = NULL;
4393 for (ref = expr->ref; ref; ref = ref->next)
4394 if (ref->type == REF_COMPONENT)
4397 /* Do not deallocate the components of a derived type
4398 ultimate pointer component. */
4399 if (!(last && last->u.c.component->attr.pointer)
4400 && !(!last && expr->symtree->n.sym->attr.pointer))
4402 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4404 gfc_add_expr_to_block (&se.pre, tmp);
/* Free the object itself: array path vs. scalar path.  */
4409 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4412 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4413 gfc_add_expr_to_block (&se.pre, tmp);
/* Null the pointer/descriptor data after the deallocation.  */
4415 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4416 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4419 gfc_add_expr_to_block (&se.pre, tmp);
4421 /* Keep track of the number of failed deallocations by adding stat
4422 of the last deallocation to the running total. */
4423 if (code->expr1 || code->expr2)
4425 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4426 gfc_add_modify (&se.pre, astat, apstat);
4429 tmp = gfc_finish_block (&se.pre);
4430 gfc_add_expr_to_block (&block, tmp);
/* STAT= variable receives the accumulated failure count.  */
4437 gfc_init_se (&se, NULL);
4438 gfc_conv_expr_lhs (&se, code->expr1);
4439 tmp = convert (TREE_TYPE (se.expr), astat);
4440 gfc_add_modify (&block, se.expr, tmp);
4446 /* A better error message may be possible, but not required. */
4447 const char *msg = "Attempt to deallocate an unallocated object";
4448 tree errmsg, slen, dlen;
4450 gfc_init_se (&se, NULL);
4451 gfc_conv_expr_lhs (&se, code->expr2);
4453 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4455 gfc_add_modify (&block, errmsg,
4456 gfc_build_addr_expr (pchar_type_node,
4457 gfc_build_localized_cstring_const (msg)));
/* Copy min (strlen (msg), len (ERRMSG)) bytes into the user's
   ERRMSG variable, but only when astat != 0.  */
4459 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4460 dlen = gfc_get_expr_charlen (code->expr2);
4461 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4463 dlen = build_call_expr_loc (input_location,
4464 built_in_decls[BUILT_IN_MEMCPY], 3,
4465 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4467 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4468 build_int_cst (TREE_TYPE (astat), 0));
4470 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4472 gfc_add_expr_to_block (&block, tmp);
4475 return gfc_finish_block (&block);