1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
40 typedef struct iter_info
46 struct iter_info *next;
50 typedef struct forall_info
57 struct forall_info *prev_nest;
61 static void gfc_trans_where_2 (gfc_code *, tree, bool,
62 forall_info *, stmtblock_t *);
64 /* Translate a F95 label number to a LABEL_EXPR. */
/* NOTE(review): this excerpt omits interleaved lines (return type, braces);
   the code below is kept byte-identical to the original.  */
67 gfc_trans_label_here (gfc_code * code)
/* Emit the backend label decl registered for this statement label.  */
69 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only valid for variables that appeared in an (obsolescent) ASSIGN.  */
80 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
81 gfc_conv_expr (se, expr);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se->expr) == COMPONENT_REF)
84 se->expr = TREE_OPERAND (se->expr, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se->expr) == INDIRECT_REF)
87 se->expr = TREE_OPERAND (se->expr, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code * code)
102 /* Start a new block. */
103 gfc_init_se (&se, NULL);
104 gfc_start_block (&se.pre);
105 gfc_conv_label_variable (&se, code->expr1);
/* The ASSIGN variable carries two auxiliary decls: a string length and
   an address slot (see gfc_conv_label_variable above).  */
107 len = GFC_DECL_STRING_LEN (se.expr);
108 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
110 label_tree = gfc_get_label_decl (code->label1);
112 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: store the label address; a length of -1 marks the
   stored value as a jump target rather than a FORMAT string.  */
114 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
115 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT: store the format string and its
   (non-negative) length instead.  */
119 gfc_expr *format = code->label1->format;
121 label_len = format->value.character.length;
122 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
123 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
124 format->value.character.string);
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 gfc_add_modify (&se.pre, len, len_tree);
129 gfc_add_modify (&se.pre, addr, label_tree);
131 return gfc_finish_block (&se.pre);
134 /* Translate a GOTO statement. */
137 gfc_trans_goto (gfc_code * code)
139 locus loc = code->loc;
/* Ordinary (non-assigned) GOTO: jump straight to the label.  */
145 if (code->label1 != NULL)
146 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO: the target address lives in the ASSIGN variable.  */
149 gfc_init_se (&se, NULL);
150 gfc_start_block (&se.pre);
151 gfc_conv_label_variable (&se, code->expr1);
/* A stored length != -1 means the variable currently holds a FORMAT
   label, not a branch target (see gfc_trans_label_assign); trap that
   at runtime.  */
152 tmp = GFC_DECL_STRING_LEN (se.expr);
153 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
154 build_int_cst (TREE_TYPE (tmp), -1));
155 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
156 "Assigned label is not a target label");
158 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
166 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 gfc_trans_entry (gfc_code * code)
177 return build1_v (LABEL_EXPR, code->ext.entry->label);
181 /* Replace a gfc_ss structure by another both in the gfc_se struct
182 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
183 to replace a variable ss by the corresponding temporary. */
186 replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
188 gfc_ss **sess, **loopss;
190 /* The old_ss is a ss for a single variable. */
191 gcc_assert (old_ss->info->type == GFC_SS_SECTION);
/* Find the link in the se chain that points at old_ss; the assert
   guarantees it was actually present.  */
193 for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
196 gcc_assert (*sess != gfc_ss_terminator);
199 new_ss->next = old_ss->next;
/* Do the same splice in the loop's loop_chain.  */
202 for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
203 loopss = &((*loopss)->loop_chain))
204 if (*loopss == old_ss)
206 gcc_assert (*loopss != gfc_ss_terminator);
209 new_ss->loop_chain = old_ss->loop_chain;
210 new_ss->loop = old_ss->loop;
/* old_ss is now unreachable from both chains; release it.  */
212 gfc_free_ss (old_ss);
216 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
217 elemental subroutines. Make temporaries for output arguments if any such
218 dependencies are found. Output arguments are chosen because internal_unpack
219 can be used, as is, to copy the result back to the variable. */
221 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
222 gfc_symbol * sym, gfc_actual_arglist * arg,
223 gfc_dep_check check_variable)
225 gfc_actual_arglist *arg0;
227 gfc_formal_arglist *formal;
/* Nothing to do unless we are inside a scalarization loop.  */
235 if (loopse->ss == NULL)
240 formal = sym->formal;
242 /* Loop over all the arguments testing for dependencies. */
243 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
249 /* Obtain the info structure for the current argument. */
250 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
251 if (ss->info->expr == e)
254 /* If there is a dependency, create a temporary and use it
255 instead of the variable. */
256 fsym = formal ? formal->sym : NULL;
257 if (e->expr_type == EXPR_VARIABLE
259 && fsym->attr.intent != INTENT_IN
260 && gfc_check_fncall_dependency (e, fsym->attr.intent,
261 sym, arg0, check_variable))
263 tree initial, temptype;
264 stmtblock_t temp_post;
/* Build a temporary array ss and swap it in for the variable's ss
   in both the se and loop chains (see replace_ss above).  */
267 tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
269 gfc_mark_ss_chain_used (tmp_ss, 1);
270 tmp_ss->info->expr = ss->info->expr;
271 replace_ss (loopse, ss, tmp_ss);
273 /* Obtain the argument descriptor for unpacking. */
274 gfc_init_se (&parmse, NULL);
275 parmse.want_pointer = 1;
276 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
277 gfc_add_block_to_block (&se->pre, &parmse.pre);
279 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
280 initialize the array temporary with a copy of the values. */
281 if (fsym->attr.intent == INTENT_INOUT
282 || (fsym->ts.type ==BT_DERIVED
283 && fsym->attr.intent == INTENT_OUT))
284 initial = parmse.expr;
288 /* Find the type of the temporary to create; we don't use the type
289 of e itself as this breaks for subcomponent-references in e (where
290 the type of e is that of the final reference, but parmse.expr's
291 type corresponds to the full derived-type). */
292 /* TODO: Fix this somehow so we don't need a temporary of the whole
293 array but instead only the components referenced. */
294 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
295 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
296 temptype = TREE_TYPE (temptype);
297 temptype = gfc_get_element_type (temptype);
299 /* Generate the temporary. Cleaning up the temporary should be the
300 very last thing done, so we add the code to a new block and add it
301 to se->post as last instructions. */
302 size = gfc_create_var (gfc_array_index_type, NULL);
303 data = gfc_create_var (pvoid_type_node, NULL);
304 gfc_init_block (&temp_post);
305 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
306 temptype, initial, false, true,
307 false, &arg->expr->where);
308 gfc_add_modify (&se->pre, size, tmp);
309 tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
310 gfc_add_modify (&se->pre, data, tmp);
312 /* Update other ss' delta. */
313 gfc_set_delta (loopse->loop);
315 /* Copy the result back using unpack. */
316 tmp = build_call_expr_loc (input_location,
317 gfor_fndecl_in_unpack, 2, parmse.expr, data);
318 gfc_add_expr_to_block (&se->post, tmp);
320 /* parmse.pre is already added above. */
321 gfc_add_block_to_block (&se->post, &parmse.post);
322 gfc_add_block_to_block (&se->post, &temp_post);
328 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
/* MASK/COUNT1/INVERT are presumably used when this call is generated
   inside a WHERE/FORALL context -- TODO confirm against callers.  */
331 gfc_trans_call (gfc_code * code, bool dependency_check,
332 tree mask, tree count1, bool invert)
336 int has_alternate_specifier;
337 gfc_dep_check check_variable;
338 tree index = NULL_TREE;
339 tree maskexpr = NULL_TREE;
342 /* A CALL starts a new block because the actual arguments may have to
343 be evaluated first. */
344 gfc_init_se (&se, NULL);
345 gfc_start_block (&se.pre);
347 gcc_assert (code->resolved_sym);
349 ss = gfc_ss_terminator;
350 if (code->resolved_sym->attr.elemental)
351 ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
352 code->expr1, GFC_SS_REFERENCE);
354 /* Is not an elemental subroutine call with array valued arguments. */
355 if (ss == gfc_ss_terminator)
358 /* Translate the call. */
359 has_alternate_specifier
360 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
363 /* A subroutine without side-effect, by definition, does nothing! */
364 TREE_SIDE_EFFECTS (se.expr) = 1;
366 /* Chain the pieces together and return the block. */
367 if (has_alternate_specifier)
/* Alternate return: the following SELECT statement dispatches on the
   value returned by the call; store it into the SELECT variable.  */
369 gfc_code *select_code;
371 select_code = code->next;
372 gcc_assert(select_code->op == EXEC_SELECT);
373 sym = select_code->expr1->symtree->n.sym;
374 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
375 if (sym->backend_decl == NULL)
376 sym->backend_decl = gfc_get_symbol_decl (sym);
377 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
380 gfc_add_expr_to_block (&se.pre, se.expr);
382 gfc_add_block_to_block (&se.pre, &se.post);
387 /* An elemental subroutine call with array valued arguments has
395 /* gfc_walk_elemental_function_args renders the ss chain in the
396 reverse order to the actual argument order. */
397 ss = gfc_reverse_ss (ss);
399 /* Initialize the loop. */
400 gfc_init_se (&loopse, NULL);
401 gfc_init_loopinfo (&loop);
402 gfc_add_ss_to_loop (&loop, ss);
404 gfc_conv_ss_startstride (&loop);
405 /* TODO: gfc_conv_loop_setup generates a temporary for vector
406 subscripts. This could be prevented in the elemental case
407 as temporaries are handled separatedly
408 (below in gfc_conv_elemental_dependencies). */
409 gfc_conv_loop_setup (&loop, &code->expr1->where);
410 gfc_mark_ss_chain_used (ss, 1);
412 /* Convert the arguments, checking for dependencies. */
413 gfc_copy_loopinfo_to_se (&loopse, &loop);
416 /* For operator assignment, do dependency checking. */
417 if (dependency_check)
418 check_variable = ELEM_CHECK_VARIABLE;
420 check_variable = ELEM_DONT_CHECK_VARIABLE;
422 gfc_init_se (&depse, NULL);
423 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
424 code->ext.actual, check_variable);
426 gfc_add_block_to_block (&loop.pre, &depse.pre);
427 gfc_add_block_to_block (&loop.post, &depse.post);
429 /* Generate the loop body. */
430 gfc_start_scalarized_body (&loop, &body);
431 gfc_init_block (&block);
435 /* Form the mask expression according to the mask. */
437 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* INVERT negates the mask (e.g. for an ELSEWHERE branch).  */
439 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
440 TREE_TYPE (maskexpr), maskexpr);
443 /* Add the subroutine call to the block. */
444 gfc_conv_procedure_call (&loopse, code->resolved_sym,
445 code->ext.actual, code->expr1, NULL);
/* Masked case: only call when the mask element is true, and keep the
   mask index counter in step.  */
449 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
450 build_empty_stmt (input_location));
451 gfc_add_expr_to_block (&loopse.pre, tmp);
452 tmp = fold_build2_loc (input_location, PLUS_EXPR,
453 gfc_array_index_type,
454 count1, gfc_index_one_node);
455 gfc_add_modify (&loopse.pre, count1, tmp);
458 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
460 gfc_add_block_to_block (&block, &loopse.pre);
461 gfc_add_block_to_block (&block, &loopse.post);
463 /* Finish up the loop block and the loop. */
464 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
465 gfc_trans_scalarizing_loops (&loop, &body);
466 gfc_add_block_to_block (&se.pre, &loop.pre);
467 gfc_add_block_to_block (&se.pre, &loop.post);
468 gfc_add_block_to_block (&se.pre, &se.post);
469 gfc_cleanup_loop (&loop);
472 return gfc_finish_block (&se.pre);
476 /* Translate the RETURN statement. */
479 gfc_trans_return (gfc_code * code)
487 /* If code->expr is not NULL, this return statement must appear
488 in a subroutine and current_fake_result_decl has already
491 result = gfc_get_fake_result_decl (NULL, 0);
/* Alternate return used without a matching '*' dummy: warn and emit a
   plain return.  */
494 gfc_warning ("An alternate return at %L without a * dummy argument",
495 &code->expr1->where);
496 return gfc_generate_return ();
499 /* Start a new block for this statement. */
500 gfc_init_se (&se, NULL);
501 gfc_start_block (&se.pre);
503 gfc_conv_expr (&se, code->expr1);
505 /* Note that the actually returned expression is a simple value and
506 does not depend on any pointers or such; thus we can clean-up with
507 se.post before returning. */
508 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
509 result, fold_convert (TREE_TYPE (result),
511 gfc_add_expr_to_block (&se.pre, tmp);
512 gfc_add_block_to_block (&se.pre, &se.post);
514 tmp = gfc_generate_return ();
515 gfc_add_expr_to_block (&se.pre, tmp);
516 return gfc_finish_block (&se.pre);
/* No return expression: a simple RETURN.  */
519 return gfc_generate_return ();
523 /* Translate the PAUSE statement. We have to translate this statement
524 to a runtime library call. */
527 gfc_trans_pause (gfc_code * code)
529 tree gfc_int4_type_node = gfc_get_int_type (4);
533 /* Start a new block for this statement. */
534 gfc_init_se (&se, NULL);
535 gfc_start_block (&se.pre);
/* Dispatch on the PAUSE argument: none, integer, or character.  */
538 if (code->expr1 == NULL)
540 tmp = build_int_cst (gfc_int4_type_node, 0);
541 tmp = build_call_expr_loc (input_location,
542 gfor_fndecl_pause_string, 2,
543 build_int_cst (pchar_type_node, 0), tmp);
545 else if (code->expr1->ts.type == BT_INTEGER)
547 gfc_conv_expr (&se, code->expr1);
548 tmp = build_call_expr_loc (input_location,
549 gfor_fndecl_pause_numeric, 1,
550 fold_convert (gfc_int4_type_node, se.expr));
/* Character argument: pass address and length to the library.  */
554 gfc_conv_expr_reference (&se, code->expr1);
555 tmp = build_call_expr_loc (input_location,
556 gfor_fndecl_pause_string, 2,
557 se.expr, se.string_length);
560 gfc_add_expr_to_block (&se.pre, tmp);
562 gfc_add_block_to_block (&se.pre, &se.post);
564 return gfc_finish_block (&se.pre);
568 /* Translate the STOP statement. We have to translate this statement
569 to a runtime library call. */
/* ERROR_STOP selects between STOP and ERROR STOP library entry points;
   with -fcoarray=lib the coarray (_caf_) variants are used instead.  */
572 gfc_trans_stop (gfc_code *code, bool error_stop)
574 tree gfc_int4_type_node = gfc_get_int_type (4);
578 /* Start a new block for this statement. */
579 gfc_init_se (&se, NULL);
580 gfc_start_block (&se.pre);
582 if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop)
584 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
585 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
586 tmp = build_call_expr_loc (input_location, tmp, 0);
587 gfc_add_expr_to_block (&se.pre, tmp);
589 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
590 gfc_add_expr_to_block (&se.pre, tmp);
/* Dispatch on the STOP code: none, integer, or character.  */
593 if (code->expr1 == NULL)
595 tmp = build_int_cst (gfc_int4_type_node, 0);
596 tmp = build_call_expr_loc (input_location,
598 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
599 ? gfor_fndecl_caf_error_stop_str
600 : gfor_fndecl_error_stop_string)
601 : gfor_fndecl_stop_string,
602 2, build_int_cst (pchar_type_node, 0), tmp);
604 else if (code->expr1->ts.type == BT_INTEGER)
606 gfc_conv_expr (&se, code->expr1);
607 tmp = build_call_expr_loc (input_location,
609 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
610 ? gfor_fndecl_caf_error_stop
611 : gfor_fndecl_error_stop_numeric)
612 : gfor_fndecl_stop_numeric_f08, 1,
613 fold_convert (gfc_int4_type_node, se.expr));
/* Character STOP code: pass address and length.  */
617 gfc_conv_expr_reference (&se, code->expr1);
618 tmp = build_call_expr_loc (input_location,
620 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
621 ? gfor_fndecl_caf_error_stop_str
622 : gfor_fndecl_error_stop_string)
623 : gfor_fndecl_stop_string,
624 2, se.expr, se.string_length);
627 gfc_add_expr_to_block (&se.pre, tmp);
629 gfc_add_block_to_block (&se.pre, &se.post);
631 return gfc_finish_block (&se.pre);
/* Translate LOCK / UNLOCK statements.  In the visible single-image path
   this only sets STAT= to zero and LOCK_ACQUIRED= to "acquired"; the
   -fcoarray=lib handling is not part of this excerpt.  */
636 gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
639 tree stat = NULL_TREE, lock_acquired = NULL_TREE;
641 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
642 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
643 if (!code->expr2 && !code->expr4 && gfc_option.coarray != GFC_FCOARRAY_LIB)
646 gfc_init_se (&se, NULL);
647 gfc_start_block (&se.pre);
/* expr2 is the STAT= variable.  */
651 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
652 gfc_init_se (&argse, NULL);
653 gfc_conv_expr_val (&argse, code->expr2);
/* expr4 is the LOCK_ACQUIRED= variable.  */
659 gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
660 gfc_init_se (&argse, NULL);
661 gfc_conv_expr_val (&argse, code->expr4);
662 lock_acquired = argse.expr;
665 if (stat != NULL_TREE)
666 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
668 if (lock_acquired != NULL_TREE)
669 gfc_add_modify (&se.pre, lock_acquired,
670 fold_convert (TREE_TYPE (lock_acquired),
673 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL, SYNC IMAGES and SYNC MEMORY statements; TYPE
   distinguishes them (EXEC_SYNC_ALL / EXEC_SYNC_IMAGES /
   EXEC_SYNC_MEMORY).  expr1 = image set, expr2 = STAT=, expr3 = ERRMSG=.  */
678 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
682 tree images = NULL_TREE, stat = NULL_TREE,
683 errmsg = NULL_TREE, errmsglen = NULL_TREE;
685 /* Short cut: For single images without bound checking or without STAT=,
686 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
687 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
688 && gfc_option.coarray != GFC_FCOARRAY_LIB)
691 gfc_init_se (&se, NULL);
692 gfc_start_block (&se.pre);
/* Scalar image set for SYNC IMAGES.  */
694 if (code->expr1 && code->expr1->rank == 0)
696 gfc_init_se (&argse, NULL);
697 gfc_conv_expr_val (&argse, code->expr1);
/* STAT= variable, if present.  */
703 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
704 gfc_init_se (&argse, NULL);
705 gfc_conv_expr_val (&argse, code->expr2);
709 stat = null_pointer_node;
/* ERRMSG= is only passed through to the library with -fcoarray=lib,
   and never for SYNC MEMORY.  */
711 if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB
712 && type != EXEC_SYNC_MEMORY)
714 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
715 gfc_init_se (&argse, NULL);
716 gfc_conv_expr (&argse, code->expr3);
717 gfc_conv_string_parameter (&argse);
718 errmsg = gfc_build_addr_expr (NULL, argse.expr);
719 errmsglen = argse.string_length;
721 else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
723 errmsg = null_pointer_node;
724 errmsglen = build_int_cst (integer_type_node, 0);
727 /* Check SYNC IMAGES(imageset) for valid image index.
728 FIXME: Add a check for image-set arrays. */
729 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
730 && code->expr1->rank == 0)
/* Single image: the only valid image number is 1.  */
733 if (gfc_option.coarray != GFC_FCOARRAY_LIB)
734 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
735 images, build_int_cst (TREE_TYPE (images), 1));
/* Library coarrays: valid range is 1 .. num_images().  */
739 cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
740 images, gfort_gvar_caf_num_images);
741 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
743 build_int_cst (TREE_TYPE (images), 1));
744 cond = fold_build2_loc (input_location, TRUTH_AND_EXPR,
745 boolean_type_node, cond, cond2);
747 gfc_trans_runtime_check (true, false, cond, &se.pre,
748 &code->expr1->where, "Invalid image number "
750 fold_convert (integer_type_node, se.expr));
753 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
754 image control statements SYNC IMAGES and SYNC ALL. */
755 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
757 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
758 tmp = build_call_expr_loc (input_location, tmp, 0);
759 gfc_add_expr_to_block (&se.pre, tmp);
/* Without the coarray library (or for SYNC MEMORY) there is no call to
   make; just report success via STAT=.  */
762 if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
764 /* Set STAT to zero. */
766 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
768 else if (type == EXEC_SYNC_ALL)
770 /* SYNC ALL => stat == null_pointer_node
771 SYNC ALL(stat=s) => stat has an integer type
773 If "stat" has the wrong integer type, use a temp variable of
774 the right type and later cast the result back into "stat". */
775 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
777 if (TREE_TYPE (stat) == integer_type_node)
778 stat = gfc_build_addr_expr (NULL, stat);
780 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
781 3, stat, errmsg, errmsglen);
782 gfc_add_expr_to_block (&se.pre, tmp);
/* Wrong-kind STAT: call with a temp int and cast back afterwards.  */
786 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
788 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
789 3, gfc_build_addr_expr (NULL, tmp_stat),
791 gfc_add_expr_to_block (&se.pre, tmp);
793 gfc_add_modify (&se.pre, stat,
794 fold_convert (TREE_TYPE (stat), tmp_stat));
801 gcc_assert (type == EXEC_SYNC_IMAGES);
/* SYNC IMAGES(*): len == -1 tells the library "all images".  */
805 len = build_int_cst (integer_type_node, -1);
806 images = null_pointer_node;
808 else if (code->expr1->rank == 0)
810 len = build_int_cst (integer_type_node, 1);
811 images = gfc_build_addr_expr (NULL_TREE, images);
/* Array image set: only default-kind integers are supported.  */
816 if (code->expr1->ts.kind != gfc_c_int_kind)
817 gfc_fatal_error ("Sorry, only support for integer kind %d "
818 "implemented for image-set at %L",
819 gfc_c_int_kind, &code->expr1->where);
821 gfc_conv_array_parameter (&se, code->expr1,
822 gfc_walk_expr (code->expr1), true, NULL,
/* Convert the byte size into an element count for the library call.  */
826 tmp = gfc_typenode_for_spec (&code->expr1->ts);
827 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
828 tmp = gfc_get_element_type (tmp);
830 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
831 TREE_TYPE (len), len,
832 fold_convert (TREE_TYPE (len),
833 TYPE_SIZE_UNIT (tmp)));
834 len = fold_convert (integer_type_node, len);
837 /* SYNC IMAGES(imgs) => stat == null_pointer_node
838 SYNC IMAGES(imgs,stat=s) => stat has an integer type
840 If "stat" has the wrong integer type, use a temp variable of
841 the right type and later cast the result back into "stat". */
842 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
844 if (TREE_TYPE (stat) == integer_type_node)
845 stat = gfc_build_addr_expr (NULL, stat);
847 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
848 5, fold_convert (integer_type_node, len),
849 images, stat, errmsg, errmsglen);
850 gfc_add_expr_to_block (&se.pre, tmp);
/* Wrong-kind STAT: same temp-and-cast-back dance as SYNC ALL.  */
854 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
856 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
857 5, fold_convert (integer_type_node, len),
858 images, gfc_build_addr_expr (NULL, tmp_stat),
860 gfc_add_expr_to_block (&se.pre, tmp);
862 gfc_add_modify (&se.pre, stat,
863 fold_convert (TREE_TYPE (stat), tmp_stat));
867 return gfc_finish_block (&se.pre);
871 /* Generate GENERIC for the IF construct. This function also deals with
872 the simple IF statement, because the front end translates the IF
873 statement into an IF construct.
905 where COND_S is the simplified version of the predicate. PRE_COND_S
906 are the pre side-effects produced by the translation of the
908 We need to build the chain recursively otherwise we run into
909 problems with folding incomplete statements. */
912 gfc_trans_if_1 (gfc_code * code)
919 /* Check for an unconditional ELSE clause. */
921 return gfc_trans_code (code->next);
923 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
924 gfc_init_se (&if_se, NULL);
925 gfc_start_block (&if_se.pre);
927 /* Calculate the IF condition expression. */
/* Temporarily switch the backend locus to the condition's source
   location so diagnostics/debug info point at the right line.  */
928 if (code->expr1->where.lb)
930 gfc_save_backend_locus (&saved_loc);
931 gfc_set_backend_locus (&code->expr1->where);
934 gfc_conv_expr_val (&if_se, code->expr1);
936 if (code->expr1->where.lb)
937 gfc_restore_backend_locus (&saved_loc);
939 /* Translate the THEN clause. */
940 stmt = gfc_trans_code (code->next);
942 /* Translate the ELSE clause. */
/* ELSE IF chains are handled by recursing on code->block.  */
944 elsestmt = gfc_trans_if_1 (code->block);
946 elsestmt = build_empty_stmt (input_location);
948 /* Build the condition expression and add it to the condition block. */
949 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
950 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
953 gfc_add_expr_to_block (&if_se.pre, stmt);
955 /* Finish off this statement. */
956 return gfc_finish_block (&if_se.pre);
/* Entry point for the IF construct: wraps gfc_trans_if_1 and provides
   the exit label that EXIT statements inside the construct jump to.  */
960 gfc_trans_if (gfc_code * code)
965 /* Create exit label so it is available for trans'ing the body code. */
966 exit_label = gfc_build_label_decl (NULL_TREE);
967 code->exit_label = exit_label;
969 /* Translate the actual code in code->block. */
970 gfc_init_block (&body);
971 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
973 /* Add exit label. */
974 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
976 return gfc_finish_block (&body);
980 /* Translate an arithmetic IF expression.
982 IF (cond) label1, label2, label3 translates to
994 An optimized version can be generated in case of equal labels.
995 E.g., if label1 is equal to label2, we can translate it to
1004 gfc_trans_arithmetic_if (gfc_code * code)
1012 /* Start a new block. */
1013 gfc_init_se (&se, NULL);
1014 gfc_start_block (&se.pre);
1016 /* Pre-evaluate COND. */
/* COND may be compared up to twice below, so force a single evaluation.  */
1017 gfc_conv_expr_val (&se, code->expr1);
1018 se.expr = gfc_evaluate_now (se.expr, &se.pre);
1020 /* Build something to compare with. */
1021 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
1023 if (code->label1->value != code->label2->value)
1025 /* If (cond < 0) take branch1 else take branch2.
1026 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1027 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1028 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 allows collapsing < and > into a single != test.  */
1030 if (code->label1->value != code->label3->value)
1031 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
1034 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1037 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1038 tmp, branch1, branch2);
/* label1 == label2: the < and == cases share a target.  */
1041 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1043 if (code->label1->value != code->label3->value
1044 && code->label2->value != code->label3->value)
1046 /* if (cond <= 0) take branch1 else take branch2. */
1047 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
1048 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1050 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1051 tmp, branch1, branch2);
1054 /* Append the COND_EXPR to the evaluation of COND, and return. */
1055 gfc_add_expr_to_block (&se.pre, branch1);
1056 return gfc_finish_block (&se.pre);
1060 /* Translate a CRITICAL block. */
1062 gfc_trans_critical (gfc_code *code)
1067 gfc_start_block (&block);
/* With -fcoarray=lib, bracket the body with the library's critical
   section enter/leave calls; otherwise the body runs unguarded.  */
1069 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1071 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_critical, 0);
1072 gfc_add_expr_to_block (&block, tmp);
1075 tmp = gfc_trans_code (code->block->next);
1076 gfc_add_expr_to_block (&block, tmp);
1078 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1080 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_end_critical,
1082 gfc_add_expr_to_block (&block, tmp);
1086 return gfc_finish_block (&block);
1090 /* Do proper initialization for ASSOCIATE names. */
1093 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1099 gcc_assert (sym->assoc);
1100 e = sym->assoc->target;
/* Targets that are CLASS scalars or CLASS array references need special
   handling (descriptor assignment) below.  */
1102 class_target = (e->expr_type == EXPR_VARIABLE)
1103 && (gfc_is_class_scalar_expr (e)
1104 || gfc_is_class_array_ref (e, NULL));
1106 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1107 to array temporary) for arrays with either unknown shape or if associating
1109 if (sym->attr.dimension && !class_target
1110 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1116 desc = sym->backend_decl;
1118 /* If association is to an expression, evaluate it and create temporary.
1119 Otherwise, get descriptor of target for pointer assignment. */
1120 gfc_init_se (&se, NULL);
1121 ss = gfc_walk_expr (e);
1122 if (sym->assoc->variable)
/* direct_byref makes gfc_conv_expr_descriptor write straight into the
   associate-name's descriptor.  */
1124 se.direct_byref = 1;
1127 gfc_conv_expr_descriptor (&se, e, ss);
1129 /* If we didn't already do the pointer assignment, set associate-name
1130 descriptor to the one generated for the temporary. */
1131 if (!sym->assoc->variable)
1135 gfc_add_modify (&se.pre, desc, se.expr);
1137 /* The generated descriptor has lower bound zero (as array
1138 temporary), shift bounds so we get lower bounds of 1. */
1139 for (dim = 0; dim < e->rank; ++dim)
1140 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1141 dim, gfc_index_one_node);
1144 /* Done, register stuff as init / cleanup code. */
1145 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1146 gfc_finish_block (&se.post));
1149 /* CLASS arrays just need the descriptor to be directly assigned. */
1150 else if (class_target && sym->attr.dimension)
1154 gfc_init_se (&se, NULL);
1155 gfc_conv_expr (&se, e);
1157 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
1158 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));
1160 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
1162 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1163 gfc_finish_block (&se.post));
1166 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1167 else if (gfc_is_associate_pointer (sym))
1171 gcc_assert (!sym->attr.dimension);
1173 gfc_init_se (&se, NULL);
1174 gfc_conv_expr (&se, e);
1176 tmp = TREE_TYPE (sym->backend_decl);
1177 tmp = gfc_build_addr_expr (tmp, se.expr);
1178 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1180 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1181 gfc_finish_block (&se.post));
1184 /* Do a simple assignment. This is for scalar expressions, where we
1185 can simply use expression assignment. */
1190 lhs = gfc_lval_expr_from_sym (sym);
1191 tmp = gfc_trans_assignment (lhs, e, false, true);
1192 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1197 /* Translate a BLOCK construct. This is basically what we would do for a
1201 gfc_trans_block_construct (gfc_code* code)
1205 gfc_wrapped_block block;
1208 gfc_association_list *ass;
/* The BLOCK has its own namespace with a procedure symbol for its
   local declarations.  */
1210 ns = code->ext.block.ns;
1212 sym = ns->proc_name;
1215 /* Process local variables. */
1216 gcc_assert (!sym->tlink);
1218 gfc_process_block_locals (ns);
1220 /* Generate code including exit-label. */
1221 gfc_init_block (&body);
1222 exit_label = gfc_build_label_decl (NULL_TREE);
1223 code->exit_label = exit_label;
1224 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1225 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1227 /* Finish everything. */
1228 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1229 gfc_trans_deferred_vars (sym, &block);
/* Initialize any ASSOCIATE names attached to this BLOCK.  */
1230 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1231 trans_associate_var (ass->st->n.sym, &block);
1233 return gfc_finish_wrapped_block (&block);
/* NOTE(review): sampled listing -- lines are missing between the visible
   ones (declarations, braces, parts of expressions); do not edit logic
   without consulting the complete source.  */
1237 /* Translate the simple DO construct. This is where the loop variable has
1238 integer type and step +-1. We can't use this in the general case
1239 because integer overflow and floating point errors could give incorrect
1241 We translate a do loop from:
1243 DO dovar = from, to, step
1249 [Evaluate loop bounds and step]
1251 if ((step > 0) ? (dovar <= to) : (dovar => to))
1257 cond = (dovar == to);
1259 if (cond) goto end_label;
1264 This helps the optimizers by avoiding the extra induction variable
1265 used in the general case. */
1268 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1269 tree from, tree to, tree step, tree exit_cond)
1275 tree saved_dovar = NULL;
1280 type = TREE_TYPE (dovar);
1282 loc = code->ext.iterator->start->where.lb->location;
1284 /* Initialize the DO variable: dovar = from. */
1285 gfc_add_modify_loc (loc, pblock, dovar,
1286 fold_convert (TREE_TYPE(dovar), from));
1288 /* Save value for do-tinkering checking. */
1289 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1291 saved_dovar = gfc_create_var (type, ".saved_dovar");
1292 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1295 /* Cycle and exit statements are implemented with gotos. */
1296 cycle_label = gfc_build_label_decl (NULL_TREE);
1297 exit_label = gfc_build_label_decl (NULL_TREE);
1299 /* Put the labels where they can be found later. See gfc_trans_do(). */
1300 code->cycle_label = cycle_label;
1301 code->exit_label = exit_label;
1304 gfc_start_block (&body);
1306 /* Main loop body. */
1307 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1308 gfc_add_expr_to_block (&body, tmp);
1310 /* Label for cycle statements (if needed). */
1311 if (TREE_USED (cycle_label))
1313 tmp = build1_v (LABEL_EXPR, cycle_label);
1314 gfc_add_expr_to_block (&body, tmp);
1317 /* Check whether someone has modified the loop variable. */
1318 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1320 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1321 dovar, saved_dovar);
1322 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1323 "Loop variable has been modified");
1326 /* Exit the loop if there is an I/O result condition or error. */
1329 tmp = build1_v (GOTO_EXPR, exit_label);
1330 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1332 build_empty_stmt (loc));
1333 gfc_add_expr_to_block (&body, tmp);
/* Test (dovar == to) BEFORE incrementing, so dovar + step is never
   evaluated past the final trip -- this is what avoids overflow.  */
1336 /* Evaluate the loop condition. */
1337 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1339 cond = gfc_evaluate_now_loc (loc, cond, &body);
1341 /* Increment the loop variable. */
1342 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1343 gfc_add_modify_loc (loc, &body, dovar, tmp);
1345 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1346 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1348 /* The loop exit. */
1349 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1350 TREE_USED (exit_label) = 1;
1351 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1352 cond, tmp, build_empty_stmt (loc));
1353 gfc_add_expr_to_block (&body, tmp);
1355 /* Finish the loop body. */
1356 tmp = gfc_finish_block (&body);
1357 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
/* step is a compile-time constant here (+1 or -1), so its sign picks the
   entry guard direction statically.  */
1359 /* Only execute the loop if the number of iterations is positive. */
1360 if (tree_int_cst_sgn (step) > 0)
1361 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1364 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1366 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1367 build_empty_stmt (loc));
1368 gfc_add_expr_to_block (pblock, tmp);
1370 /* Add the exit label. */
1371 tmp = build1_v (LABEL_EXPR, exit_label);
1372 gfc_add_expr_to_block (pblock, tmp);
1374 return gfc_finish_block (pblock);
/* NOTE(review): sampled listing -- intermediate lines are missing; the
   countm1 computation below is sensitive to exact order and types, so
   consult the complete source before changing anything here.  */
1377 /* Translate the DO construct. This obviously is one of the most
1378 important ones to get right with any compiler, but especially
1381 We special case some loop forms as described in gfc_trans_simple_do.
1382 For other cases we implement them with a separate loop count,
1383 as described in the standard.
1385 We translate a do loop from:
1387 DO dovar = from, to, step
1393 [evaluate loop bounds and step]
1394 empty = (step > 0 ? to < from : to > from);
1395 countm1 = (to - from) / step;
1397 if (empty) goto exit_label;
1403 if (countm1 ==0) goto exit_label;
1408 countm1 is an unsigned integer. It is equal to the loop count minus one,
1409 because the loop count itself can overflow. */
1412 gfc_trans_do (gfc_code * code, tree exit_cond)
1416 tree saved_dovar = NULL;
1432 gfc_start_block (&block);
1434 loc = code->ext.iterator->start->where.lb->location;
/* Evaluate iterator variable, start, end and step exactly once each,
   before the loop, as required by the standard.  */
1436 /* Evaluate all the expressions in the iterator. */
1437 gfc_init_se (&se, NULL);
1438 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1439 gfc_add_block_to_block (&block, &se.pre);
1441 type = TREE_TYPE (dovar);
1443 gfc_init_se (&se, NULL);
1444 gfc_conv_expr_val (&se, code->ext.iterator->start);
1445 gfc_add_block_to_block (&block, &se.pre);
1446 from = gfc_evaluate_now (se.expr, &block);
1448 gfc_init_se (&se, NULL);
1449 gfc_conv_expr_val (&se, code->ext.iterator->end);
1450 gfc_add_block_to_block (&block, &se.pre);
1451 to = gfc_evaluate_now (se.expr, &block);
1453 gfc_init_se (&se, NULL);
1454 gfc_conv_expr_val (&se, code->ext.iterator->step);
1455 gfc_add_block_to_block (&block, &se.pre);
1456 step = gfc_evaluate_now (se.expr, &block);
1458 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1460 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1461 build_zero_cst (type));
1462 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1463 "DO step value is zero");
/* Integer loops with constant step +-1 take the overflow-free fast
   path in gfc_trans_simple_do.  */
1466 /* Special case simple loops. */
1467 if (TREE_CODE (type) == INTEGER_TYPE
1468 && (integer_onep (step)
1469 || tree_int_cst_equal (step, integer_minus_one_node)))
1470 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1472 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1473 build_zero_cst (type));
1475 if (TREE_CODE (type) == INTEGER_TYPE)
1476 utype = unsigned_type_for (type);
1478 utype = unsigned_type_for (gfc_array_index_type);
1479 countm1 = gfc_create_var (utype, "countm1");
1481 /* Cycle and exit statements are implemented with gotos. */
1482 cycle_label = gfc_build_label_decl (NULL_TREE);
1483 exit_label = gfc_build_label_decl (NULL_TREE);
1484 TREE_USED (exit_label) = 1;
1486 /* Put these labels where they can be found later. */
1487 code->cycle_label = cycle_label;
1488 code->exit_label = exit_label;
1490 /* Initialize the DO variable: dovar = from. */
1491 gfc_add_modify (&block, dovar, from);
1493 /* Save value for do-tinkering checking. */
1494 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1496 saved_dovar = gfc_create_var (type, ".saved_dovar");
1497 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1500 /* Initialize loop count and jump to exit label if the loop is empty.
1501 This code is executed before we enter the loop body. We generate:
1502 step_sign = sign(1,step);
1513 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1517 if (TREE_CODE (type) == INTEGER_TYPE)
1519 tree pos, neg, step_sign, to2, from2, step2;
1521 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1523 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1524 build_int_cst (TREE_TYPE (step), 0));
1525 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1526 build_int_cst (type, -1),
1527 build_int_cst (type, 1));
/* Jump straight to the exit when the iteration space is empty:
   step > 0 with to < from, or step < 0 with to > from.  */
1529 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1530 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1531 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1533 build_empty_stmt (loc));
1535 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1537 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1538 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1540 build_empty_stmt (loc));
1541 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1542 pos_step, pos, neg);
1544 gfc_add_expr_to_block (&block, tmp);
1546 /* Calculate the loop count. to-from can overflow, so
1547 we cast to unsigned. */
/* Multiplying by step_sign normalizes to a counting-up loop before the
   unsigned division.  */
1549 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1550 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1551 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1552 step2 = fold_convert (utype, step2);
1553 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1554 tmp = fold_convert (utype, tmp);
1555 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1556 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1557 gfc_add_expr_to_block (&block, tmp);
/* Non-integer (real) DO variable: compute the trip count in floating
   point and truncate.  */
1561 /* TODO: We could use the same width as the real type.
1562 This would probably cause more problems that it solves
1563 when we implement "long double" types. */
1565 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1566 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1567 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1568 gfc_add_modify (&block, countm1, tmp);
1570 /* We need a special check for empty loops:
1571 empty = (step > 0 ? to < from : to > from); */
1572 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1573 fold_build2_loc (loc, LT_EXPR,
1574 boolean_type_node, to, from),
1575 fold_build2_loc (loc, GT_EXPR,
1576 boolean_type_node, to, from));
1577 /* If the loop is empty, go directly to the exit label. */
1578 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1579 build1_v (GOTO_EXPR, exit_label),
1580 build_empty_stmt (input_location));
1581 gfc_add_expr_to_block (&block, tmp);
1585 gfc_start_block (&body);
1587 /* Main loop body. */
1588 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1589 gfc_add_expr_to_block (&body, tmp);
1591 /* Label for cycle statements (if needed). */
1592 if (TREE_USED (cycle_label))
1594 tmp = build1_v (LABEL_EXPR, cycle_label);
1595 gfc_add_expr_to_block (&body, tmp);
1598 /* Check whether someone has modified the loop variable. */
1599 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1601 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1603 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1604 "Loop variable has been modified");
1607 /* Exit the loop if there is an I/O result condition or error. */
1610 tmp = build1_v (GOTO_EXPR, exit_label);
1611 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1613 build_empty_stmt (input_location));
1614 gfc_add_expr_to_block (&body, tmp);
1617 /* Increment the loop variable. */
1618 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1619 gfc_add_modify_loc (loc, &body, dovar, tmp);
1621 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1622 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
/* Test countm1 == 0 before decrementing: countm1 is unsigned and would
   wrap around on the final decrement otherwise.  */
1624 /* End with the loop condition. Loop until countm1 == 0. */
1625 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1626 build_int_cst (utype, 0));
1627 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1628 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1629 cond, tmp, build_empty_stmt (loc));
1630 gfc_add_expr_to_block (&body, tmp);
1632 /* Decrement the loop count. */
1633 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1634 build_int_cst (utype, 1));
1635 gfc_add_modify_loc (loc, &body, countm1, tmp);
1637 /* End of loop body. */
1638 tmp = gfc_finish_block (&body);
1640 /* The for loop itself. */
1641 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1642 gfc_add_expr_to_block (&block, tmp);
1644 /* Add the exit label. */
1645 tmp = build1_v (LABEL_EXPR, exit_label);
1646 gfc_add_expr_to_block (&block, tmp);
1648 return gfc_finish_block (&block);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
1652 /* Translate the DO WHILE construct.
1665 if (! cond) goto exit_label;
1671 Because the evaluation of the exit condition `cond' may have side
1672 effects, we can't do much for empty loop bodies. The backend optimizers
1673 should be smart enough to eliminate any dead loops. */
1676 gfc_trans_do_while (gfc_code * code)
1684 /* Everything we build here is part of the loop body. */
1685 gfc_start_block (&block);
1686 /* Cycle and exit statements are implemented with gotos. */
1688 cycle_label = gfc_build_label_decl (NULL_TREE);
1689 exit_label = gfc_build_label_decl (NULL_TREE);
1691 /* Put the labels where they can be found later. See gfc_trans_do(). */
1692 code->cycle_label = cycle_label;
1693 code->exit_label = exit_label;
/* The condition is re-evaluated inside the loop body on every
   iteration (its .pre side effects are part of the body block).  */
1695 /* Create a GIMPLE version of the exit condition. */
1696 gfc_init_se (&cond, NULL);
1697 gfc_conv_expr_val (&cond, code->expr1);
1698 gfc_add_block_to_block (&block, &cond.pre);
1699 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1700 TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1702 /* Build "IF (! cond) GOTO exit_label". */
1703 tmp = build1_v (GOTO_EXPR, exit_label);
1704 TREE_USED (exit_label) = 1;
1705 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1706 void_type_node, cond.expr, tmp,
1707 build_empty_stmt (code->expr1->where.lb->location));
1708 gfc_add_expr_to_block (&block, tmp);
1710 /* The main body of the loop. */
1711 tmp = gfc_trans_code (code->block->next);
1712 gfc_add_expr_to_block (&block, tmp);
1714 /* Label for cycle statements (if needed). */
1715 if (TREE_USED (cycle_label))
1717 tmp = build1_v (LABEL_EXPR, cycle_label);
1718 gfc_add_expr_to_block (&block, tmp);
1721 /* End of loop body. */
1722 tmp = gfc_finish_block (&block);
1724 gfc_init_block (&block);
1725 /* Build the loop. */
1726 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1727 void_type_node, tmp);
1728 gfc_add_expr_to_block (&block, tmp);
1730 /* Add the exit label. */
1731 tmp = build1_v (LABEL_EXPR, exit_label);
1732 gfc_add_expr_to_block (&block, tmp);
1734 return gfc_finish_block (&block);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
1738 /* Translate the SELECT CASE construct for INTEGER case expressions,
1739 without killing all potential optimizations. The problem is that
1740 Fortran allows unbounded cases, but the back-end does not, so we
1741 need to intercept those before we enter the equivalent SWITCH_EXPR
1744 For example, we translate this,
1747 CASE (:100,101,105:115)
1757 to the GENERIC equivalent,
1761 case (minimum value for typeof(expr) ... 100:
1767 case 200 ... (maximum value for typeof(expr):
1784 gfc_trans_integer_select (gfc_code * code)
1794 gfc_start_block (&block);
1796 /* Calculate the switch expression. */
1797 gfc_init_se (&se, NULL);
1798 gfc_conv_expr_val (&se, code->expr1);
1799 gfc_add_block_to_block (&block, &se.pre);
1801 end_label = gfc_build_label_decl (NULL_TREE);
1803 gfc_init_block (&body);
/* Outer loop: one gfc_code block per CASE statement; inner loop: each
   case range in that statement's case list.  */
1805 for (c = code->block; c; c = c->block)
1807 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1812 /* Assume it's the default case. */
1813 low = high = NULL_TREE;
1817 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1820 /* If there's only a lower bound, set the high bound to the
1821 maximum value of the case expression. */
1823 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1828 /* Three cases are possible here:
1830 1) There is no lower bound, e.g. CASE (:N).
1831 2) There is a lower bound .NE. high bound, that is
1832 a case range, e.g. CASE (N:M) where M>N (we make
1833 sure that M>N during type resolution).
1834 3) There is a lower bound, and it has the same value
1835 as the high bound, e.g. CASE (N:N). This is our
1836 internal representation of CASE(N).
1838 In the first and second case, we need to set a value for
1839 high. In the third case, we don't because the GCC middle
1840 end represents a single case value by just letting high be
1841 a NULL_TREE. We can't do that because we need to be able
1842 to represent unbounded cases. */
1846 && mpz_cmp (cp->low->value.integer,
1847 cp->high->value.integer) != 0))
1848 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1851 /* Unbounded case. */
1853 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1856 /* Build a label. */
1857 label = gfc_build_label_decl (NULL_TREE);
1859 /* Add this case label.
1860 Add parameter 'label', make it match GCC backend. */
1861 tmp = build_case_label (low, high, label);
1862 gfc_add_expr_to_block (&body, tmp);
1865 /* Add the statements for this case. */
1866 tmp = gfc_trans_code (c->next);
1867 gfc_add_expr_to_block (&body, tmp);
/* Fortran has no fall-through between cases, so every case body ends
   with an unconditional jump past the switch.  */
1869 /* Break to the end of the construct. */
1870 tmp = build1_v (GOTO_EXPR, end_label);
1871 gfc_add_expr_to_block (&body, tmp);
1874 tmp = gfc_finish_block (&body);
1875 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1876 gfc_add_expr_to_block (&block, tmp);
1878 tmp = build1_v (LABEL_EXPR, end_label);
1879 gfc_add_expr_to_block (&block, tmp);
1881 return gfc_finish_block (&block);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
1885 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1887 There are only two cases possible here, even though the standard
1888 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1889 .FALSE., and DEFAULT.
1891 We never generate more than two blocks here. Instead, we always
1892 try to eliminate the DEFAULT case. This way, we can translate this
1893 kind of SELECT construct to a simple
1897 expression in GENERIC. */
1900 gfc_trans_logical_select (gfc_code * code)
1903 gfc_code *t, *f, *d;
1908 /* Assume we don't have any cases at all. */
1911 /* Now see which ones we actually do have. We can have at most two
1912 cases in a single case list: one for .TRUE. and one for .FALSE.
1913 The default case is always separate. If the cases for .TRUE. and
1914 .FALSE. are in the same case list, the block for that case list
1915 always executed, and we don't generate code a COND_EXPR. */
1916 for (c = code->block; c; c = c->block)
1918 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1922 if (cp->low->value.logical == 0) /* .FALSE. */
1924 else /* if (cp->value.logical != 0), thus .TRUE. */
1932 /* Start a new block. */
1933 gfc_start_block (&block);
1935 /* Calculate the switch expression. We always need to do this
1936 because it may have side effects. */
1937 gfc_init_se (&se, NULL);
1938 gfc_conv_expr_val (&se, code->expr1);
1939 gfc_add_block_to_block (&block, &se.pre);
/* Both branches point at the same case list: the body runs
   unconditionally, no COND_EXPR needed.  */
1941 if (t == f && t != NULL)
1943 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1944 translate the code for these cases, append it to the current
1946 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1950 tree true_tree, false_tree, stmt;
1952 true_tree = build_empty_stmt (input_location);
1953 false_tree = build_empty_stmt (input_location);
1955 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1956 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1957 make the missing case the default case. */
1958 if (t != NULL && f != NULL)
1968 /* Translate the code for each of these blocks, and append it to
1969 the current block. */
1971 true_tree = gfc_trans_code (t->next);
1974 false_tree = gfc_trans_code (f->next);
1976 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1977 se.expr, true_tree, false_tree);
1978 gfc_add_expr_to_block (&block, stmt);
1981 return gfc_finish_block (&block);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
1985 /* The jump table types are stored in static variables to avoid
1986 constructing them from scratch every single time. */
/* Index 0 is used for kind=1 selectors, index 1 for kind=4 (see the
   'k' selection below); GTY(()) registers the roots with the garbage
   collector.  */
1987 static GTY(()) tree select_struct[2];
1989 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1990 Instead of generating compares and jumps, it is far simpler to
1991 generate a data structure describing the cases in order and call a
1992 library subroutine that locates the right case.
1993 This is particularly true because this is the only case where we
1994 might have to dispose of a temporary.
1995 The library subroutine returns a pointer to jump to or NULL if no
1996 branches are to be taken. */
1999 gfc_trans_character_select (gfc_code *code)
2001 tree init, end_label, tmp, type, case_num, label, fndecl;
2002 stmtblock_t block, body;
2007 VEC(constructor_elt,gc) *inits = NULL;
2009 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
2011 /* The jump table types are stored in static variables to avoid
2012 constructing them from scratch every single time. */
2013 static tree ss_string1[2], ss_string1_len[2];
2014 static tree ss_string2[2], ss_string2_len[2];
2015 static tree ss_target[2];
2017 cp = code->block->ext.block.case_list;
2018 while (cp->left != NULL)
2021 /* Generate the body */
2022 gfc_start_block (&block);
2023 gfc_init_se (&expr1se, NULL);
2024 gfc_conv_expr_reference (&expr1se, code->expr1);
2026 gfc_add_block_to_block (&block, &expr1se.pre);
2028 end_label = gfc_build_label_decl (NULL_TREE);
2030 gfc_init_block (&body);
/* Fast path: if the selector has length 1 and every case label is
   effectively a single character, emit an integer SWITCH_EXPR on the
   character code instead of calling the library search routine.  */
2032 /* Attempt to optimize length 1 selects. */
2033 if (integer_onep (expr1se.string_length))
2035 for (d = cp; d; d = d->right)
2040 gcc_assert (d->low->expr_type == EXPR_CONSTANT
2041 && d->low->ts.type == BT_CHARACTER);
2042 if (d->low->value.character.length > 1)
2044 for (i = 1; i < d->low->value.character.length; i++)
2045 if (d->low->value.character.string[i] != ' ')
2047 if (i != d->low->value.character.length)
2049 if (optimize && d->high && i == 1)
2051 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2052 && d->high->ts.type == BT_CHARACTER);
2053 if (d->high->value.character.length > 1
2054 && (d->low->value.character.string[0]
2055 == d->high->value.character.string[0])
2056 && d->high->value.character.string[1] != ' '
2057 && ((d->low->value.character.string[1] < ' ')
2058 == (d->high->value.character.string[1]
2068 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2069 && d->high->ts.type == BT_CHARACTER);
2070 if (d->high->value.character.length > 1)
2072 for (i = 1; i < d->high->value.character.length; i++)
2073 if (d->high->value.character.string[i] != ' ')
2075 if (i != d->high->value.character.length)
2082 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
2084 for (c = code->block; c; c = c->block)
2086 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2092 /* Assume it's the default case. */
2093 low = high = NULL_TREE;
2097 /* CASE ('ab') or CASE ('ab':'az') will never match
2098 any length 1 character. */
2099 if (cp->low->value.character.length > 1
2100 && cp->low->value.character.string[1] != ' ')
2103 if (cp->low->value.character.length > 0)
2104 r = cp->low->value.character.string[0];
2107 low = build_int_cst (ctype, r);
2109 /* If there's only a lower bound, set the high bound
2110 to the maximum value of the case expression. */
2112 high = TYPE_MAX_VALUE (ctype);
2118 || (cp->low->value.character.string[0]
2119 != cp->high->value.character.string[0]))
2121 if (cp->high->value.character.length > 0)
2122 r = cp->high->value.character.string[0];
2125 high = build_int_cst (ctype, r);
2128 /* Unbounded case. */
2130 low = TYPE_MIN_VALUE (ctype);
2133 /* Build a label. */
2134 label = gfc_build_label_decl (NULL_TREE);
2136 /* Add this case label.
2137 Add parameter 'label', make it match GCC backend. */
2138 tmp = build_case_label (low, high, label);
2139 gfc_add_expr_to_block (&body, tmp);
2142 /* Add the statements for this case. */
2143 tmp = gfc_trans_code (c->next);
2144 gfc_add_expr_to_block (&body, tmp);
2146 /* Break to the end of the construct. */
2147 tmp = build1_v (GOTO_EXPR, end_label);
2148 gfc_add_expr_to_block (&body, tmp);
2151 tmp = gfc_string_to_single_character (expr1se.string_length,
2153 code->expr1->ts.kind);
2154 case_num = gfc_create_var (ctype, "case_num");
2155 gfc_add_modify (&block, case_num, tmp);
2157 gfc_add_block_to_block (&block, &expr1se.post);
2159 tmp = gfc_finish_block (&body);
2160 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2161 gfc_add_expr_to_block (&block, tmp);
2163 tmp = build1_v (LABEL_EXPR, end_label);
2164 gfc_add_expr_to_block (&block, tmp);
2166 return gfc_finish_block (&block);
/* General path: build a static jump table of (string1, string2, target)
   records and dispatch through the runtime library's binary search.  */
2170 if (code->expr1->ts.kind == 1)
2172 else if (code->expr1->ts.kind == 4)
2177 if (select_struct[k] == NULL)
2180 select_struct[k] = make_node (RECORD_TYPE);
2182 if (code->expr1->ts.kind == 1)
2183 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2184 else if (code->expr1->ts.kind == 4)
2185 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2190 #define ADD_FIELD(NAME, TYPE) \
2191 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2192 get_identifier (stringize(NAME)), \
2196 ADD_FIELD (string1, pchartype);
2197 ADD_FIELD (string1_len, gfc_charlen_type_node);
2199 ADD_FIELD (string2, pchartype);
2200 ADD_FIELD (string2_len, gfc_charlen_type_node);
2202 ADD_FIELD (target, integer_type_node);
2205 gfc_finish_type (select_struct[k]);
2209 for (d = cp; d; d = d->right)
2212 for (c = code->block; c; c = c->block)
2214 for (d = c->ext.block.case_list; d; d = d->next)
2216 label = gfc_build_label_decl (NULL_TREE);
2217 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2219 : build_int_cst (integer_type_node, d->n),
2221 gfc_add_expr_to_block (&body, tmp);
2224 tmp = gfc_trans_code (c->next);
2225 gfc_add_expr_to_block (&body, tmp);
2227 tmp = build1_v (GOTO_EXPR, end_label);
2228 gfc_add_expr_to_block (&body, tmp);
2231 /* Generate the structure describing the branches */
2232 for (d = cp; d; d = d->right)
2234 VEC(constructor_elt,gc) *node = NULL;
2236 gfc_init_se (&se, NULL);
/* NULL string pointer + zero length encodes an unbounded range end
   for the runtime comparison routine.  */
2240 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2241 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2245 gfc_conv_expr_reference (&se, d->low);
2247 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2248 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2251 if (d->high == NULL)
2253 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2254 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2258 gfc_init_se (&se, NULL);
2259 gfc_conv_expr_reference (&se, d->high);
2261 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2262 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2265 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2266 build_int_cst (integer_type_node, d->n));
2268 tmp = build_constructor (select_struct[k], node);
2269 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2272 type = build_array_type (select_struct[k],
2273 build_index_type (size_int (n-1)));
2275 init = build_constructor (type, inits);
2276 TREE_CONSTANT (init) = 1;
2277 TREE_STATIC (init) = 1;
2278 /* Create a static variable to hold the jump table. */
2279 tmp = gfc_create_var (type, "jumptable");
2280 TREE_CONSTANT (tmp) = 1;
2281 TREE_STATIC (tmp) = 1;
2282 TREE_READONLY (tmp) = 1;
2283 DECL_INITIAL (tmp) = init;
2286 /* Build the library call */
2287 init = gfc_build_addr_expr (pvoid_type_node, init);
2289 if (code->expr1->ts.kind == 1)
2290 fndecl = gfor_fndecl_select_string;
2291 else if (code->expr1->ts.kind == 4)
2292 fndecl = gfor_fndecl_select_string_char4;
2296 tmp = build_call_expr_loc (input_location,
2298 build_int_cst (gfc_charlen_type_node, n),
2299 expr1se.expr, expr1se.string_length);
2300 case_num = gfc_create_var (integer_type_node, "case_num");
2301 gfc_add_modify (&block, case_num, tmp);
2303 gfc_add_block_to_block (&block, &expr1se.post);
2305 tmp = gfc_finish_block (&body);
2306 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2307 gfc_add_expr_to_block (&block, tmp);
2309 tmp = build1_v (LABEL_EXPR, end_label);
2310 gfc_add_expr_to_block (&block, tmp);
2312 return gfc_finish_block (&block);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
2316 /* Translate the three variants of the SELECT CASE construct.
2318 SELECT CASEs with INTEGER case expressions can be translated to an
2319 equivalent GENERIC switch statement, and for LOGICAL case
2320 expressions we build one or two if-else compares.
2322 SELECT CASEs with CHARACTER case expressions are a whole different
2323 story, because they don't exist in GENERIC. So we sort them and
2324 do a binary search at runtime.
2326 Fortran has no BREAK statement, and it does not allow jumps from
2327 one case block to another. That makes things a lot easier for
2331 gfc_trans_select (gfc_code * code)
2337 gcc_assert (code && code->expr1);
2338 gfc_init_block (&block);
2340 /* Build the exit label and hang it in. */
2341 exit_label = gfc_build_label_decl (NULL_TREE);
2342 code->exit_label = exit_label;
2344 /* Empty SELECT constructs are legal. */
2345 if (code->block == NULL)
2346 body = build_empty_stmt (input_location);
/* Dispatch on the type of the selector expression.  */
2348 /* Select the correct translation function. */
2350 switch (code->expr1->ts.type)
2353 body = gfc_trans_logical_select (code);
2357 body = gfc_trans_integer_select (code);
2361 body = gfc_trans_character_select (code);
2365 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2369 /* Build everything together. */
2370 gfc_add_expr_to_block (&block, body);
2371 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2373 return gfc_finish_block (&block);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
2377 /* Traversal function to substitute a replacement symtree if the symbol
2378 in the expression is the same as that passed. f == 2 signals that
2379 that variable itself is not to be checked - only the references.
2380 This group of functions is used when the variable expression in a
2381 FORALL assignment has internal references. For example:
2382 FORALL (i = 1:4) p(p(i)) = i
2383 The only recourse here is to store a copy of 'p' for the index
/* Shared between forall_replace/forall_restore; only one substitution
   can be active at a time (file-level static state).  */
2386 static gfc_symtree *new_symtree;
2387 static gfc_symtree *old_symtree;
2390 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2392 if (expr->expr_type != EXPR_VARIABLE)
2397 else if (expr->symtree->n.sym == sym)
2398 expr->symtree = new_symtree;
/* Walk expression E, replacing every reference to SYM's symtree with
   new_symtree (see forall_replace above).  F is forwarded as the
   traversal flag.  */
2404 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2406 gfc_traverse_expr (e, sym, forall_replace, f);
/* Traversal callback that undoes forall_replace: any node pointing at
   new_symtree is pointed back at old_symtree.  */
2410 forall_restore (gfc_expr *expr,
2411 gfc_symbol *sym ATTRIBUTE_UNUSED,
2412 int *f ATTRIBUTE_UNUSED)
2414 if (expr->expr_type != EXPR_VARIABLE)
2417 if (expr->symtree == new_symtree)
2418 expr->symtree = old_symtree;
/* Walk expression E restoring the original symtree (inverse of
   forall_replace_symtree).  */
2424 forall_restore_symtree (gfc_expr *e)
2426 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
/* Make a temporary copy of the FORALL assignment's lvalue variable and
   redirect C->expr1 (and C->expr2) to it via a fake symtree, so that
   self-referencing assignments like p(p(i)) = i use a stable copy.
   Setup code goes to PRE, cleanup to POST.  */
2430 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2435 gfc_symbol *new_sym;
2436 gfc_symbol *old_sym;
2440 /* Build a copy of the lvalue. */
2441 old_symtree = c->expr1->symtree;
2442 old_sym = old_symtree->n.sym;
2443 e = gfc_lval_expr_from_sym (old_sym);
2444 if (old_sym->attr.dimension)
2446 gfc_init_se (&tse, NULL);
2447 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2448 gfc_add_block_to_block (pre, &tse.pre);
2449 gfc_add_block_to_block (post, &tse.post);
2450 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2452 if (e->ts.type != BT_CHARACTER)
2454 /* Use the variable offset for the temporary. */
2455 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2456 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: build a scalar temporary and assign the old value in.  */
2461 gfc_init_se (&tse, NULL);
2462 gfc_init_se (&rse, NULL);
2463 gfc_conv_expr (&rse, e);
2464 if (e->ts.type == BT_CHARACTER)
2466 tse.string_length = rse.string_length;
2467 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2469 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2471 gfc_add_block_to_block (pre, &tse.pre);
2472 gfc_add_block_to_block (post, &tse.post);
2476 tmp = gfc_typenode_for_spec (&e->ts);
2477 tse.expr = gfc_create_var (tmp, "temp");
2480 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2481 e->expr_type == EXPR_VARIABLE, true);
2482 gfc_add_expr_to_block (pre, tmp);
2486 /* Create a new symbol to represent the lvalue. */
2487 new_sym = gfc_new_symbol (old_sym->name, NULL);
2488 new_sym->ts = old_sym->ts;
2489 new_sym->attr.referenced = 1;
2490 new_sym->attr.temporary = 1;
2491 new_sym->attr.dimension = old_sym->attr.dimension;
2492 new_sym->attr.flavor = old_sym->attr.flavor;
2494 /* Use the temporary as the backend_decl. */
2495 new_sym->backend_decl = tse.expr;
2497 /* Create a fake symtree for it. */
2499 new_symtree = gfc_new_symtree (&root, old_sym->name);
2500 new_symtree->n.sym = new_sym;
2501 gcc_assert (new_symtree == root);
/* f == 2 skips the outermost variable itself on the LHS; f == 1 also
   replaces it on the RHS (see forall_replace).  */
2503 /* Go through the expression reference replacing the old_symtree
2505 forall_replace_symtree (c->expr1, old_sym, 2);
2507 /* Now we have made this temporary, we might as well use it for
2508 the right hand side. */
2509 forall_replace_symtree (c->expr2, old_sym, 1);
/* NOTE(review): sampled listing -- some lines are missing from this view.  */
2513 /* Handles dependencies in forall assignments. */
2515 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2522 lsym = c->expr1->symtree->n.sym;
2523 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2525 /* Now check for dependencies within the 'variable'
2526 expression itself. These are treated by making a complete
2527 copy of variable and changing all the references to it
2528 point to the copy instead. Note that the shallow copy of
2529 the variable will not suffice for derived types with
2530 pointer components. We therefore leave these to their
2532 if (lsym->ts.type == BT_DERIVED
2533 && lsym->ts.u.derived->attr.pointer_comp)
2537 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2539 forall_make_variable_temp (c, pre, post);
2543 /* Substrings with dependencies are treated in the same
2545 if (c->expr1->ts.type == BT_CHARACTER
2547 && c->expr2->expr_type == EXPR_VARIABLE
2548 && lsym == c->expr2->symtree->n.sym)
2550 for (lref = c->expr1->ref; lref; lref = lref->next)
2551 if (lref->type == REF_SUBSTRING)
2553 for (rref = c->expr2->ref; rref; rref = rref->next)
2554 if (rref->type == REF_SUBSTRING)
/* Overlapping substrings of the same symbol where the RHS starts
   earlier than the LHS also force a temporary.  */
2558 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2560 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitutions made by forall_make_variable_temp and
   release the temporary symbol.  NOTE(review): 'new_symtree' is a
   file-scope variable set when the fake symtree was created (see the
   temp-creation code earlier in this file); its declaration is outside
   this extract -- confirm lifetime/ownership there.  The return type and
   closing brace lines are also missing from the extract.  */
2569 cleanup_forall_symtrees (gfc_code *c)
2571   forall_restore_symtree (c->expr1);
2572   forall_restore_symtree (c->expr2);
2573   free (new_symtree->n.sym);
2578 /* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
2579    is the contents of the FORALL block/stmt to be iterated.  MASK_FLAG
2580    indicates whether we should generate code to test the FORALLs mask
2581    array.  OUTER is the loop header to be used for initializing mask
2584    The generated loop format is:
2585     count = (end - start + step) / step
/* NOTE(review): this extract is incomplete (numbering jumps); the rest of
   the header comment, the return type, the loop-variable wrap-up at the
   end of the per-variable 'for' and the final return are not visible.
   Returns the loop tree wrapping BODY -- confirm against the full file.  */
2598 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2599 		       int mask_flag, stmtblock_t *outer)
2607   tree var, start, end, step;
2610   /* Initialize the mask index outside the FORALL nest.  */
2611   if (mask_flag && forall_tmp->mask)
2612     gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one counted loop per FORALL index variable, innermost
   first (iterating the iter_info chain).  */
2614   iter = forall_tmp->this_loop;
2615   nvar = forall_tmp->nvar;
2616   for (n = 0; n < nvar; n++)
2619       start = iter->start;
2623       exit_label = gfc_build_label_decl (NULL_TREE);
2624       TREE_USED (exit_label) = 1;
2626       /* The loop counter.  */
2627       count = gfc_create_var (TREE_TYPE (var), "count");
2629       /* The body of the loop.  */
2630       gfc_init_block (&block);
2632       /* The exit condition.  */
2633       cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2634 			      count, build_int_cst (TREE_TYPE (count), 0));
2635       tmp = build1_v (GOTO_EXPR, exit_label);
2636       tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2637 			     cond, tmp, build_empty_stmt (input_location));
2638       gfc_add_expr_to_block (&block, tmp);
2640       /* The main loop body.  */
2641       gfc_add_expr_to_block (&block, body);
2643       /* Increment the loop variable.  */
2644       tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2646       gfc_add_modify (&block, var, tmp);
2648       /* Advance to the next mask element.  Only do this for the
2650       if (n == 0 && mask_flag && forall_tmp->mask)
2652 	  tree maskindex = forall_tmp->maskindex;
2653 	  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2654 				 maskindex, gfc_index_one_node);
2655 	  gfc_add_modify (&block, maskindex, tmp);
2658       /* Decrement the loop counter.  */
2659       tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2660 			     build_int_cst (TREE_TYPE (var), 1));
2661       gfc_add_modify (&block, count, tmp);
2663       body = gfc_finish_block (&block);
2665       /* Loop var initialization.  */
2666       gfc_init_block (&block);
2667       gfc_add_modify (&block, var, start);
2670       /* Initialize the loop counter.  */
/* count = (end - start + step) / step, built in three folds; the second
   operand of the first two folds is on lines missing from this extract.  */
2671       tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2673       tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2675       tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2677       gfc_add_modify (&block, count, tmp);
2679       /* The loop expression.  */
2680       tmp = build1_v (LOOP_EXPR, body);
2681       gfc_add_expr_to_block (&block, tmp);
2683       /* The exit label.  */
2684       tmp = build1_v (LABEL_EXPR, exit_label);
2685       gfc_add_expr_to_block (&block, tmp);
2687       body = gfc_finish_block (&block);
2694 /* Generate the body and loops according to MASK_FLAG.  If MASK_FLAG
2695    is nonzero, the body is controlled by all masks in the forall nest.
2696    Otherwise, the innermost loop is not controlled by it's mask.  This
2697    is used for initializing that mask.  */
/* NOTE(review): return type and some declarations are missing from this
   extract; the guard that skips the innermost mask when MASK_FLAG is 0
   (between 2713 and 2716) is also not visible -- confirm in full file.  */
2700 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2705   forall_info *forall_tmp;
2706   tree mask, maskindex;
2708   gfc_start_block (&header);
/* Walk outward through the nest, wrapping BODY in each level's mask test
   and iteration loops.  */
2710   forall_tmp = nested_forall_info;
2711   while (forall_tmp != NULL)
2713       /* Generate body with masks' control.  */
2716 	  mask = forall_tmp->mask;
2717 	  maskindex = forall_tmp->maskindex;
2719 	  /* If a mask was specified make the assignment conditional.  */
2722 	      tmp = gfc_build_array_ref (mask, maskindex, NULL);
2723 	      body = build3_v (COND_EXPR, tmp, body,
2724 			       build_empty_stmt (input_location));
2727       body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2728       forall_tmp = forall_tmp->prev_nest;
2732   gfc_add_expr_to_block (&header, body);
2733   return gfc_finish_block (&header);
2737 /* Allocate data for holding a temporary array.  Returns either a local
2738    temporary array or a pointer variable.  */
/* NOTE(review): return type, the non-constant-size branch (after 2750)
   and the final return are missing from this extract.  On the heap path
   *PDATA receives the raw pointer (for later freeing); on the stack path
   it is presumably NULL_TREE -- confirm.  */
2741 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
/* Upper bound of the 0-based range type is size - 1 when SIZE is a
   compile-time constant.  */
2748   if (INTEGER_CST_P (size))
2749       tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2750 			     size, gfc_index_one_node);
2754   type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2755   type = build_array_type (elem_type, type);
2756   if (gfc_can_put_var_on_stack (bytesize))
2758       gcc_assert (INTEGER_CST_P (size));
2759       tmpvar = gfc_create_var (type, "temp");
/* Otherwise allocate on the heap via the libgfortran malloc wrapper.  */
2764       tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2765       *pdata = convert (pvoid_type_node, tmpvar);
2767       tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2768       gfc_add_modify (pblock, tmpvar, tmp);
2774 /* Generate codes to copy the temporary to the actual lhs.  */
/* NOTE(review): return type, several declarations, the early return of
   the scalar branch and the WHEREMASK condition lines are missing from
   this extract.  COUNT1 indexes the flat temporary TMP1; COUNT3 indexes
   WHEREMASK (whose sense INVERT flips) -- confirm against full file.  */
2777 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2778 			       tree count1, tree wheremask, bool invert)
2782   stmtblock_t block, body;
2788   lss = gfc_walk_expr (expr);
/* Scalar lhs: a single assignment from temp[count1], no scalarizer.  */
2790   if (lss == gfc_ss_terminator)
2792       gfc_start_block (&block);
2794       gfc_init_se (&lse, NULL);
2796       /* Translate the expression.  */
2797       gfc_conv_expr (&lse, expr);
2799       /* Form the expression for the temporary.  */
2800       tmp = gfc_build_array_ref (tmp1, count1, NULL);
2802       /* Use the scalar assignment as is.  */
2803       gfc_add_block_to_block (&block, &lse.pre);
2804       gfc_add_modify (&block, lse.expr, tmp);
2805       gfc_add_block_to_block (&block, &lse.post);
2807       /* Increment the count1.  */
2808       tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2809 			     count1, gfc_index_one_node);
2810       gfc_add_modify (&block, count1, tmp);
2812       tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop assigning temp[count1++] to each
   lhs element, optionally guarded by the where-mask.  */
2816       gfc_start_block (&block);
2818       gfc_init_loopinfo (&loop1);
2819       gfc_init_se (&rse, NULL);
2820       gfc_init_se (&lse, NULL);
2822       /* Associate the lss with the loop.  */
2823       gfc_add_ss_to_loop (&loop1, lss);
2825       /* Calculate the bounds of the scalarization.  */
2826       gfc_conv_ss_startstride (&loop1);
2827       /* Setup the scalarizing loops.  */
2828       gfc_conv_loop_setup (&loop1, &expr->where);
2830       gfc_mark_ss_chain_used (lss, 1);
2832       /* Start the scalarized loop body.  */
2833       gfc_start_scalarized_body (&loop1, &body);
2835       /* Setup the gfc_se structures.  */
2836       gfc_copy_loopinfo_to_se (&lse, &loop1);
2839       /* Form the expression of the temporary.  */
2840       if (lss != gfc_ss_terminator)
2841 	rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2842       /* Translate expr.  */
2843       gfc_conv_expr (&lse, expr);
2845       /* Use the scalar assignment.  */
2846       rse.string_length = lse.string_length;
2847       tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2849       /* Form the mask expression according to the mask tree list.  */
2852 	  wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* If INVERT, negate the mask element before testing it.  */
2854 	    wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2855 					     TREE_TYPE (wheremaskexpr),
2857 	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2859 				 build_empty_stmt (input_location));
2862       gfc_add_expr_to_block (&body, tmp);
2864       /* Increment count1.  */
2865       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2866 			     count1, gfc_index_one_node);
2867       gfc_add_modify (&body, count1, tmp);
2869       /* Increment count3.  */
2872 	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
2873 				 gfc_array_index_type, count3,
2874 				 gfc_index_one_node);
2875 	  gfc_add_modify (&body, count3, tmp);
2878       /* Generate the copying loops.  */
2879       gfc_trans_scalarizing_loops (&loop1, &body);
2880       gfc_add_block_to_block (&block, &loop1.pre);
2881       gfc_add_block_to_block (&block, &loop1.post);
2882       gfc_cleanup_loop (&loop1);
2884       tmp = gfc_finish_block (&block);
2890 /* Generate codes to copy rhs to the temporary.  TMP1 is the address of
2891    temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2892    and should not be freed.  WHEREMASK is the conditional execution mask
2893    whose sense may be inverted by INVERT.  */
/* NOTE(review): return type, some declarations, the mask-condition lines
   and the final return are missing from this extract.  Mirrors
   generate_loop_for_temp_to_lhs but in the rhs-to-temp direction.  */
2896 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2897 			       tree count1, gfc_ss *lss, gfc_ss *rss,
2898 			       tree wheremask, bool invert)
2900   stmtblock_t block, body1;
2907   gfc_start_block (&block);
2909   gfc_init_se (&rse, NULL);
2910   gfc_init_se (&lse, NULL);
/* Scalar lhs walk: single element, evaluate rhs straight into
   temp[count1] without a scalarizer.  */
2912   if (lss == gfc_ss_terminator)
2914       gfc_init_block (&body1);
2915       gfc_conv_expr (&rse, expr2);
2916       lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2920       /* Initialize the loop.  */
2921       gfc_init_loopinfo (&loop);
2923       /* We may need LSS to determine the shape of the expression.  */
2924       gfc_add_ss_to_loop (&loop, lss);
2925       gfc_add_ss_to_loop (&loop, rss);
2927       gfc_conv_ss_startstride (&loop);
2928       gfc_conv_loop_setup (&loop, &expr2->where);
2930       gfc_mark_ss_chain_used (rss, 1);
2931       /* Start the loop body.  */
2932       gfc_start_scalarized_body (&loop, &body1);
2934       /* Translate the expression.  */
2935       gfc_copy_loopinfo_to_se (&rse, &loop);
2937       gfc_conv_expr (&rse, expr2);
2939       /* Form the expression of the temporary.  */
2940       lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2943   /* Use the scalar assignment.  */
2944   lse.string_length = rse.string_length;
2945   tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2946 				 expr2->expr_type == EXPR_VARIABLE, true);
2948   /* Form the mask expression according to the mask tree list.  */
2951       wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT flips the where-mask sense (ELSEWHERE handling).  */
2953 	wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2954 					 TREE_TYPE (wheremaskexpr),
2956       tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2958 			     build_empty_stmt (input_location));
2961   gfc_add_expr_to_block (&body1, tmp);
/* Scalar case: body runs once; bump count1 after it.  */
2963   if (lss == gfc_ss_terminator)
2965       gfc_add_block_to_block (&block, &body1);
2967       /* Increment count1.  */
2968       tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2969 			     count1, gfc_index_one_node);
2970       gfc_add_modify (&block, count1, tmp);
2974       /* Increment count1.  */
2975       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2976 			     count1, gfc_index_one_node);
2977       gfc_add_modify (&body1, count1, tmp);
2979       /* Increment count3.  */
2982 	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
2983 				 gfc_array_index_type,
2984 				 count3, gfc_index_one_node);
2985 	  gfc_add_modify (&body1, count3, tmp);
2988       /* Generate the copying loops.  */
2989       gfc_trans_scalarizing_loops (&loop, &body1);
2991       gfc_add_block_to_block (&block, &loop.pre);
2992       gfc_add_block_to_block (&block, &loop.post);
2994       gfc_cleanup_loop (&loop);
2995       /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
2996 	 as tree nodes in SS may not be valid in different scope.  */
2999   tmp = gfc_finish_block (&block);
3004 /* Calculate the size of temporary needed in the assignment inside forall.
3005    LSS and RSS are filled in this function.  */
/* NOTE(review): return type, some declarations and the final return of
   SIZE are missing from this extract.  Any code needed to evaluate the
   size is emitted into PBLOCK.  */
3008 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
3009 			 stmtblock_t * pblock,
3010 			 gfc_ss **lss, gfc_ss **rss)
3018   *lss = gfc_walk_expr (expr1);
/* Scalar lhs needs exactly one element.  */
3021   size = gfc_index_one_node;
3022   if (*lss != gfc_ss_terminator)
3024       gfc_init_loopinfo (&loop);
3026       /* Walk the RHS of the expression.  */
3027       *rss = gfc_walk_expr (expr2);
3028       if (*rss == gfc_ss_terminator)
3029 	/* The rhs is scalar.  Add a ss for the expression.  */
3030 	*rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
3032       /* Associate the SS with the loop.  */
3033       gfc_add_ss_to_loop (&loop, *lss);
3034       /* We don't actually need to add the rhs at this point, but it might
3035 	 make guessing the loop bounds a bit easier.  */
3036       gfc_add_ss_to_loop (&loop, *rss);
3038       /* We only want the shape of the expression, not rest of the junk
3039 	 generated by the scalarizer.  */
3040       loop.array_parameter = 1;
3042       /* Calculate the bounds of the scalarization.  */
/* Bounds checking is suppressed around startstride: this pass only
   measures the shape, real checks happen in the actual assignment.  */
3043       save_flag = gfc_option.rtcheck;
3044       gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
3045       gfc_conv_ss_startstride (&loop);
3046       gfc_option.rtcheck = save_flag;
3047       gfc_conv_loop_setup (&loop, &expr2->where);
3049       /* Figure out how many elements we need.  */
/* size *= (to - from + 1) for each scalarizer dimension.  */
3050       for (i = 0; i < loop.dimen; i++)
3052 	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
3053 				 gfc_array_index_type,
3054 				 gfc_index_one_node, loop.from[i]);
3055 	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
3056 				 gfc_array_index_type, tmp, loop.to[i]);
3057 	  size = fold_build2_loc (input_location, MULT_EXPR,
3058 				  gfc_array_index_type, size, tmp);
3060       gfc_add_block_to_block (pblock, &loop.pre);
3061       size = gfc_evaluate_now (size, pblock);
3062       gfc_add_block_to_block (pblock, &loop.post);
3064       /* TODO: write a function that cleans up a loopinfo without freeing
3065 	 the SS chains.  Currently a NOP.  */
3072 /* Calculate the overall iterator number of the nested forall construct.
3073    This routine actually calculates the number of times the body of the
3074    nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3075    that by the expression INNER_SIZE.  The BLOCK argument specifies the
3076    block in which to calculate the result, and the optional INNER_SIZE_BODY
3077    argument contains any statements that need to executed (inside the loop)
3078    to initialize or calculate INNER_SIZE.  */
/* NOTE(review): return type, a couple of declarations, the constant-size
   early return (after 3102) and the final return of NUMBER are missing
   from this extract.  */
3081 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
3082 			     stmtblock_t *inner_size_body, stmtblock_t *block)
3084   forall_info *forall_tmp = nested_forall_info;
3088   /* We can eliminate the innermost unconditional loops with constant
/* Fold constant, mask-free inner levels directly into INNER_SIZE rather
   than emitting a run-time accumulation loop for them.  */
3090   if (INTEGER_CST_P (inner_size))
3093 	     && !forall_tmp->mask
3094 	     && INTEGER_CST_P (forall_tmp->size))
3096 	  inner_size = fold_build2_loc (input_location, MULT_EXPR,
3097 					gfc_array_index_type,
3098 					inner_size, forall_tmp->size);
3099 	  forall_tmp = forall_tmp->prev_nest;
3102   /* If there are no loops left, we have our constant result.  */
3107   /* Otherwise, create a temporary variable to compute the result.  */
3108   number = gfc_create_var (gfc_array_index_type, "num");
3109   gfc_add_modify (block, number, gfc_index_zero_node);
3111   gfc_start_block (&body);
3112   if (inner_size_body)
3113     gfc_add_block_to_block (&body, inner_size_body);
/* Body of the counting loop: num += inner_size.  */
3115   tmp = fold_build2_loc (input_location, PLUS_EXPR,
3116 			 gfc_array_index_type, number, inner_size);
3119   gfc_add_modify (&body, number, tmp);
3120   tmp = gfc_finish_block (&body);
3122   /* Generate loops.  */
3123   if (forall_tmp != NULL)
3124     tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3126   gfc_add_expr_to_block (block, tmp);
3132 /* Allocate temporary for forall construct.  SIZE is the size of temporary
3133    needed.  PTEMP1 is returned for space free.  */
/* NOTE(review): return type, declarations and the final return are
   missing from this extract; presumably the (dereferenced) temporary is
   returned -- confirm.  Computes bytesize = size * elem-size, skipping the
   multiply when the element size is one byte.  */
3136 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
3143   unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
3144   if (!integer_onep (unit))
3145     bytesize = fold_build2_loc (input_location, MULT_EXPR,
3146 				gfc_array_index_type, size, unit);
3151   tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Heap allocations come back as a pointer; dereference so callers can
   index the temporary uniformly.  */
3154     tmp = build_fold_indirect_ref_loc (input_location, tmp);
3159 /* Allocate temporary for forall construct according to the information in
3160    nested_forall_info.  INNER_SIZE is the size of temporary needed in the
3161    assignment inside forall.  PTEMP1 is returned for space free.  */
/* NOTE(review): the return type line is missing from this extract.
   Thin wrapper: total size = (iterations of the whole nest) * INNER_SIZE,
   then delegate the actual allocation.  */
3164 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3165 			       tree inner_size, stmtblock_t * inner_size_body,
3166 			       stmtblock_t * block, tree * ptemp1)
3170   /* Calculate the total size of temporary needed in forall construct.  */
3171   size = compute_overall_iter_number (nested_forall_info, inner_size,
3172 				      inner_size_body, block);
3174   return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3178 /* Handle assignments inside forall which need temporary.
3180     forall (i=start:end:stride; maskexpr)
3183     (where e,f<i> are arbitrary expressions possibly involving i
3184      and there is a dependency between e<i> and f<i>)
3186     masktmp(:) = maskexpr(:)
3191     for (i = start; i <= end; i += stride)
3195     for (i = start; i <= end; i += stride)
3197         if (masktmp[maskindex++])
3198           tmp[count1++] = f<i>
3202     for (i = start; i <= end; i += stride)
3204         if (masktmp[maskindex++])
3205           e<i> = tmp[count1++]
/* NOTE(review): return type, some declarations and the closing of the
   function are missing from this extract.  Two-pass scheme: evaluate all
   rhs values into a flat temporary, then copy them to the lhs, so the
   lhs/rhs dependency cannot corrupt results.  */
3210 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3211 			    tree wheremask, bool invert,
3212 			    forall_info * nested_forall_info,
3213 			    stmtblock_t * block)
3221   stmtblock_t inner_size_body;
3223   /* Create vars.  count1 is the current iterator number of the nested
3225   count1 = gfc_create_var (gfc_array_index_type, "count1");
3227   /* Count is the wheremask index.  */
3230       count = gfc_create_var (gfc_array_index_type, "count");
3231       gfc_add_modify (block, count, gfc_index_zero_node);
3236   /* Initialize count1.  */
3237   gfc_add_modify (block, count1, gfc_index_zero_node);
3239   /* Calculate the size of temporary needed in the assignment.  Return loop, lss
3240      and rss which are used in function generate_loop_for_rhs_to_temp().  */
3241   gfc_init_block (&inner_size_body);
3242   inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3245   /* The type of LHS.  Used in function allocate_temp_for_forall_nest */
/* Character lhs: the temporary's element type must carry the string
   length; translate the length expression if not already done.  */
3246   if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
3248       if (!expr1->ts.u.cl->backend_decl)
3251 	  gfc_init_se (&tse, NULL);
3252 	  gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3253 	  expr1->ts.u.cl->backend_decl = tse.expr;
3255       type = gfc_get_character_type_len (gfc_default_character_kind,
3256 					 expr1->ts.u.cl->backend_decl);
3259     type = gfc_typenode_for_spec (&expr1->ts);
3261   /* Allocate temporary for nested forall construct according to the
3262      information in nested_forall_info and inner_size.  */
3263   tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3264 					&inner_size_body, block, &ptemp1);
3266   /* Generate codes to copy rhs to the temporary .  */
3267   tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3270   /* Generate body and loops according to the information in
3271      nested_forall_info.  */
3272   tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3273   gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp-to-lhs) pass.  */
3276   gfc_add_modify (block, count1, gfc_index_zero_node);
3280     gfc_add_modify (block, count, gfc_index_zero_node);
3282   /* Generate codes to copy the temporary to lhs.  */
3283   tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3286   /* Generate body and loops according to the information in
3287      nested_forall_info.  */
3288   tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3289   gfc_add_expr_to_block (block, tmp);
3293       /* Free the temporary.  */
3294       tmp = gfc_call_free (ptemp1);
3295       gfc_add_expr_to_block (block, tmp);
3300 /* Translate pointer assignment inside FORALL which need temporary.  */
/* NOTE(review): return type, several declarations and some control-flow
   lines are missing from this extract.  Same two-pass scheme as
   gfc_trans_assign_need_temp: first store all rhs pointers (or array
   descriptors) into a flat temporary, then assign them to the lhs.
   Scalar pointers use a pointer temp; array pointers use a temp of
   descriptors -- confirm details against the full file.  */
3303 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3304 				    forall_info * nested_forall_info,
3305 				    stmtblock_t * block)
3312   gfc_array_info *info;
3319   tree tmp, tmp1, ptemp1;
3321   count = gfc_create_var (gfc_array_index_type, "count");
3322   gfc_add_modify (block, count, gfc_index_zero_node);
3324   inner_size = gfc_index_one_node;
3325   lss = gfc_walk_expr (expr1);
3326   rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment branch.  */
3327   if (lss == gfc_ss_terminator)
3329       type = gfc_typenode_for_spec (&expr1->ts);
3330       type = build_pointer_type (type);
3332       /* Allocate temporary for nested forall construct according to the
3333 	 information in nested_forall_info and inner_size.  */
3334       tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3335 					    inner_size, NULL, block, &ptemp1);
/* Pass 1: temp[count++] = address of rhs.  */
3336       gfc_start_block (&body);
3337       gfc_init_se (&lse, NULL);
3338       lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3339       gfc_init_se (&rse, NULL);
3340       rse.want_pointer = 1;
3341       gfc_conv_expr (&rse, expr2);
3342       gfc_add_block_to_block (&body, &rse.pre);
3343       gfc_add_modify (&body, lse.expr,
3344 		      fold_convert (TREE_TYPE (lse.expr), rse.expr));
3345       gfc_add_block_to_block (&body, &rse.post);
3347       /* Increment count.  */
3348       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3349 			     count, gfc_index_one_node);
3350       gfc_add_modify (&body, count, tmp);
3352       tmp = gfc_finish_block (&body);
3354       /* Generate body and loops according to the information in
3355 	 nested_forall_info.  */
3356       tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3357       gfc_add_expr_to_block (block, tmp);
/* Pass 2: reset count, then lhs-pointer = temp[count++].  */
3360       gfc_add_modify (block, count, gfc_index_zero_node);
3362       gfc_start_block (&body);
3363       gfc_init_se (&lse, NULL);
3364       gfc_init_se (&rse, NULL);
3365       rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3366       lse.want_pointer = 1;
3367       gfc_conv_expr (&lse, expr1);
3368       gfc_add_block_to_block (&body, &lse.pre);
3369       gfc_add_modify (&body, lse.expr, rse.expr);
3370       gfc_add_block_to_block (&body, &lse.post);
3371       /* Increment count.  */
3372       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3373 			     count, gfc_index_one_node);
3374       gfc_add_modify (&body, count, tmp);
3375       tmp = gfc_finish_block (&body);
3377       /* Generate body and loops according to the information in
3378 	 nested_forall_info.  */
3379       tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3380       gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment branch: store whole descriptors.  */
3384       gfc_init_loopinfo (&loop);
3386       /* Associate the SS with the loop.  */
3387       gfc_add_ss_to_loop (&loop, rss);
3389       /* Setup the scalarizing loops and bounds.  */
3390       gfc_conv_ss_startstride (&loop);
3392       gfc_conv_loop_setup (&loop, &expr2->where);
3394       info = &rss->info->data.array;
3395       desc = info->descriptor;
3397       /* Make a new descriptor.  */
3398       parmtype = gfc_get_element_type (TREE_TYPE (desc));
3399       parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3400 					    loop.from, loop.to, 1,
3401 					    GFC_ARRAY_UNKNOWN, true);
3403       /* Allocate temporary for nested forall construct.  */
3404       tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3405 					    inner_size, NULL, block, &ptemp1);
/* Pass 1: build the rhs descriptor directly into temp[count++] via
   direct_byref.  */
3406       gfc_start_block (&body);
3407       gfc_init_se (&lse, NULL);
3408       lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3409       lse.direct_byref = 1;
3410       rss = gfc_walk_expr (expr2);
3411       gfc_conv_expr_descriptor (&lse, expr2, rss);
3413       gfc_add_block_to_block (&body, &lse.pre);
3414       gfc_add_block_to_block (&body, &lse.post);
3416       /* Increment count.  */
3417       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3418 			     count, gfc_index_one_node);
3419       gfc_add_modify (&body, count, tmp);
3421       tmp = gfc_finish_block (&body);
3423       /* Generate body and loops according to the information in
3424 	 nested_forall_info.  */
3425       tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3426       gfc_add_expr_to_block (block, tmp);
/* Pass 2: reset count and assign the stored descriptors to the lhs.  */
3429       gfc_add_modify (block, count, gfc_index_zero_node);
3431       parm = gfc_build_array_ref (tmp1, count, NULL);
3432       lss = gfc_walk_expr (expr1);
3433       gfc_init_se (&lse, NULL);
3434       gfc_conv_expr_descriptor (&lse, expr1, lss);
3435       gfc_add_modify (&lse.pre, lse.expr, parm);
3436       gfc_start_block (&body);
3437       gfc_add_block_to_block (&body, &lse.pre);
3438       gfc_add_block_to_block (&body, &lse.post);
3440       /* Increment count.  */
3441       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3442 			     count, gfc_index_one_node);
3443       gfc_add_modify (&body, count, tmp);
3445       tmp = gfc_finish_block (&body);
3447       tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3448       gfc_add_expr_to_block (block, tmp);
3450   /* Free the temporary.  */
3453       tmp = gfc_call_free (ptemp1);
3454       gfc_add_expr_to_block (block, tmp);
3459 /* FORALL and WHERE statements are really nasty, especially when you nest
3460    them. All the rhs of a forall assignment must be evaluated before the
3461    actual assignments are performed. Presumably this also applies to all the
3462    assignments in an inner where statement.  */
3464 /* Generate code for a FORALL statement.  Any temporaries are allocated as a
3465    linear array, relying on the fact that we process in the same order in all
3468     forall (i=start:end:stride; maskexpr)
3472     (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3474     count = ((end + 1 - start) / stride)
3475     masktmp(:) = maskexpr(:)
3478     for (i = start; i <= end; i += stride)
3480         if (masktmp[maskindex++])
3484     for (i = start; i <= end; i += stride)
3486         if (masktmp[maskindex++])
3490     Note that this code only works when there are no dependencies.
3491     Forall loop with array assignments and data dependencies are a real pain,
3492     because the size of the temporary cannot always be determined before the
3493     loop is executed.  This problem is compounded by the presence of nested
/* NOTE(review): this is the main FORALL/DO CONCURRENT workhorse, called
   recursively for nested FORALLs.  Large parts are missing from this
   extract (return type, many declarations, switch skeleton around the
   EXEC_* cases, free() calls for the per-iteration arrays): the visible
   lines are documented, the gaps are not reconstructed.  */
3498 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3515   tree cycle_label = NULL_TREE;
3519   gfc_forall_iterator *fa;
3522   gfc_saved_var *saved_vars;
3523   iter_info *this_forall;
3527   /* Do nothing if the mask is false.  */
3529       && code->expr1->expr_type == EXPR_CONSTANT
3530       && !code->expr1->value.logical)
3531     return build_empty_stmt (input_location);
3534   /* Count the FORALL index number.  */
3535   for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3539   /* Allocate the space for var, start, end, step, varexpr.  */
3540   var = XCNEWVEC (tree, nvar);
3541   start = XCNEWVEC (tree, nvar);
3542   end = XCNEWVEC (tree, nvar);
3543   step = XCNEWVEC (tree, nvar);
3544   varexpr = XCNEWVEC (gfc_expr *, nvar);
3545   saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3547   /* Allocate the space for info.  */
3548   info = XCNEW (forall_info);
3550   gfc_start_block (&pre);
3551   gfc_init_block (&post);
3552   gfc_init_block (&block);
/* Translate each triplet (start:end:stride), shadowing the user index
   symbol with a fresh temporary for the duration of the construct.  */
3555   for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3557       gfc_symbol *sym = fa->var->symtree->n.sym;
3559       /* Allocate space for this_forall.  */
3560       this_forall = XCNEW (iter_info);
3562       /* Create a temporary variable for the FORALL index.  */
3563       tmp = gfc_typenode_for_spec (&sym->ts);
3564       var[n] = gfc_create_var (tmp, sym->name);
3565       gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3567       /* Record it in this_forall.  */
3568       this_forall->var = var[n];
3570       /* Replace the index symbol's backend_decl with the temporary decl.  */
3571       sym->backend_decl = var[n];
3573       /* Work out the start, end and stride for the loop.  */
3574       gfc_init_se (&se, NULL);
3575       gfc_conv_expr_val (&se, fa->start);
3576       /* Record it in this_forall.  */
3577       this_forall->start = se.expr;
3578       gfc_add_block_to_block (&block, &se.pre);
3581       gfc_init_se (&se, NULL);
3582       gfc_conv_expr_val (&se, fa->end);
3583       /* Record it in this_forall.  */
3584       this_forall->end = se.expr;
3585       gfc_make_safe_expr (&se);
3586       gfc_add_block_to_block (&block, &se.pre);
3589       gfc_init_se (&se, NULL);
3590       gfc_conv_expr_val (&se, fa->stride);
3591       /* Record it in this_forall.  */
3592       this_forall->step = se.expr;
3593       gfc_make_safe_expr (&se);
3594       gfc_add_block_to_block (&block, &se.pre);
3597       /* Set the NEXT field of this_forall to NULL.  */
3598       this_forall->next = NULL;
3599       /* Link this_forall to the info construct.  */
/* Append to the tail so iteration order matches source order.  */
3600       if (info->this_loop)
3602 	  iter_info *iter_tmp = info->this_loop;
3603 	  while (iter_tmp->next != NULL)
3604 	    iter_tmp = iter_tmp->next;
3605 	  iter_tmp->next = this_forall;
3608 	info->this_loop = this_forall;
3614   /* Calculate the size needed for the current forall level.  */
3615   size = gfc_index_one_node;
3616   for (n = 0; n < nvar; n++)
3618       /* size = (end + step - start) / step.  */
3619       tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3621       tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3623       tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3625       tmp = convert (gfc_array_index_type, tmp);
3627       size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3631   /* Record the nvar and size of current forall level.  */
3637   /* If the mask is .true., consider the FORALL unconditional.  */
3638   if (code->expr1->expr_type == EXPR_CONSTANT
3639       && code->expr1->value.logical)
3647       /* First we need to allocate the mask.  */
3650       /* As the mask array can be very big, prefer compact boolean types.  */
3651       tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3652       mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3653 					    size, NULL, &block, &pmask);
3654       maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3656       /* Record them in the info structure.  */
3657       info->maskindex = maskindex;
3662       /* No mask was specified.  */
3663       maskindex = NULL_TREE;
3664       mask = pmask = NULL_TREE;
3667   /* Link the current forall level to nested_forall_info.  */
3668   info->prev_nest = nested_forall_info;
3669   nested_forall_info = info;
3671   /* Copy the mask into a temporary variable if required.
3672      For now we assume a mask temporary is needed.  */
3675       /* As the mask array can be very big, prefer compact boolean types.  */
3676       tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3678       gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3680       /* Start of mask assignment loop body.  */
3681       gfc_start_block (&body);
3683       /* Evaluate the mask expression.  */
3684       gfc_init_se (&se, NULL);
3685       gfc_conv_expr_val (&se, code->expr1);
3686       gfc_add_block_to_block (&body, &se.pre);
3688       /* Store the mask.  */
3689       se.expr = convert (mask_type, se.expr);
3691       tmp = gfc_build_array_ref (mask, maskindex, NULL);
3692       gfc_add_modify (&body, tmp, se.expr);
3694       /* Advance to the next mask element.  */
3695       tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3696 			     maskindex, gfc_index_one_node);
3697       gfc_add_modify (&body, maskindex, tmp);
3699       /* Generate the loops.  */
/* mask_flag == 0: the mask-filling loop itself must not be controlled
   by the innermost mask (it is what fills it).  */
3700       tmp = gfc_finish_block (&body);
3701       tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3702       gfc_add_expr_to_block (&block, tmp);
/* DO CONCURRENT: simply translate the body once with a CYCLE label,
   no dependency analysis needed.  */
3705   if (code->op == EXEC_DO_CONCURRENT)
3707       gfc_init_block (&body);
3708       cycle_label = gfc_build_label_decl (NULL_TREE);
3709       code->cycle_label = cycle_label;
3710       tmp = gfc_trans_code (code->block->next);
3711       gfc_add_expr_to_block (&body, tmp);
3713       if (TREE_USED (cycle_label))
3715 	  tmp = build1_v (LABEL_EXPR, cycle_label);
3716 	  gfc_add_expr_to_block (&body, tmp);
3719       tmp = gfc_finish_block (&body);
3720       tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3721       gfc_add_expr_to_block (&block, tmp);
3725   c = code->block->next;
3727   /* TODO: loop merging in FORALL statements.  */
3728   /* Now that we've got a copy of the mask, generate the assignment loops.  */
/* The switch skeleton over c->op is partly missing from this extract;
   the cases below dispatch per statement kind inside the FORALL body.  */
3734 	  /* A scalar or array assignment.  DO the simple check for
3735 	     lhs to rhs dependencies.  These make a temporary for the
3736 	     rhs and form a second forall block to copy to variable.  */
3737 	  need_temp = check_forall_dependencies(c, &pre, &post);
3739 	  /* Temporaries due to array assignment data dependencies introduce
3740 	     no end of problems.  */
3742 	    gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3743 					nested_forall_info, &block);
3746 	      /* Use the normal assignment copying routines.  */
3747 	      assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3749 	      /* Generate body and loops.  */
3750 	      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3752 	      gfc_add_expr_to_block (&block, tmp);
3755 	  /* Cleanup any temporary symtrees that have been made to deal
3756 	     with dependencies.  */
3758 	    cleanup_forall_symtrees (c);
3763 	  /* Translate WHERE or WHERE construct nested in FORALL.  */
3764 	  gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3767 	  /* Pointer assignment inside FORALL.  */
3768 	case EXEC_POINTER_ASSIGN:
3769 	  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3771 	    gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3772 						nested_forall_info, &block);
3775 	      /* Use the normal assignment copying routines.  */
3776 	      assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3778 	      /* Generate body and loops.  */
3779 	      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3781 	      gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the extended nest info.  */
3786 	  tmp = gfc_trans_forall_1 (c, nested_forall_info);
3787 	  gfc_add_expr_to_block (&block, tmp);
3790 	/* Explicit subroutine calls are prevented by the frontend but interface
3791 	   assignments can legitimately produce them.  */
3792 	case EXEC_ASSIGN_CALL:
3793 	  assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3794 	  tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3795 	  gfc_add_expr_to_block (&block, tmp);
3806   /* Restore the original index variables.  */
3807   for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3808     gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3810   /* Free the space for var, start, end, step, varexpr.  */
3818   for (this_forall = info->this_loop; this_forall;)
3820       iter_info *next = this_forall->next;
3825   /* Free the space for this forall_info.  */
3830       /* Free the temporary for the mask.  */
3831       tmp = gfc_call_free (pmask);
3832       gfc_add_expr_to_block (&block, tmp);
3835     pushdecl (maskindex);
3837   gfc_add_block_to_block (&pre, &block);
3838   gfc_add_block_to_block (&pre, &post);
3840   return gfc_finish_block (&pre);
3844 /* Translate the FORALL statement or construct. */

/* Entry point for FORALL: delegate to gfc_trans_forall_1 with a NULL
   forall_info, i.e. this is the outermost nesting level.  */
3846 tree gfc_trans_forall (gfc_code * code)
3848 return gfc_trans_forall_1 (code, NULL);
3852 /* Translate the DO CONCURRENT construct. */

/* DO CONCURRENT shares the FORALL lowering machinery; like
   gfc_trans_forall it starts with no enclosing forall_info.
   gfc_trans_forall_1 distinguishes the two via code->op
   (see the EXEC_DO_CONCURRENT handling there).  */
3854 tree gfc_trans_do_concurrent (gfc_code * code)
3856 return gfc_trans_forall_1 (code, NULL);
3860 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3861 If the WHERE construct is nested in FORALL, compute the overall temporary
3862 needed by the WHERE mask expression multiplied by the iterator number of
3864 ME is the WHERE mask expression.
3865 MASK is the current execution mask upon input, whose sense may or may
3866 not be inverted as specified by the INVERT argument.
3867 CMASK is the updated execution mask on output, or NULL if not required.
3868 PMASK is the pending execution mask on output, or NULL if not required.
3869 BLOCK is the block in which to place the condition evaluation loops. */
3872 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3873 tree mask, bool invert, tree cmask, tree pmask,
3874 tree mask_type, stmtblock_t * block)
3879 stmtblock_t body, body1;
3880 tree count, cond, mtmp;
3883 gfc_init_loopinfo (&loop);
/* Both chains walk the same mask expression ME: RSS drives the actual
   evaluation, LSS is only consulted for the shape (see below).  */
3885 lss = gfc_walk_expr (me);
3886 rss = gfc_walk_expr (me);
3888 /* Variable to index the temporary. */
3889 count = gfc_create_var (gfc_array_index_type, "count");
3890 /* Initialize count. */
3891 gfc_add_modify (block, count, gfc_index_zero_node);
3893 gfc_start_block (&body);
3895 gfc_init_se (&rse, NULL);
3896 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is needed.  */
3898 if (lss == gfc_ss_terminator)
3900 gfc_init_block (&body1);
3904 /* Initialize the loop. */
3905 gfc_init_loopinfo (&loop);
3907 /* We may need LSS to determine the shape of the expression. */
3908 gfc_add_ss_to_loop (&loop, lss);
3909 gfc_add_ss_to_loop (&loop, rss);
3911 gfc_conv_ss_startstride (&loop);
3912 gfc_conv_loop_setup (&loop, &me->where);
3914 gfc_mark_ss_chain_used (rss, 1);
3915 /* Start the loop body. */
3916 gfc_start_scalarized_body (&loop, &body1);
3918 /* Translate the expression. */
3919 gfc_copy_loopinfo_to_se (&rse, &loop);
3921 gfc_conv_expr (&rse, me);
3924 /* Variable to evaluate mask condition. */
3925 cond = gfc_create_var (mask_type, "cond");
/* mtmp caches the incoming execution-mask element so it is read only
   once even when both CMASK and PMASK are updated below.  */
3926 if (mask && (cmask || pmask))
3927 mtmp = gfc_create_var (mask_type, "mask");
3928 else mtmp = NULL_TREE;
3930 gfc_add_block_to_block (&body1, &lse.pre);
3931 gfc_add_block_to_block (&body1, &rse.pre);
/* COND <- current element of the mask expression ME.  */
3933 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3935 if (mask && (cmask || pmask))
3937 tmp = gfc_build_array_ref (mask, count, NULL);
/* NOTE(review): this negation should apply only when INVERT is set;
   the guarding conditional is not shown here -- confirm in context.  */
3939 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3940 gfc_add_modify (&body1, mtmp, tmp);
/* CMASK element = (incoming mask) AND (mask expression value).  */
3945 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3948 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3950 gfc_add_modify (&body1, tmp1, tmp);
/* PMASK element = (incoming mask) AND NOT (mask expression value),
   i.e. the elements still pending for later ELSEWHERE clauses.  */
3955 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3956 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3958 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3960 gfc_add_modify (&body1, tmp1, tmp);
3963 gfc_add_block_to_block (&body1, &lse.post);
3964 gfc_add_block_to_block (&body1, &rse.post);
3966 if (lss == gfc_ss_terminator)
3968 gfc_add_block_to_block (&body, &body1);
3972 /* Increment count. */
3973 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3974 count, gfc_index_one_node);
3975 gfc_add_modify (&body1, count, tmp1);
3977 /* Generate the copying loops. */
3978 gfc_trans_scalarizing_loops (&loop, &body1);
3980 gfc_add_block_to_block (&body, &loop.pre);
3981 gfc_add_block_to_block (&body, &loop.post);
3983 gfc_cleanup_loop (&loop);
3984 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3985 as tree nodes in SS may not be valid in different scope. */
3988 tmp1 = gfc_finish_block (&body);
3989 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3990 if (nested_forall_info != NULL)
3991 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3993 gfc_add_expr_to_block (block, tmp1);
3997 /* Translate an assignment statement in a WHERE statement or construct
3998 statement. The MASK expression is used to control which elements
3999 of EXPR1 shall be assigned. The sense of MASK is specified by
INVERT.  COUNT1 and COUNT2 are the linear indices into the mask
temporary: COUNT1 for the main evaluation loop and COUNT2 for the
temp-to-lhs copy loop when a temporary is needed.  Returns the
translated statement tree.  */
4003 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
4004 tree mask, bool invert,
4005 tree count1, tree count2,
4011 gfc_ss *lss_section;
4018 tree index, maskexpr;
4020 /* A defined assignment. */
4021 if (cnext && cnext->resolved_sym)
4022 return gfc_trans_call (cnext, true, mask, count1, invert);
4025 /* TODO: handle this special case.
4026 Special case a single function returning an array. */
4027 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
4029 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
4035 /* Assignment of the form lhs = rhs. */
4036 gfc_start_block (&block);
4038 gfc_init_se (&lse, NULL);
4039 gfc_init_se (&rse, NULL);
4042 lss = gfc_walk_expr (expr1);
4045 /* In each where-assign-stmt, the mask-expr and the variable being
4046 defined shall be arrays of the same shape. */
4047 gcc_assert (lss != gfc_ss_terminator);
4049 /* The assignment needs scalarization. */
4052 /* Find a non-scalar SS from the lhs. */
4053 while (lss_section != gfc_ss_terminator
4054 && lss_section->info->type != GFC_SS_SECTION)
4055 lss_section = lss_section->next;
4057 gcc_assert (lss_section != gfc_ss_terminator);
4059 /* Initialize the scalarizer. */
4060 gfc_init_loopinfo (&loop);
4063 rss = gfc_walk_expr (expr2);
4064 if (rss == gfc_ss_terminator)
4066 /* The rhs is scalar. Add a ss for the expression. */
4067 rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
4068 rss->info->where = 1;
4071 /* Associate the SS with the loop. */
4072 gfc_add_ss_to_loop (&loop, lss);
4073 gfc_add_ss_to_loop (&loop, rss);
4075 /* Calculate the bounds of the scalarization. */
4076 gfc_conv_ss_startstride (&loop);
4078 /* Resolve any data dependencies in the statement. */
4079 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
4081 /* Setup the scalarizing loops. */
4082 gfc_conv_loop_setup (&loop, &expr2->where);
4084 /* Setup the gfc_se structures. */
4085 gfc_copy_loopinfo_to_se (&lse, &loop);
4086 gfc_copy_loopinfo_to_se (&rse, &loop);
4089 gfc_mark_ss_chain_used (rss, 1);
/* When dependency resolution created no temporary, the lhs is written
   directly; otherwise route the store through loop.temp_ss.  */
4090 if (loop.temp_ss == NULL)
4093 gfc_mark_ss_chain_used (lss, 1);
4097 lse.ss = loop.temp_ss;
4098 gfc_mark_ss_chain_used (lss, 3);
4099 gfc_mark_ss_chain_used (loop.temp_ss, 3);
4102 /* Start the scalarized loop body. */
4103 gfc_start_scalarized_body (&loop, &body);
4105 /* Translate the expression. */
4106 gfc_conv_expr (&rse, expr2);
4107 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
4108 gfc_conv_tmp_array_ref (&lse);
4110 gfc_conv_expr (&lse, expr1);
4112 /* Form the mask expression according to the mask. */
4114 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* NOTE(review): the logical NOT below is presumably guarded by INVERT;
   the conditional is not visible in this excerpt -- confirm.  */
4116 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4117 TREE_TYPE (maskexpr), maskexpr);
4119 /* Use the scalar assignment as is. */
4120 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
4121 loop.temp_ss != NULL, false, true);
/* Only assign the element when the mask entry is true.  */
4123 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
4125 gfc_add_expr_to_block (&body, tmp);
4127 if (lss == gfc_ss_terminator)
4129 /* Increment count1. */
4130 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4131 count1, gfc_index_one_node);
4132 gfc_add_modify (&body, count1, tmp);
4134 /* Use the scalar assignment as is. */
4135 gfc_add_block_to_block (&block, &body);
4139 gcc_assert (lse.ss == gfc_ss_terminator
4140 && rse.ss == gfc_ss_terminator);
4142 if (loop.temp_ss != NULL)
4144 /* Increment count1 before finish the main body of a scalarized
4146 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4147 gfc_array_index_type, count1, gfc_index_one_node);
4148 gfc_add_modify (&body, count1, tmp);
4149 gfc_trans_scalarized_loop_boundary (&loop, &body);
4151 /* We need to copy the temporary to the actual lhs. */
4152 gfc_init_se (&lse, NULL);
4153 gfc_init_se (&rse, NULL);
4154 gfc_copy_loopinfo_to_se (&lse, &loop);
4155 gfc_copy_loopinfo_to_se (&rse, &loop);
4157 rse.ss = loop.temp_ss;
4160 gfc_conv_tmp_array_ref (&rse);
4161 gfc_conv_expr (&lse, expr1);
4163 gcc_assert (lse.ss == gfc_ss_terminator
4164 && rse.ss == gfc_ss_terminator);
4166 /* Form the mask expression according to the mask tree list. */
4168 maskexpr = gfc_build_array_ref (mask, index, NULL);
4170 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4171 TREE_TYPE (maskexpr), maskexpr);
4173 /* Use the scalar assignment as is. */
4174 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
4176 tmp = build3_v (COND_EXPR, maskexpr, tmp,
4177 build_empty_stmt (input_location));
4178 gfc_add_expr_to_block (&body, tmp);
4180 /* Increment count2. */
4181 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4182 gfc_array_index_type, count2,
4183 gfc_index_one_node);
4184 gfc_add_modify (&body, count2, tmp);
4188 /* Increment count1. */
4189 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4190 gfc_array_index_type, count1,
4191 gfc_index_one_node);
4192 gfc_add_modify (&body, count1, tmp);
4195 /* Generate the copying loops. */
4196 gfc_trans_scalarizing_loops (&loop, &body);
4198 /* Wrap the whole thing up. */
4199 gfc_add_block_to_block (&block, &loop.pre);
4200 gfc_add_block_to_block (&block, &loop.post);
4201 gfc_cleanup_loop (&loop);
4204 return gfc_finish_block (&block);
4208 /* Translate the WHERE construct or statement.
4209 This function can be called iteratively to translate the nested WHERE
4210 construct or statement.
4211 MASK is the control mask. */
4214 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
4215 forall_info * nested_forall_info, stmtblock_t * block)
4217 stmtblock_t inner_size_body;
4218 tree inner_size, size;
4227 tree count1, count2;
/* pcmask/ppmask remember mask arrays that were heap-allocated so they
   can be freed at the end of the function (see gfc_call_free below).  */
4231 tree pcmask = NULL_TREE;
4232 tree ppmask = NULL_TREE;
4233 tree cmask = NULL_TREE;
4234 tree pmask = NULL_TREE;
4235 gfc_actual_arglist *arg;
4237 /* the WHERE statement or the WHERE construct statement. */
4238 cblock = code->block;
4240 /* As the mask array can be very big, prefer compact boolean types. */
4241 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
4243 /* Determine which temporary masks are needed. */
4246 /* One clause: No ELSEWHEREs. */
4247 need_cmask = (cblock->next != 0);
4250 else if (cblock->block->block)
4252 /* Three or more clauses: Conditional ELSEWHEREs. */
4256 else if (cblock->next)
4258 /* Two clauses, the first non-empty. */
4260 need_pmask = (mask != NULL_TREE
4261 && cblock->block->next != 0);
4263 else if (!cblock->block->next)
4265 /* Two clauses, both empty. */
4269 /* Two clauses, the first empty, the second non-empty. */
4272 need_cmask = (cblock->block->expr1 != 0);
4281 if (need_cmask || need_pmask)
4283 /* Calculate the size of temporary needed by the mask-expr. */
4284 gfc_init_block (&inner_size_body);
4285 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4286 &inner_size_body, &lss, &rss)
4288 gfc_free_ss_chain (lss);
4289 gfc_free_ss_chain (rss);
4291 /* Calculate the total size of temporary needed. */
4292 size = compute_overall_iter_number (nested_forall_info, inner_size,
4293 &inner_size_body, block);
4295 /* Check whether the size is negative. */
4296 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4297 gfc_index_zero_node);
/* Clamp a non-positive extent to zero before allocating.  */
4298 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4299 cond, gfc_index_zero_node, size);
4300 size = gfc_evaluate_now (size, block);
4302 /* Allocate temporary for WHERE mask if needed. */
4304 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4307 /* Allocate temporary for !mask if needed. */
4309 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4315 /* Each time around this loop, the where clause is conditional
4316 on the value of mask and invert, which are updated at the
4317 bottom of the loop. */
4319 /* Has mask-expr. */
4322 /* Ensure that the WHERE mask will be evaluated exactly once.
4323 If there are no statements in this WHERE/ELSEWHERE clause,
4324 then we don't need to update the control mask (cmask).
4325 If this is the last clause of the WHERE construct, then
4326 we don't need to update the pending control mask (pmask). */
4328 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4330 cblock->next ? cmask : NULL_TREE,
4331 cblock->block ? pmask : NULL_TREE,
4334 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4336 (cblock->next || cblock->block)
4337 ? cmask : NULL_TREE,
4338 NULL_TREE, mask_type, block);
4342 /* It's a final elsewhere-stmt. No mask-expr is present. */
4346 /* The body of this where clause are controlled by cmask with
4347 sense specified by invert. */
4349 /* Get the assignment statement of a WHERE statement, or the first
4350 statement in where-body-construct of a WHERE construct. */
4351 cnext = cblock->next;
4356 /* WHERE assignment statement. */
4357 case EXEC_ASSIGN_CALL:
/* Defined assignment: recover lhs/rhs from the actual-argument list
   of the resolved assignment subroutine call.  */
4359 arg = cnext->ext.actual;
4360 expr1 = expr2 = NULL;
4361 for (; arg; arg = arg->next)
4373 expr1 = cnext->expr1;
4374 expr2 = cnext->expr2;
4376 if (nested_forall_info != NULL)
4378 need_temp = gfc_check_dependency (expr1, expr2, 0);
4379 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4380 gfc_trans_assign_need_temp (expr1, expr2,
4382 nested_forall_info, block);
4385 /* Variables to control maskexpr. */
4386 count1 = gfc_create_var (gfc_array_index_type, "count1");
4387 count2 = gfc_create_var (gfc_array_index_type, "count2");
4388 gfc_add_modify (block, count1, gfc_index_zero_node);
4389 gfc_add_modify (block, count2, gfc_index_zero_node);
4391 tmp = gfc_trans_where_assign (expr1, expr2,
/* Inside FORALL the masked assignment must itself be wrapped in the
   surrounding forall iteration loops.  */
4396 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4398 gfc_add_expr_to_block (block, tmp);
4403 /* Variables to control maskexpr. */
4404 count1 = gfc_create_var (gfc_array_index_type, "count1");
4405 count2 = gfc_create_var (gfc_array_index_type, "count2");
4406 gfc_add_modify (block, count1, gfc_index_zero_node);
4407 gfc_add_modify (block, count2, gfc_index_zero_node);
4409 tmp = gfc_trans_where_assign (expr1, expr2,
4413 gfc_add_expr_to_block (block, tmp);
4418 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recurse: the nested WHERE runs under this clause's control mask.  */
4420 gfc_trans_where_2 (cnext, cmask, invert,
4421 nested_forall_info, block);
4428 /* The next statement within the same where-body-construct. */
4429 cnext = cnext->next;
4431 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4432 cblock = cblock->block;
4433 if (mask == NULL_TREE)
4435 /* If we're the initial WHERE, we can simply invert the sense
4436 of the current mask to obtain the "mask" for the remaining
4443 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4449 /* If we allocated a pending mask array, deallocate it now. */
4452 tmp = gfc_call_free (ppmask);
4453 gfc_add_expr_to_block (block, tmp);
4456 /* If we allocated a current mask array, deallocate it now. */
4459 tmp = gfc_call_free (pcmask);
4460 gfc_add_expr_to_block (block, tmp);
4464 /* Translate a simple WHERE construct or statement without dependencies.
4465 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4466 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4467 Currently both CBLOCK and EBLOCK are restricted to single assignments.
Fast path: both assignments and the condition run in a single
scalarized loop, so no mask temporary is ever allocated.  */
4470 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4472 stmtblock_t block, body;
4473 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4474 tree tmp, cexpr, tstmt, estmt;
4475 gfc_ss *css, *tdss, *tsss;
4476 gfc_se cse, tdse, tsse, edse, esse;
4481 /* Allow the scalarizer to workshare simple where loops. */
4482 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4483 ompws_flags |= OMPWS_SCALARIZER_WS;
/* t* = then-clause (dst/src), e* = else-clause; e* stay NULL when
   there is no ELSEWHERE block.  */
4485 cond = cblock->expr1;
4486 tdst = cblock->next->expr1;
4487 tsrc = cblock->next->expr2;
4488 edst = eblock ? eblock->next->expr1 : NULL;
4489 esrc = eblock ? eblock->next->expr2 : NULL;
4491 gfc_start_block (&block);
4492 gfc_init_loopinfo (&loop);
4494 /* Handle the condition. */
4495 gfc_init_se (&cse, NULL);
4496 css = gfc_walk_expr (cond);
4497 gfc_add_ss_to_loop (&loop, css);
4499 /* Handle the then-clause. */
4500 gfc_init_se (&tdse, NULL);
4501 gfc_init_se (&tsse, NULL);
4502 tdss = gfc_walk_expr (tdst);
4503 tsss = gfc_walk_expr (tsrc);
4504 if (tsss == gfc_ss_terminator)
/* Scalar rhs: wrap it in a scalar SS flagged as WHERE context.  */
4506 tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
4507 tsss->info->where = 1;
4509 gfc_add_ss_to_loop (&loop, tdss);
4510 gfc_add_ss_to_loop (&loop, tsss);
4514 /* Handle the else clause. */
4515 gfc_init_se (&edse, NULL);
4516 gfc_init_se (&esse, NULL);
4517 edss = gfc_walk_expr (edst);
4518 esss = gfc_walk_expr (esrc);
4519 if (esss == gfc_ss_terminator)
4521 esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
4522 esss->info->where = 1;
4524 gfc_add_ss_to_loop (&loop, edss);
4525 gfc_add_ss_to_loop (&loop, esss);
4528 gfc_conv_ss_startstride (&loop);
4529 gfc_conv_loop_setup (&loop, &tdst->where);
4531 gfc_mark_ss_chain_used (css, 1);
4532 gfc_mark_ss_chain_used (tdss, 1);
4533 gfc_mark_ss_chain_used (tsss, 1);
4536 gfc_mark_ss_chain_used (edss, 1);
4537 gfc_mark_ss_chain_used (esss, 1);
4540 gfc_start_scalarized_body (&loop, &body);
4542 gfc_copy_loopinfo_to_se (&cse, &loop);
4543 gfc_copy_loopinfo_to_se (&tdse, &loop);
4544 gfc_copy_loopinfo_to_se (&tsse, &loop);
4550 gfc_copy_loopinfo_to_se (&edse, &loop);
4551 gfc_copy_loopinfo_to_se (&esse, &loop);
4556 gfc_conv_expr (&cse, cond);
4557 gfc_add_block_to_block (&body, &cse.pre);
4560 gfc_conv_expr (&tsse, tsrc);
4561 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4562 gfc_conv_tmp_array_ref (&tdse);
4564 gfc_conv_expr (&tdse, tdst);
4568 gfc_conv_expr (&esse, esrc);
4569 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4570 gfc_conv_tmp_array_ref (&edse);
4572 gfc_conv_expr (&edse, edst);
/* Per-element: if (cond) then-assign else else-assign (or no-op).  */
4575 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4576 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4578 : build_empty_stmt (input_location);
4579 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4580 gfc_add_expr_to_block (&body, tmp);
4581 gfc_add_block_to_block (&body, &cse.post);
4583 gfc_trans_scalarizing_loops (&loop, &body);
4584 gfc_add_block_to_block (&block, &loop.pre);
4585 gfc_add_block_to_block (&block, &loop.post);
4586 gfc_cleanup_loop (&loop);
4588 return gfc_finish_block (&block);
4591 /* As the WHERE or WHERE construct statement can be nested, we call
4592 gfc_trans_where_2 to do the translation, and pass the initial
4593 NULL values for both the control mask and the pending control mask.
Dispatcher: if the construct is a single dependence-free assignment
(optionally with a single dependence-free ELSEWHERE assignment), use
the fast single-loop path gfc_trans_where_3; otherwise fall through
to the general mask-temporary path gfc_trans_where_2.  */
4596 gfc_trans_where (gfc_code * code)
4602 cblock = code->block;
4604 && cblock->next->op == EXEC_ASSIGN
4605 && !cblock->next->next)
4607 eblock = cblock->block;
4610 /* A simple "WHERE (cond) x = y" statement or block is
4611 dependence free if cond is not dependent upon writing x,
4612 and the source y is unaffected by the destination x. */
4613 if (!gfc_check_dependency (cblock->next->expr1,
4615 && !gfc_check_dependency (cblock->next->expr1,
4616 cblock->next->expr2, 0))
4617 return gfc_trans_where_3 (cblock, NULL);
4619 else if (!eblock->expr1
4622 && eblock->next->op == EXEC_ASSIGN
4623 && !eblock->next->next)
4625 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4626 block is dependence free if cond is not dependent on writes
4627 to x1 and x2, y1 is not dependent on writes to x2, and y2
4628 is not dependent on writes to x1, and both y's are not
4629 dependent upon their own x's. In addition to this, the
4630 final two dependency checks below exclude all but the same
4631 array reference if the where and elswhere destinations
4632 are the same. In short, this is VERY conservative and this
4633 is needed because the two loops, required by the standard
4634 are coalesced in gfc_trans_where_3. */
4635 if (!gfc_check_dependency(cblock->next->expr1,
4637 && !gfc_check_dependency(eblock->next->expr1,
4639 && !gfc_check_dependency(cblock->next->expr1,
4640 eblock->next->expr2, 1)
4641 && !gfc_check_dependency(eblock->next->expr1,
4642 cblock->next->expr2, 1)
4643 && !gfc_check_dependency(cblock->next->expr1,
4644 cblock->next->expr2, 1)
4645 && !gfc_check_dependency(eblock->next->expr1,
4646 eblock->next->expr2, 1)
4647 && !gfc_check_dependency(cblock->next->expr1,
4648 eblock->next->expr1, 0)
4649 && !gfc_check_dependency(eblock->next->expr1,
4650 cblock->next->expr1, 0))
4651 return gfc_trans_where_3 (cblock, eblock);
/* General case: translate with NULL initial control/pending masks.  */
4655 gfc_start_block (&block);
4657 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4659 return gfc_finish_block (&block);
4663 /* CYCLE a DO loop. The label decl has already been created by
4664 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4665 node at the head of the loop. We must mark the label as used. */
4668 gfc_trans_cycle (gfc_code * code)
/* Fetch the cycle label of the construct this CYCLE refers to; the
   resolver guarantees which_construct is set.  */
4672 cycle_label = code->ext.which_construct->cycle_label;
4673 gcc_assert (cycle_label);
/* Mark the label used so the LABEL_EXPR emitted at the loop bottom
   is not discarded.  */
4675 TREE_USED (cycle_label) = 1;
4676 return build1_v (GOTO_EXPR, cycle_label);
4680 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4681 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
loop, and we jump past the loop rather than to its bottom.  */
4685 gfc_trans_exit (gfc_code * code)
/* Fetch the exit label of the referenced construct (set by resolution).  */
4689 exit_label = code->ext.which_construct->exit_label;
4690 gcc_assert (exit_label);
/* Mark the label used so its LABEL_EXPR after the loop is kept.  */
4692 TREE_USED (exit_label) = 1;
4693 return build1_v (GOTO_EXPR, exit_label);
4697 /* Translate the ALLOCATE statement. */
4700 gfc_trans_allocate (gfc_code * code)
4721 if (!code->ext.alloc.list)
4724 stat = tmp = memsz = NULL_TREE;
4725 label_errmsg = label_finish = errmsg = errlen = NULL_TREE;
4727 gfc_init_block (&block);
4728 gfc_init_block (&post);
4730 /* STAT= (and maybe ERRMSG=) is present. */
4734 tree gfc_int4_type_node = gfc_get_int_type (4);
4735 stat = gfc_create_var (gfc_int4_type_node, "stat");
4737 /* ERRMSG= only makes sense with STAT=. */
4740 gfc_init_se (&se, NULL);
4741 gfc_conv_expr_lhs (&se, code->expr2);
4743 errlen = gfc_get_expr_charlen (code->expr2);
4744 errmsg = gfc_build_addr_expr (pchar_type_node, se.expr);
4748 errmsg = null_pointer_node;
4749 errlen = build_int_cst (gfc_charlen_type_node, 0);
4752 /* GOTO destinations. */
4753 label_errmsg = gfc_build_label_decl (NULL_TREE);
4754 label_finish = gfc_build_label_decl (NULL_TREE);
4755 TREE_USED (label_errmsg) = 1;
4756 TREE_USED (label_finish) = 1;
4762 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4764 expr = gfc_copy_expr (al->expr);
4766 if (expr->ts.type == BT_CLASS)
4767 gfc_add_data_component (expr);
4769 gfc_init_se (&se, NULL);
4771 se.want_pointer = 1;
4772 se.descriptor_only = 1;
4773 gfc_conv_expr (&se, expr);
4775 if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, code->expr3))
4777 /* A scalar or derived type. */
4779 /* Determine allocate size. */
4780 if (al->expr->ts.type == BT_CLASS && code->expr3)
4782 if (code->expr3->ts.type == BT_CLASS)
4784 sz = gfc_copy_expr (code->expr3);
4785 gfc_add_vptr_component (sz);
4786 gfc_add_size_component (sz);
4787 gfc_init_se (&se_sz, NULL);
4788 gfc_conv_expr (&se_sz, sz);
4793 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4795 else if (al->expr->ts.type == BT_CHARACTER
4796 && al->expr->ts.deferred && code->expr3)
4798 if (!code->expr3->ts.u.cl->backend_decl)
4800 /* Convert and use the length expression. */
4801 gfc_init_se (&se_sz, NULL);
4802 if (code->expr3->expr_type == EXPR_VARIABLE
4803 || code->expr3->expr_type == EXPR_CONSTANT)
4805 gfc_conv_expr (&se_sz, code->expr3);
4806 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4808 = gfc_evaluate_now (se_sz.string_length, &se.pre);
4809 gfc_add_block_to_block (&se.pre, &se_sz.post);
4810 memsz = se_sz.string_length;
4812 else if (code->expr3->mold
4813 && code->expr3->ts.u.cl
4814 && code->expr3->ts.u.cl->length)
4816 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
4817 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4818 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4819 gfc_add_block_to_block (&se.pre, &se_sz.post);
4824 /* This is would be inefficient and possibly could
4825 generate wrong code if the result were not stored
4827 if (slen3 == NULL_TREE)
4829 gfc_conv_expr (&se_sz, code->expr3);
4830 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4831 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
4832 gfc_add_block_to_block (&post, &se_sz.post);
4833 slen3 = gfc_evaluate_now (se_sz.string_length,
4840 /* Otherwise use the stored string length. */
4841 memsz = code->expr3->ts.u.cl->backend_decl;
4842 tmp = al->expr->ts.u.cl->backend_decl;
4844 /* Store the string length. */
4845 if (tmp && TREE_CODE (tmp) == VAR_DECL)
4846 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4849 /* Convert to size in bytes, using the character KIND. */
4850 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
4851 tmp = TYPE_SIZE_UNIT (tmp);
4852 memsz = fold_build2_loc (input_location, MULT_EXPR,
4853 TREE_TYPE (tmp), tmp,
4854 fold_convert (TREE_TYPE (tmp), memsz));
4856 else if (al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
4858 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
4859 gfc_init_se (&se_sz, NULL);
4860 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
4861 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4862 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4863 gfc_add_block_to_block (&se.pre, &se_sz.post);
4864 /* Store the string length. */
4865 tmp = al->expr->ts.u.cl->backend_decl;
4866 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4868 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4869 tmp = TYPE_SIZE_UNIT (tmp);
4870 memsz = fold_build2_loc (input_location, MULT_EXPR,
4871 TREE_TYPE (tmp), tmp,
4872 fold_convert (TREE_TYPE (se_sz.expr),
4875 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4876 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4878 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4880 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4882 memsz = se.string_length;
4884 /* Convert to size in bytes, using the character KIND. */
4885 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4886 tmp = TYPE_SIZE_UNIT (tmp);
4887 memsz = fold_build2_loc (input_location, MULT_EXPR,
4888 TREE_TYPE (tmp), tmp,
4889 fold_convert (TREE_TYPE (tmp), memsz));
4892 /* Allocate - for non-pointers with re-alloc checking. */
4893 if (gfc_expr_attr (expr).allocatable)
4894 gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
4895 stat, errmsg, errlen, expr);
4897 gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);
4899 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4901 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4902 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4903 gfc_add_expr_to_block (&se.pre, tmp);
4905 else if (al->expr->ts.type == BT_CLASS && code->expr3)
4907 /* With class objects, it is best to play safe and null the
4908 memory because we cannot know if dynamic types have allocatable
4909 components or not. */
4910 tmp = build_call_expr_loc (input_location,
4911 builtin_decl_explicit (BUILT_IN_MEMSET),
4912 3, se.expr, integer_zero_node, memsz);
4913 gfc_add_expr_to_block (&se.pre, tmp);
4917 gfc_add_block_to_block (&block, &se.pre);
4919 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
4922 /* The coarray library already sets the errmsg. */
4923 if (gfc_option.coarray == GFC_FCOARRAY_LIB
4924 && gfc_expr_attr (expr).codimension)
4925 tmp = build1_v (GOTO_EXPR, label_finish);
4927 tmp = build1_v (GOTO_EXPR, label_errmsg);
4929 parm = fold_build2_loc (input_location, NE_EXPR,
4930 boolean_type_node, stat,
4931 build_int_cst (TREE_TYPE (stat), 0));
4932 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4933 gfc_unlikely(parm), tmp,
4934 build_empty_stmt (input_location));
4935 gfc_add_expr_to_block (&block, tmp);
4938 /* We need the vptr of CLASS objects to be initialized. */
4939 e = gfc_copy_expr (al->expr);
4940 if (e->ts.type == BT_CLASS)
4945 lhs = gfc_expr_to_initialize (e);
4946 gfc_add_vptr_component (lhs);
4948 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4950 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4951 rhs = gfc_copy_expr (code->expr3);
4952 gfc_add_vptr_component (rhs);
4953 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4954 gfc_add_expr_to_block (&block, tmp);
4955 gfc_free_expr (rhs);
4956 rhs = gfc_expr_to_initialize (e);
4960 /* VPTR is fixed at compile time. */
4964 ts = &code->expr3->ts;
4965 else if (e->ts.type == BT_DERIVED)
4967 else if (code->ext.alloc.ts.type == BT_DERIVED)
4968 ts = &code->ext.alloc.ts;
4969 else if (e->ts.type == BT_CLASS)
4970 ts = &CLASS_DATA (e)->ts;
4974 if (ts->type == BT_DERIVED)
4976 vtab = gfc_find_derived_vtab (ts->u.derived);
4978 gfc_init_se (&lse, NULL);
4979 lse.want_pointer = 1;
4980 gfc_conv_expr (&lse, lhs);
4981 tmp = gfc_build_addr_expr (NULL_TREE,
4982 gfc_get_symbol_decl (vtab));
4983 gfc_add_modify (&block, lse.expr,
4984 fold_convert (TREE_TYPE (lse.expr), tmp));
4987 gfc_free_expr (lhs);
4992 if (code->expr3 && !code->expr3->mold)
4994 /* Initialization via SOURCE block
4995 (or static default initializer). */
4996 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4997 if (al->expr->ts.type == BT_CLASS)
4999 gfc_actual_arglist *actual;
5004 /* Do a polymorphic deep copy. */
5005 actual = gfc_get_actual_arglist ();
5006 actual->expr = gfc_copy_expr (rhs);
5007 if (rhs->ts.type == BT_CLASS)
5008 gfc_add_data_component (actual->expr);
5009 actual->next = gfc_get_actual_arglist ();
5010 actual->next->expr = gfc_copy_expr (al->expr);
5011 actual->next->expr->ts.type = BT_CLASS;
5012 gfc_add_data_component (actual->next->expr);
5013 dataref = actual->next->expr->ref;
5014 if (dataref->u.c.component->as)
5018 gfc_ref *ref = dataref->next;
5019 ref->u.ar.type = AR_SECTION;
5020 /* We have to set up the array reference to give ranges
5021 in all dimensions and ensure that the end and stride
5022 are set so that the copy can be scalarized. */
5024 for (; dim < dataref->u.c.component->as->rank; dim++)
5026 ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
5027 if (ref->u.ar.end[dim] == NULL)
5029 ref->u.ar.end[dim] = ref->u.ar.start[dim];
5030 temp = gfc_get_int_expr (gfc_default_integer_kind,
5031 &al->expr->where, 1);
5032 ref->u.ar.start[dim] = temp;
5034 temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
5035 gfc_copy_expr (ref->u.ar.start[dim]));
5036 temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
5037 &al->expr->where, 1),
5041 if (rhs->ts.type == BT_CLASS)
5043 ppc = gfc_copy_expr (rhs);
5044 gfc_add_vptr_component (ppc);
5047 ppc = gfc_lval_expr_from_sym
5048 (gfc_find_derived_vtab (rhs->ts.u.derived));
5049 gfc_add_component_ref (ppc, "_copy");
5051 ppc_code = gfc_get_code ();
5052 ppc_code->resolved_sym = ppc->symtree->n.sym;
5053 /* Although '_copy' is set to be elemental in class.c, it is
5054 not staying that way. Find out why, sometime.... */
5055 ppc_code->resolved_sym->attr.elemental = 1;
5056 ppc_code->ext.actual = actual;
5057 ppc_code->expr1 = ppc;
5058 ppc_code->op = EXEC_CALL;
5059 /* Since '_copy' is elemental, the scalarizer will take care
5060 of arrays in gfc_trans_call. */
5061 tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
5062 gfc_free_statements (ppc_code);
5064 else if (expr3 != NULL_TREE)
5066 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5067 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
5068 slen3, expr3, code->expr3->ts.kind);
5073 /* Switch off automatic reallocation since we have just done
5075 int realloc_lhs = gfc_option.flag_realloc_lhs;
5076 gfc_option.flag_realloc_lhs = 0;
5077 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
5079 gfc_option.flag_realloc_lhs = realloc_lhs;
5081 gfc_free_expr (rhs);
5082 gfc_add_expr_to_block (&block, tmp);
5084 else if (code->expr3 && code->expr3->mold
5085 && code->expr3->ts.type == BT_CLASS)
5087 /* Default-initialization via MOLD (polymorphic). */
5088 gfc_expr *rhs = gfc_copy_expr (code->expr3);
5090 gfc_add_vptr_component (rhs);
5091 gfc_add_def_init_component (rhs);
5092 gfc_init_se (&dst, NULL);
5093 gfc_init_se (&src, NULL);
5094 gfc_conv_expr (&dst, expr);
5095 gfc_conv_expr (&src, rhs);
5096 gfc_add_block_to_block (&block, &src.pre);
5097 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
5098 gfc_add_expr_to_block (&block, tmp);
5099 gfc_free_expr (rhs);
5102 gfc_free_expr (expr);
5105 /* STAT (ERRMSG only makes sense with STAT). */
5108 tmp = build1_v (LABEL_EXPR, label_errmsg);
5109 gfc_add_expr_to_block (&block, tmp);
5115 /* A better error message may be possible, but not required. */
5116 const char *msg = "Attempt to allocate an allocated object";
5119 gfc_init_se (&se, NULL);
5120 gfc_conv_expr_lhs (&se, code->expr2);
5122 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
5124 gfc_add_modify (&block, errmsg,
5125 gfc_build_addr_expr (pchar_type_node,
5126 gfc_build_localized_cstring_const (msg)));
5128 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5129 dlen = gfc_get_expr_charlen (code->expr2);
5130 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5133 dlen = build_call_expr_loc (input_location,
5134 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
5135 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
5137 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5138 build_int_cst (TREE_TYPE (stat), 0));
5140 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5142 gfc_add_expr_to_block (&block, tmp);
5145 /* STAT (ERRMSG only makes sense with STAT). */
5148 tmp = build1_v (LABEL_EXPR, label_finish);
5149 gfc_add_expr_to_block (&block, tmp);
5155 gfc_init_se (&se, NULL);
5156 gfc_conv_expr_lhs (&se, code->expr1);
5157 tmp = convert (TREE_TYPE (se.expr), stat);
5158 gfc_add_modify (&block, se.expr, tmp);
5161 gfc_add_block_to_block (&block, &se.post);
5162 gfc_add_block_to_block (&block, &post);
5164 return gfc_finish_block (&block);
5168 /* Translate a DEALLOCATE statement. */
/* Builds a GENERIC statement tree from code->ext.alloc.list.  For each
   object it: releases allocatable components of derived types where
   appropriate, frees the array or scalar storage, zeroes the pointer,
   and resets the _vptr of CLASS objects to the declared type's vtab.
   The optional STAT= (code->expr1) and ERRMSG= (code->expr2) specifiers
   are serviced via the local 'stat'/'astat' counters below.
   NOTE(review): several interior lines (local declarations, closing
   braces) are missing from this extracted view; comments describe only
   what the surviving lines establish.  */
5171 gfc_trans_deallocate (gfc_code *code)
5175 tree apstat, astat, pstat, stat, tmp;
5178 pstat = apstat = stat = astat = tmp = NULL_TREE;
5180 gfc_start_block (&block);
5182 /* Count the number of failed deallocations. If deallocate() was
5183 called with STAT= , then set STAT to the count. If deallocate
5184 was called with ERRMSG, then set ERRMSG to a string. */
5185 if (code->expr1 || code->expr2)
5187 tree gfc_int4_type_node = gfc_get_int_type (4);
/* 'stat' receives the status of the most recent deallocation; its
   address ('pstat') is passed to the deallocation helpers.  */
5189 stat = gfc_create_var (gfc_int4_type_node, "stat");
5190 pstat = gfc_build_addr_expr (NULL_TREE, stat);
5192 /* Running total of possible deallocation failures. */
5193 astat = gfc_create_var (gfc_int4_type_node, "astat");
5194 apstat = gfc_build_addr_expr (NULL_TREE, astat);
5196 /* Initialize astat to 0. */
5197 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Process every object named on the DEALLOCATE statement.  */
5200 for (al = code->ext.alloc.list; al != NULL; al = al->next)
/* Work on a copy so the reference chain can be extended (e.g. with
   the _data component) without mutating the parsed statement.  */
5202 gfc_expr *expr = gfc_copy_expr (al->expr);
5203 gcc_assert (expr->expr_type == EXPR_VARIABLE);
/* For CLASS objects the allocated storage is the _data component.  */
5205 if (expr->ts.type == BT_CLASS)
5206 gfc_add_data_component (expr);
5208 gfc_init_se (&se, NULL);
5209 gfc_start_block (&se.pre);
/* We need the pointer/descriptor itself, not the data it refers to.  */
5211 se.want_pointer = 1;
5212 se.descriptor_only = 1;
5213 gfc_conv_expr (&se, expr);
/* Array (or coarray) case.  */
5215 if (expr->rank || gfc_expr_attr (expr).codimension)
/* Derived types with allocatable components need those components
   released before the object itself is freed.  */
5217 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
5220 gfc_ref *last = NULL;
/* Find the last component reference, if any, so we can tell whether
   the object being deallocated is itself a pointer component.  */
5221 for (ref = expr->ref; ref; ref = ref->next)
5222 if (ref->type == REF_COMPONENT)
5225 /* Do not deallocate the components of a derived type
5226 ultimate pointer component. */
5227 if (!(last && last->u.c.component->attr.pointer)
5228 && !(!last && expr->symtree->n.sym->attr.pointer))
5230 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5232 gfc_add_expr_to_block (&se.pre, tmp);
/* Free the array data through its descriptor; errors land in *pstat.  */
5235 tmp = gfc_array_deallocate (se.expr, pstat, expr);
5236 gfc_add_expr_to_block (&se.pre, tmp);
/* Scalar case.  */
5240 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5242 gfc_add_expr_to_block (&se.pre, tmp);
5244 /* Set to zero after deallocation. */
5245 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5247 build_int_cst (TREE_TYPE (se.expr), 0));
5248 gfc_add_expr_to_block (&se.pre, tmp);
5250 if (al->expr->ts.type == BT_CLASS)
5252 /* Reset _vptr component to declared type. */
5253 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
5254 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
5255 gfc_add_vptr_component (lhs);
5256 rhs = gfc_lval_expr_from_sym (vtab);
5257 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5258 gfc_add_expr_to_block (&se.pre, tmp);
5259 gfc_free_expr (lhs);
5260 gfc_free_expr (rhs);
5264 /* Keep track of the number of failed deallocations by adding stat
5265 of the last deallocation to the running total. */
5266 if (code->expr1 || code->expr2)
5268 apstat = fold_build2_loc (input_location, PLUS_EXPR,
5269 TREE_TYPE (stat), astat, stat);
5270 gfc_add_modify (&se.pre, astat, apstat);
5273 tmp = gfc_finish_block (&se.pre);
5274 gfc_add_expr_to_block (&block, tmp);
5275 gfc_free_expr (expr);
/* STAT=: store the accumulated failure count into the user variable,
   converted to its declared kind.  */
5281 gfc_init_se (&se, NULL);
5282 gfc_conv_expr_lhs (&se, code->expr1);
5283 tmp = convert (TREE_TYPE (se.expr), astat);
5284 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG=: copy a fixed diagnostic string into the user's character
   variable, truncated to the shorter of the message length and the
   variable's declared length.  */
5290 /* A better error message may be possible, but not required. */
5291 const char *msg = "Attempt to deallocate an unallocated object";
5292 tree errmsg, slen, dlen;
5294 gfc_init_se (&se, NULL);
5295 gfc_conv_expr_lhs (&se, code->expr2);
5297 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
5299 gfc_add_modify (&block, errmsg,
5300 gfc_build_addr_expr (pchar_type_node,
5301 gfc_build_localized_cstring_const (msg)));
5303 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5304 dlen = gfc_get_expr_charlen (code->expr2);
/* slen = min (strlen (msg), declared length of ERRMSG variable).  */
5305 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5308 dlen = build_call_expr_loc (input_location,
5309 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
5310 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
/* Only perform the copy when at least one deallocation failed
   (astat != 0); otherwise ERRMSG is left untouched.  */
5312 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat,
5313 build_int_cst (TREE_TYPE (astat), 0));
5315 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5317 gfc_add_expr_to_block (&block, tmp);
5320 return gfc_finish_block (&block);
5323 #include "gt-fortran-trans-stmt.h"