1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
40 typedef struct iter_info
46 struct iter_info *next;
50 typedef struct forall_info
57 struct forall_info *prev_nest;
61 static void gfc_trans_where_2 (gfc_code *, tree, bool,
62 forall_info *, stmtblock_t *);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code * code)
69 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
80 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
81 gfc_conv_expr (se, expr);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se->expr) == COMPONENT_REF)
84 se->expr = TREE_OPERAND (se->expr, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se->expr) == INDIRECT_REF)
87 se->expr = TREE_OPERAND (se->expr, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code * code)
102 /* Start a new block. */
103 gfc_init_se (&se, NULL);
104 gfc_start_block (&se.pre);
105 gfc_conv_label_variable (&se, code->expr1);
107 len = GFC_DECL_STRING_LEN (se.expr);
108 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
110 label_tree = gfc_get_label_decl (code->label1);
112 if (code->label1->defined == ST_LABEL_TARGET)
114 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
115 len_tree = integer_minus_one_node;
119 gfc_expr *format = code->label1->format;
121 label_len = format->value.character.length;
122 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
123 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
124 format->value.character.string);
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 gfc_add_modify (&se.pre, len, len_tree);
129 gfc_add_modify (&se.pre, addr, label_tree);
131 return gfc_finish_block (&se.pre);
134 /* Translate a GOTO statement. */
137 gfc_trans_goto (gfc_code * code)
139 locus loc = code->loc;
145 if (code->label1 != NULL)
146 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
149 gfc_init_se (&se, NULL);
150 gfc_start_block (&se.pre);
151 gfc_conv_label_variable (&se, code->expr1);
152 tmp = GFC_DECL_STRING_LEN (se.expr);
153 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
154 build_int_cst (TREE_TYPE (tmp), -1));
155 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
156 "Assigned label is not a target label");
158 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
166 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 gfc_trans_entry (gfc_code * code)
177 return build1_v (LABEL_EXPR, code->ext.entry->label);
181 /* Replace a gfc_ss structure by another both in the gfc_se struct
182 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
183 to replace a variable ss by the corresponding temporary. */
186 replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
188 gfc_ss **sess, **loopss;
190 /* The old_ss is a ss for a single variable. */
191 gcc_assert (old_ss->info->type == GFC_SS_SECTION);
193 for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
196 gcc_assert (*sess != gfc_ss_terminator);
199 new_ss->next = old_ss->next;
202 for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
203 loopss = &((*loopss)->loop_chain))
204 if (*loopss == old_ss)
206 gcc_assert (*loopss != gfc_ss_terminator);
209 new_ss->loop_chain = old_ss->loop_chain;
210 new_ss->loop = old_ss->loop;
212 gfc_free_ss (old_ss);
216 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
217 elemental subroutines. Make temporaries for output arguments if any such
218 dependencies are found. Output arguments are chosen because internal_unpack
219 can be used, as is, to copy the result back to the variable. */
221 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
222 gfc_symbol * sym, gfc_actual_arglist * arg,
223 gfc_dep_check check_variable)
225 gfc_actual_arglist *arg0;
227 gfc_formal_arglist *formal;
235 if (loopse->ss == NULL)
240 formal = sym->formal;
242 /* Loop over all the arguments testing for dependencies. */
243 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
249 /* Obtain the info structure for the current argument. */
250 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
251 if (ss->info->expr == e)
254 /* If there is a dependency, create a temporary and use it
255 instead of the variable. */
256 fsym = formal ? formal->sym : NULL;
257 if (e->expr_type == EXPR_VARIABLE
259 && fsym->attr.intent != INTENT_IN
260 && gfc_check_fncall_dependency (e, fsym->attr.intent,
261 sym, arg0, check_variable))
263 tree initial, temptype;
264 stmtblock_t temp_post;
267 tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
269 gfc_mark_ss_chain_used (tmp_ss, 1);
270 tmp_ss->info->expr = ss->info->expr;
271 replace_ss (loopse, ss, tmp_ss);
273 /* Obtain the argument descriptor for unpacking. */
274 gfc_init_se (&parmse, NULL);
275 parmse.want_pointer = 1;
276 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
277 gfc_add_block_to_block (&se->pre, &parmse.pre);
279 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
280 initialize the array temporary with a copy of the values. */
281 if (fsym->attr.intent == INTENT_INOUT
282 || (fsym->ts.type ==BT_DERIVED
283 && fsym->attr.intent == INTENT_OUT))
284 initial = parmse.expr;
288 /* Find the type of the temporary to create; we don't use the type
289 of e itself as this breaks for subcomponent-references in e (where
290 the type of e is that of the final reference, but parmse.expr's
291 type corresponds to the full derived-type). */
292 /* TODO: Fix this somehow so we don't need a temporary of the whole
293 array but instead only the components referenced. */
294 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
295 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
296 temptype = TREE_TYPE (temptype);
297 temptype = gfc_get_element_type (temptype);
299 /* Generate the temporary. Cleaning up the temporary should be the
300 very last thing done, so we add the code to a new block and add it
301 to se->post as last instructions. */
302 size = gfc_create_var (gfc_array_index_type, NULL);
303 data = gfc_create_var (pvoid_type_node, NULL);
304 gfc_init_block (&temp_post);
305 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
306 temptype, initial, false, true,
307 false, &arg->expr->where);
308 gfc_add_modify (&se->pre, size, tmp);
309 tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
310 gfc_add_modify (&se->pre, data, tmp);
312 /* Update other ss' delta. */
313 gfc_set_delta (loopse->loop);
315 /* Copy the result back using unpack. */
316 tmp = build_call_expr_loc (input_location,
317 gfor_fndecl_in_unpack, 2, parmse.expr, data);
318 gfc_add_expr_to_block (&se->post, tmp);
320 /* parmse.pre is already added above. */
321 gfc_add_block_to_block (&se->post, &parmse.post);
322 gfc_add_block_to_block (&se->post, &temp_post);
328 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
331 gfc_trans_call (gfc_code * code, bool dependency_check,
332 tree mask, tree count1, bool invert)
336 int has_alternate_specifier;
337 gfc_dep_check check_variable;
338 tree index = NULL_TREE;
339 tree maskexpr = NULL_TREE;
342 /* A CALL starts a new block because the actual arguments may have to
343 be evaluated first. */
344 gfc_init_se (&se, NULL);
345 gfc_start_block (&se.pre);
347 gcc_assert (code->resolved_sym);
349 ss = gfc_ss_terminator;
350 if (code->resolved_sym->attr.elemental)
351 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
353 /* Is not an elemental subroutine call with array valued arguments. */
354 if (ss == gfc_ss_terminator)
357 /* Translate the call. */
358 has_alternate_specifier
359 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
362 /* A subroutine without side-effect, by definition, does nothing! */
363 TREE_SIDE_EFFECTS (se.expr) = 1;
365 /* Chain the pieces together and return the block. */
366 if (has_alternate_specifier)
368 gfc_code *select_code;
370 select_code = code->next;
371 gcc_assert(select_code->op == EXEC_SELECT);
372 sym = select_code->expr1->symtree->n.sym;
373 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
374 if (sym->backend_decl == NULL)
375 sym->backend_decl = gfc_get_symbol_decl (sym);
376 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
379 gfc_add_expr_to_block (&se.pre, se.expr);
381 gfc_add_block_to_block (&se.pre, &se.post);
386 /* An elemental subroutine call with array valued arguments has
394 /* gfc_walk_elemental_function_args renders the ss chain in the
395 reverse order to the actual argument order. */
396 ss = gfc_reverse_ss (ss);
398 /* Initialize the loop. */
399 gfc_init_se (&loopse, NULL);
400 gfc_init_loopinfo (&loop);
401 gfc_add_ss_to_loop (&loop, ss);
403 gfc_conv_ss_startstride (&loop);
404 /* TODO: gfc_conv_loop_setup generates a temporary for vector
405 subscripts. This could be prevented in the elemental case
406 as temporaries are handled separatedly
407 (below in gfc_conv_elemental_dependencies). */
408 gfc_conv_loop_setup (&loop, &code->expr1->where);
409 gfc_mark_ss_chain_used (ss, 1);
411 /* Convert the arguments, checking for dependencies. */
412 gfc_copy_loopinfo_to_se (&loopse, &loop);
415 /* For operator assignment, do dependency checking. */
416 if (dependency_check)
417 check_variable = ELEM_CHECK_VARIABLE;
419 check_variable = ELEM_DONT_CHECK_VARIABLE;
421 gfc_init_se (&depse, NULL);
422 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
423 code->ext.actual, check_variable);
425 gfc_add_block_to_block (&loop.pre, &depse.pre);
426 gfc_add_block_to_block (&loop.post, &depse.post);
428 /* Generate the loop body. */
429 gfc_start_scalarized_body (&loop, &body);
430 gfc_init_block (&block);
434 /* Form the mask expression according to the mask. */
436 maskexpr = gfc_build_array_ref (mask, index, NULL);
438 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
439 TREE_TYPE (maskexpr), maskexpr);
442 /* Add the subroutine call to the block. */
443 gfc_conv_procedure_call (&loopse, code->resolved_sym,
444 code->ext.actual, code->expr1, NULL);
448 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
449 build_empty_stmt (input_location));
450 gfc_add_expr_to_block (&loopse.pre, tmp);
451 tmp = fold_build2_loc (input_location, PLUS_EXPR,
452 gfc_array_index_type,
453 count1, gfc_index_one_node);
454 gfc_add_modify (&loopse.pre, count1, tmp);
457 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
459 gfc_add_block_to_block (&block, &loopse.pre);
460 gfc_add_block_to_block (&block, &loopse.post);
462 /* Finish up the loop block and the loop. */
463 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
464 gfc_trans_scalarizing_loops (&loop, &body);
465 gfc_add_block_to_block (&se.pre, &loop.pre);
466 gfc_add_block_to_block (&se.pre, &loop.post);
467 gfc_add_block_to_block (&se.pre, &se.post);
468 gfc_cleanup_loop (&loop);
471 return gfc_finish_block (&se.pre);
475 /* Translate the RETURN statement. */
478 gfc_trans_return (gfc_code * code)
486 /* If code->expr is not NULL, this return statement must appear
487 in a subroutine and current_fake_result_decl has already
490 result = gfc_get_fake_result_decl (NULL, 0);
493 gfc_warning ("An alternate return at %L without a * dummy argument",
494 &code->expr1->where);
495 return gfc_generate_return ();
498 /* Start a new block for this statement. */
499 gfc_init_se (&se, NULL);
500 gfc_start_block (&se.pre);
502 gfc_conv_expr (&se, code->expr1);
504 /* Note that the actually returned expression is a simple value and
505 does not depend on any pointers or such; thus we can clean-up with
506 se.post before returning. */
507 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
508 result, fold_convert (TREE_TYPE (result),
510 gfc_add_expr_to_block (&se.pre, tmp);
511 gfc_add_block_to_block (&se.pre, &se.post);
513 tmp = gfc_generate_return ();
514 gfc_add_expr_to_block (&se.pre, tmp);
515 return gfc_finish_block (&se.pre);
518 return gfc_generate_return ();
522 /* Translate the PAUSE statement. We have to translate this statement
523 to a runtime library call. */
526 gfc_trans_pause (gfc_code * code)
528 tree gfc_int4_type_node = gfc_get_int_type (4);
532 /* Start a new block for this statement. */
533 gfc_init_se (&se, NULL);
534 gfc_start_block (&se.pre);
537 if (code->expr1 == NULL)
539 tmp = build_int_cst (gfc_int4_type_node, 0);
540 tmp = build_call_expr_loc (input_location,
541 gfor_fndecl_pause_string, 2,
542 build_int_cst (pchar_type_node, 0), tmp);
544 else if (code->expr1->ts.type == BT_INTEGER)
546 gfc_conv_expr (&se, code->expr1);
547 tmp = build_call_expr_loc (input_location,
548 gfor_fndecl_pause_numeric, 1,
549 fold_convert (gfc_int4_type_node, se.expr));
553 gfc_conv_expr_reference (&se, code->expr1);
554 tmp = build_call_expr_loc (input_location,
555 gfor_fndecl_pause_string, 2,
556 se.expr, se.string_length);
559 gfc_add_expr_to_block (&se.pre, tmp);
561 gfc_add_block_to_block (&se.pre, &se.post);
563 return gfc_finish_block (&se.pre);
567 /* Translate the STOP statement. We have to translate this statement
568 to a runtime library call. */
571 gfc_trans_stop (gfc_code *code, bool error_stop)
573 tree gfc_int4_type_node = gfc_get_int_type (4);
577 /* Start a new block for this statement. */
578 gfc_init_se (&se, NULL);
579 gfc_start_block (&se.pre);
581 if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop)
583 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
584 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
585 tmp = build_call_expr_loc (input_location, tmp, 0);
586 gfc_add_expr_to_block (&se.pre, tmp);
588 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
589 gfc_add_expr_to_block (&se.pre, tmp);
592 if (code->expr1 == NULL)
594 tmp = build_int_cst (gfc_int4_type_node, 0);
595 tmp = build_call_expr_loc (input_location,
597 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
598 ? gfor_fndecl_caf_error_stop_str
599 : gfor_fndecl_error_stop_string)
600 : gfor_fndecl_stop_string,
601 2, build_int_cst (pchar_type_node, 0), tmp);
603 else if (code->expr1->ts.type == BT_INTEGER)
605 gfc_conv_expr (&se, code->expr1);
606 tmp = build_call_expr_loc (input_location,
608 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
609 ? gfor_fndecl_caf_error_stop
610 : gfor_fndecl_error_stop_numeric)
611 : gfor_fndecl_stop_numeric_f08, 1,
612 fold_convert (gfc_int4_type_node, se.expr));
616 gfc_conv_expr_reference (&se, code->expr1);
617 tmp = build_call_expr_loc (input_location,
619 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
620 ? gfor_fndecl_caf_error_stop_str
621 : gfor_fndecl_error_stop_string)
622 : gfor_fndecl_stop_string,
623 2, se.expr, se.string_length);
626 gfc_add_expr_to_block (&se.pre, tmp);
628 gfc_add_block_to_block (&se.pre, &se.post);
630 return gfc_finish_block (&se.pre);
635 gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
638 tree stat = NULL_TREE, lock_acquired = NULL_TREE;
640 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
641 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
642 if (!code->expr2 && !code->expr4 && gfc_option.coarray != GFC_FCOARRAY_LIB)
645 gfc_init_se (&se, NULL);
646 gfc_start_block (&se.pre);
650 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
651 gfc_init_se (&argse, NULL);
652 gfc_conv_expr_val (&argse, code->expr2);
658 gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
659 gfc_init_se (&argse, NULL);
660 gfc_conv_expr_val (&argse, code->expr4);
661 lock_acquired = argse.expr;
664 if (stat != NULL_TREE)
665 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
667 if (lock_acquired != NULL_TREE)
668 gfc_add_modify (&se.pre, lock_acquired,
669 fold_convert (TREE_TYPE (lock_acquired),
672 return gfc_finish_block (&se.pre);
677 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
681 tree images = NULL_TREE, stat = NULL_TREE,
682 errmsg = NULL_TREE, errmsglen = NULL_TREE;
684 /* Short cut: For single images without bound checking or without STAT=,
685 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
686 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
687 && gfc_option.coarray != GFC_FCOARRAY_LIB)
690 gfc_init_se (&se, NULL);
691 gfc_start_block (&se.pre);
693 if (code->expr1 && code->expr1->rank == 0)
695 gfc_init_se (&argse, NULL);
696 gfc_conv_expr_val (&argse, code->expr1);
702 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
703 gfc_init_se (&argse, NULL);
704 gfc_conv_expr_val (&argse, code->expr2);
708 stat = null_pointer_node;
710 if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB
711 && type != EXEC_SYNC_MEMORY)
713 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
714 gfc_init_se (&argse, NULL);
715 gfc_conv_expr (&argse, code->expr3);
716 gfc_conv_string_parameter (&argse);
717 errmsg = gfc_build_addr_expr (NULL, argse.expr);
718 errmsglen = argse.string_length;
720 else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
722 errmsg = null_pointer_node;
723 errmsglen = build_int_cst (integer_type_node, 0);
726 /* Check SYNC IMAGES(imageset) for valid image index.
727 FIXME: Add a check for image-set arrays. */
728 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
729 && code->expr1->rank == 0)
732 if (gfc_option.coarray != GFC_FCOARRAY_LIB)
733 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
734 images, build_int_cst (TREE_TYPE (images), 1));
738 cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
739 images, gfort_gvar_caf_num_images);
740 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
742 build_int_cst (TREE_TYPE (images), 1));
743 cond = fold_build2_loc (input_location, TRUTH_AND_EXPR,
744 boolean_type_node, cond, cond2);
746 gfc_trans_runtime_check (true, false, cond, &se.pre,
747 &code->expr1->where, "Invalid image number "
749 fold_convert (integer_type_node, se.expr));
752 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
753 image control statements SYNC IMAGES and SYNC ALL. */
754 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
756 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
757 tmp = build_call_expr_loc (input_location, tmp, 0);
758 gfc_add_expr_to_block (&se.pre, tmp);
761 if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
763 /* Set STAT to zero. */
765 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
767 else if (type == EXEC_SYNC_ALL)
769 /* SYNC ALL => stat == null_pointer_node
770 SYNC ALL(stat=s) => stat has an integer type
772 If "stat" has the wrong integer type, use a temp variable of
773 the right type and later cast the result back into "stat". */
774 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
776 if (TREE_TYPE (stat) == integer_type_node)
777 stat = gfc_build_addr_expr (NULL, stat);
779 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
780 3, stat, errmsg, errmsglen);
781 gfc_add_expr_to_block (&se.pre, tmp);
785 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
787 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
788 3, gfc_build_addr_expr (NULL, tmp_stat),
790 gfc_add_expr_to_block (&se.pre, tmp);
792 gfc_add_modify (&se.pre, stat,
793 fold_convert (TREE_TYPE (stat), tmp_stat));
800 gcc_assert (type == EXEC_SYNC_IMAGES);
804 len = build_int_cst (integer_type_node, -1);
805 images = null_pointer_node;
807 else if (code->expr1->rank == 0)
809 len = build_int_cst (integer_type_node, 1);
810 images = gfc_build_addr_expr (NULL_TREE, images);
815 if (code->expr1->ts.kind != gfc_c_int_kind)
816 gfc_fatal_error ("Sorry, only support for integer kind %d "
817 "implemented for image-set at %L",
818 gfc_c_int_kind, &code->expr1->where);
820 gfc_conv_array_parameter (&se, code->expr1,
821 gfc_walk_expr (code->expr1), true, NULL,
825 tmp = gfc_typenode_for_spec (&code->expr1->ts);
826 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
827 tmp = gfc_get_element_type (tmp);
829 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
830 TREE_TYPE (len), len,
831 fold_convert (TREE_TYPE (len),
832 TYPE_SIZE_UNIT (tmp)));
833 len = fold_convert (integer_type_node, len);
836 /* SYNC IMAGES(imgs) => stat == null_pointer_node
837 SYNC IMAGES(imgs,stat=s) => stat has an integer type
839 If "stat" has the wrong integer type, use a temp variable of
840 the right type and later cast the result back into "stat". */
841 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
843 if (TREE_TYPE (stat) == integer_type_node)
844 stat = gfc_build_addr_expr (NULL, stat);
846 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
847 5, fold_convert (integer_type_node, len),
848 images, stat, errmsg, errmsglen);
849 gfc_add_expr_to_block (&se.pre, tmp);
853 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
855 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
856 5, fold_convert (integer_type_node, len),
857 images, gfc_build_addr_expr (NULL, tmp_stat),
859 gfc_add_expr_to_block (&se.pre, tmp);
861 gfc_add_modify (&se.pre, stat,
862 fold_convert (TREE_TYPE (stat), tmp_stat));
866 return gfc_finish_block (&se.pre);
870 /* Generate GENERIC for the IF construct. This function also deals with
871 the simple IF statement, because the front end translates the IF
872 statement into an IF construct.
904 where COND_S is the simplified version of the predicate. PRE_COND_S
905 are the pre side-effects produced by the translation of the
907 We need to build the chain recursively otherwise we run into
908 problems with folding incomplete statements. */
911 gfc_trans_if_1 (gfc_code * code)
918 /* Check for an unconditional ELSE clause. */
920 return gfc_trans_code (code->next);
922 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
923 gfc_init_se (&if_se, NULL);
924 gfc_start_block (&if_se.pre);
926 /* Calculate the IF condition expression. */
927 if (code->expr1->where.lb)
929 gfc_save_backend_locus (&saved_loc);
930 gfc_set_backend_locus (&code->expr1->where);
933 gfc_conv_expr_val (&if_se, code->expr1);
935 if (code->expr1->where.lb)
936 gfc_restore_backend_locus (&saved_loc);
938 /* Translate the THEN clause. */
939 stmt = gfc_trans_code (code->next);
941 /* Translate the ELSE clause. */
943 elsestmt = gfc_trans_if_1 (code->block);
945 elsestmt = build_empty_stmt (input_location);
947 /* Build the condition expression and add it to the condition block. */
948 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
949 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
952 gfc_add_expr_to_block (&if_se.pre, stmt);
954 /* Finish off this statement. */
955 return gfc_finish_block (&if_se.pre);
959 gfc_trans_if (gfc_code * code)
964 /* Create exit label so it is available for trans'ing the body code. */
965 exit_label = gfc_build_label_decl (NULL_TREE);
966 code->exit_label = exit_label;
968 /* Translate the actual code in code->block. */
969 gfc_init_block (&body);
970 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
972 /* Add exit label. */
973 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
975 return gfc_finish_block (&body);
979 /* Translate an arithmetic IF expression.
981 IF (cond) label1, label2, label3 translates to
993 An optimized version can be generated in case of equal labels.
994 E.g., if label1 is equal to label2, we can translate it to
1003 gfc_trans_arithmetic_if (gfc_code * code)
1011 /* Start a new block. */
1012 gfc_init_se (&se, NULL);
1013 gfc_start_block (&se.pre);
1015 /* Pre-evaluate COND. */
1016 gfc_conv_expr_val (&se, code->expr1);
1017 se.expr = gfc_evaluate_now (se.expr, &se.pre);
1019 /* Build something to compare with. */
1020 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
1022 if (code->label1->value != code->label2->value)
1024 /* If (cond < 0) take branch1 else take branch2.
1025 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1026 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1027 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
1029 if (code->label1->value != code->label3->value)
1030 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
1033 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1036 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1037 tmp, branch1, branch2);
1040 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1042 if (code->label1->value != code->label3->value
1043 && code->label2->value != code->label3->value)
1045 /* if (cond <= 0) take branch1 else take branch2. */
1046 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
1047 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1049 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1050 tmp, branch1, branch2);
1053 /* Append the COND_EXPR to the evaluation of COND, and return. */
1054 gfc_add_expr_to_block (&se.pre, branch1);
1055 return gfc_finish_block (&se.pre);
1059 /* Translate a CRITICAL block. */
1061 gfc_trans_critical (gfc_code *code)
1066 gfc_start_block (&block);
1068 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1070 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_critical, 0);
1071 gfc_add_expr_to_block (&block, tmp);
1074 tmp = gfc_trans_code (code->block->next);
1075 gfc_add_expr_to_block (&block, tmp);
1077 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1079 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_end_critical,
1081 gfc_add_expr_to_block (&block, tmp);
1085 return gfc_finish_block (&block);
1089 /* Do proper initialization for ASSOCIATE names. */
1092 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1097 gcc_assert (sym->assoc);
1098 e = sym->assoc->target;
1100 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1101 to array temporary) for arrays with either unknown shape or if associating
1103 if (sym->attr.dimension
1104 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1110 desc = sym->backend_decl;
1112 /* If association is to an expression, evaluate it and create temporary.
1113 Otherwise, get descriptor of target for pointer assignment. */
1114 gfc_init_se (&se, NULL);
1115 ss = gfc_walk_expr (e);
1116 if (sym->assoc->variable)
1118 se.direct_byref = 1;
1121 gfc_conv_expr_descriptor (&se, e, ss);
1123 /* If we didn't already do the pointer assignment, set associate-name
1124 descriptor to the one generated for the temporary. */
1125 if (!sym->assoc->variable)
1129 gfc_add_modify (&se.pre, desc, se.expr);
1131 /* The generated descriptor has lower bound zero (as array
1132 temporary), shift bounds so we get lower bounds of 1. */
1133 for (dim = 0; dim < e->rank; ++dim)
1134 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1135 dim, gfc_index_one_node);
1138 /* Done, register stuff as init / cleanup code. */
1139 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1140 gfc_finish_block (&se.post));
1143 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1144 else if (gfc_is_associate_pointer (sym))
1148 gcc_assert (!sym->attr.dimension);
1150 gfc_init_se (&se, NULL);
1151 gfc_conv_expr (&se, e);
1153 tmp = TREE_TYPE (sym->backend_decl);
1154 tmp = gfc_build_addr_expr (tmp, se.expr);
1155 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1157 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1158 gfc_finish_block (&se.post));
1161 /* Do a simple assignment. This is for scalar expressions, where we
1162 can simply use expression assignment. */
1167 lhs = gfc_lval_expr_from_sym (sym);
1168 tmp = gfc_trans_assignment (lhs, e, false, true);
1169 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1174 /* Translate a BLOCK construct. This is basically what we would do for a
1178 gfc_trans_block_construct (gfc_code* code)
1182 gfc_wrapped_block block;
1185 gfc_association_list *ass;
1187 ns = code->ext.block.ns;
1189 sym = ns->proc_name;
1192 /* Process local variables. */
1193 gcc_assert (!sym->tlink);
1195 gfc_process_block_locals (ns);
1197 /* Generate code including exit-label. */
1198 gfc_init_block (&body);
1199 exit_label = gfc_build_label_decl (NULL_TREE);
1200 code->exit_label = exit_label;
1201 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1202 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1204 /* Finish everything. */
1205 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1206 gfc_trans_deferred_vars (sym, &block);
1207 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1208 trans_associate_var (ass->st->n.sym, &block);
1210 return gfc_finish_wrapped_block (&block);
1214 /* Translate the simple DO construct. This is where the loop variable has
1215 integer type and step +-1. We can't use this in the general case
1216 because integer overflow and floating point errors could give incorrect
1218 We translate a do loop from:
1220 DO dovar = from, to, step
1226 [Evaluate loop bounds and step]
1228 if ((step > 0) ? (dovar <= to) : (dovar => to))
1234 cond = (dovar == to);
1236 if (cond) goto end_label;
1241 This helps the optimizers by avoiding the extra induction variable
1242 used in the general case. */
1245 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1246 tree from, tree to, tree step, tree exit_cond)
1252 tree saved_dovar = NULL;
1257 type = TREE_TYPE (dovar);
1259 loc = code->ext.iterator->start->where.lb->location;
1261 /* Initialize the DO variable: dovar = from. */
1262 gfc_add_modify_loc (loc, pblock, dovar,
1263 fold_convert (TREE_TYPE(dovar), from));
1265 /* Save value for do-tinkering checking. */
1266 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1268 saved_dovar = gfc_create_var (type, ".saved_dovar");
1269 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1272 /* Cycle and exit statements are implemented with gotos. */
1273 cycle_label = gfc_build_label_decl (NULL_TREE);
1274 exit_label = gfc_build_label_decl (NULL_TREE);
1276 /* Put the labels where they can be found later. See gfc_trans_do(). */
1277 code->cycle_label = cycle_label;
1278 code->exit_label = exit_label;
1281 gfc_start_block (&body);
1283 /* Main loop body. */
1284 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1285 gfc_add_expr_to_block (&body, tmp);
1287 /* Label for cycle statements (if needed). */
1288 if (TREE_USED (cycle_label))
1290 tmp = build1_v (LABEL_EXPR, cycle_label);
1291 gfc_add_expr_to_block (&body, tmp);
1294 /* Check whether someone has modified the loop variable. */
1295 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1297 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1298 dovar, saved_dovar);
1299 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1300 "Loop variable has been modified");
1303 /* Exit the loop if there is an I/O result condition or error. */
1306 tmp = build1_v (GOTO_EXPR, exit_label);
1307 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1309 build_empty_stmt (loc));
1310 gfc_add_expr_to_block (&body, tmp);
1313 /* Evaluate the loop condition. */
1314 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1316 cond = gfc_evaluate_now_loc (loc, cond, &body);
1318 /* Increment the loop variable. */
1319 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1320 gfc_add_modify_loc (loc, &body, dovar, tmp);
1322 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1323 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1325 /* The loop exit. */
1326 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1327 TREE_USED (exit_label) = 1;
1328 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1329 cond, tmp, build_empty_stmt (loc));
1330 gfc_add_expr_to_block (&body, tmp);
1332 /* Finish the loop body. */
1333 tmp = gfc_finish_block (&body);
1334 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1336 /* Only execute the loop if the number of iterations is positive. */
1337 if (tree_int_cst_sgn (step) > 0)
1338 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1341 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1343 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1344 build_empty_stmt (loc));
1345 gfc_add_expr_to_block (pblock, tmp);
1347 /* Add the exit label. */
1348 tmp = build1_v (LABEL_EXPR, exit_label);
1349 gfc_add_expr_to_block (pblock, tmp);
1351 return gfc_finish_block (pblock);
1354 /* Translate the DO construct. This obviously is one of the most
1355 important ones to get right with any compiler, but especially
1358 We special case some loop forms as described in gfc_trans_simple_do.
1359 For other cases we implement them with a separate loop count,
1360 as described in the standard.
1362 We translate a do loop from:
1364 DO dovar = from, to, step
1370 [evaluate loop bounds and step]
1371 empty = (step > 0 ? to < from : to > from);
1372 countm1 = (to - from) / step;
1374 if (empty) goto exit_label;
1380 if (countm1 ==0) goto exit_label;
1385 countm1 is an unsigned integer. It is equal to the loop count minus one,
1386 because the loop count itself can overflow. */
1389 gfc_trans_do (gfc_code * code, tree exit_cond)
1393 tree saved_dovar = NULL;
1409 gfc_start_block (&block);
1411 loc = code->ext.iterator->start->where.lb->location;
1413 /* Evaluate all the expressions in the iterator. */
1414 gfc_init_se (&se, NULL);
1415 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1416 gfc_add_block_to_block (&block, &se.pre);
1418 type = TREE_TYPE (dovar);
1420 gfc_init_se (&se, NULL);
1421 gfc_conv_expr_val (&se, code->ext.iterator->start);
1422 gfc_add_block_to_block (&block, &se.pre);
1423 from = gfc_evaluate_now (se.expr, &block);
1425 gfc_init_se (&se, NULL);
1426 gfc_conv_expr_val (&se, code->ext.iterator->end);
1427 gfc_add_block_to_block (&block, &se.pre);
1428 to = gfc_evaluate_now (se.expr, &block);
1430 gfc_init_se (&se, NULL);
1431 gfc_conv_expr_val (&se, code->ext.iterator->step);
1432 gfc_add_block_to_block (&block, &se.pre);
1433 step = gfc_evaluate_now (se.expr, &block);
1435 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1437 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1438 build_zero_cst (type));
1439 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1440 "DO step value is zero");
1443 /* Special case simple loops. */
1444 if (TREE_CODE (type) == INTEGER_TYPE
1445 && (integer_onep (step)
1446 || tree_int_cst_equal (step, integer_minus_one_node)))
1447 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1449 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1450 build_zero_cst (type));
1452 if (TREE_CODE (type) == INTEGER_TYPE)
1453 utype = unsigned_type_for (type);
1455 utype = unsigned_type_for (gfc_array_index_type);
1456 countm1 = gfc_create_var (utype, "countm1");
1458 /* Cycle and exit statements are implemented with gotos. */
1459 cycle_label = gfc_build_label_decl (NULL_TREE);
1460 exit_label = gfc_build_label_decl (NULL_TREE);
1461 TREE_USED (exit_label) = 1;
1463 /* Put these labels where they can be found later. */
1464 code->cycle_label = cycle_label;
1465 code->exit_label = exit_label;
1467 /* Initialize the DO variable: dovar = from. */
1468 gfc_add_modify (&block, dovar, from);
1470 /* Save value for do-tinkering checking. */
1471 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1473 saved_dovar = gfc_create_var (type, ".saved_dovar");
1474 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1477 /* Initialize loop count and jump to exit label if the loop is empty.
1478 This code is executed before we enter the loop body. We generate:
1479 step_sign = sign(1,step);
1490 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1494 if (TREE_CODE (type) == INTEGER_TYPE)
1496 tree pos, neg, step_sign, to2, from2, step2;
1498 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1500 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1501 build_int_cst (TREE_TYPE (step), 0));
1502 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1503 build_int_cst (type, -1),
1504 build_int_cst (type, 1));
1506 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1507 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1508 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1510 build_empty_stmt (loc));
1512 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1514 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1515 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1517 build_empty_stmt (loc));
1518 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1519 pos_step, pos, neg);
1521 gfc_add_expr_to_block (&block, tmp);
1523 /* Calculate the loop count. to-from can overflow, so
1524 we cast to unsigned. */
1526 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1527 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1528 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1529 step2 = fold_convert (utype, step2);
1530 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1531 tmp = fold_convert (utype, tmp);
1532 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1533 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1534 gfc_add_expr_to_block (&block, tmp);
1538 /* TODO: We could use the same width as the real type.
1539 This would probably cause more problems that it solves
1540 when we implement "long double" types. */
1542 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1543 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1544 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1545 gfc_add_modify (&block, countm1, tmp);
1547 /* We need a special check for empty loops:
1548 empty = (step > 0 ? to < from : to > from); */
1549 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1550 fold_build2_loc (loc, LT_EXPR,
1551 boolean_type_node, to, from),
1552 fold_build2_loc (loc, GT_EXPR,
1553 boolean_type_node, to, from));
1554 /* If the loop is empty, go directly to the exit label. */
1555 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1556 build1_v (GOTO_EXPR, exit_label),
1557 build_empty_stmt (input_location));
1558 gfc_add_expr_to_block (&block, tmp);
1562 gfc_start_block (&body);
1564 /* Main loop body. */
1565 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1566 gfc_add_expr_to_block (&body, tmp);
1568 /* Label for cycle statements (if needed). */
1569 if (TREE_USED (cycle_label))
1571 tmp = build1_v (LABEL_EXPR, cycle_label);
1572 gfc_add_expr_to_block (&body, tmp);
1575 /* Check whether someone has modified the loop variable. */
1576 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1578 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1580 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1581 "Loop variable has been modified");
1584 /* Exit the loop if there is an I/O result condition or error. */
1587 tmp = build1_v (GOTO_EXPR, exit_label);
1588 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1590 build_empty_stmt (input_location));
1591 gfc_add_expr_to_block (&body, tmp);
1594 /* Increment the loop variable. */
1595 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1596 gfc_add_modify_loc (loc, &body, dovar, tmp);
1598 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1599 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1601 /* End with the loop condition. Loop until countm1 == 0. */
1602 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1603 build_int_cst (utype, 0));
1604 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1605 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1606 cond, tmp, build_empty_stmt (loc));
1607 gfc_add_expr_to_block (&body, tmp);
1609 /* Decrement the loop count. */
1610 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1611 build_int_cst (utype, 1));
1612 gfc_add_modify_loc (loc, &body, countm1, tmp);
1614 /* End of loop body. */
1615 tmp = gfc_finish_block (&body);
1617 /* The for loop itself. */
1618 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1619 gfc_add_expr_to_block (&block, tmp);
1621 /* Add the exit label. */
1622 tmp = build1_v (LABEL_EXPR, exit_label);
1623 gfc_add_expr_to_block (&block, tmp);
1625 return gfc_finish_block (&block);
1629 /* Translate the DO WHILE construct.
1642 if (! cond) goto exit_label;
1648 Because the evaluation of the exit condition `cond' may have side
1649 effects, we can't do much for empty loop bodies. The backend optimizers
1650 should be smart enough to eliminate any dead loops. */
1653 gfc_trans_do_while (gfc_code * code)
1661 /* Everything we build here is part of the loop body. */
1662 gfc_start_block (&block);
1664 /* Cycle and exit statements are implemented with gotos. */
1665 cycle_label = gfc_build_label_decl (NULL_TREE);
1666 exit_label = gfc_build_label_decl (NULL_TREE);
1668 /* Put the labels where they can be found later. See gfc_trans_do(). */
1669 code->cycle_label = cycle_label;
1670 code->exit_label = exit_label;
1672 /* Create a GIMPLE version of the exit condition. */
1673 gfc_init_se (&cond, NULL);
1674 gfc_conv_expr_val (&cond, code->expr1);
1675 gfc_add_block_to_block (&block, &cond.pre);
1676 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1677 TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1679 /* Build "IF (! cond) GOTO exit_label". */
1680 tmp = build1_v (GOTO_EXPR, exit_label);
1681 TREE_USED (exit_label) = 1;
1682 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1683 void_type_node, cond.expr, tmp,
1684 build_empty_stmt (code->expr1->where.lb->location));
1685 gfc_add_expr_to_block (&block, tmp);
1687 /* The main body of the loop. */
1688 tmp = gfc_trans_code (code->block->next);
1689 gfc_add_expr_to_block (&block, tmp);
1691 /* Label for cycle statements (if needed). */
1692 if (TREE_USED (cycle_label))
1694 tmp = build1_v (LABEL_EXPR, cycle_label);
1695 gfc_add_expr_to_block (&block, tmp);
1698 /* End of loop body. */
1699 tmp = gfc_finish_block (&block);
1701 gfc_init_block (&block);
1702 /* Build the loop. */
1703 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1704 void_type_node, tmp);
1705 gfc_add_expr_to_block (&block, tmp);
1707 /* Add the exit label. */
1708 tmp = build1_v (LABEL_EXPR, exit_label);
1709 gfc_add_expr_to_block (&block, tmp);
1711 return gfc_finish_block (&block);
1715 /* Translate the SELECT CASE construct for INTEGER case expressions,
1716 without killing all potential optimizations. The problem is that
1717 Fortran allows unbounded cases, but the back-end does not, so we
1718 need to intercept those before we enter the equivalent SWITCH_EXPR
1721 For example, we translate this,
1724 CASE (:100,101,105:115)
1734 to the GENERIC equivalent,
1738 case (minimum value for typeof(expr) ... 100:
1744 case 200 ... (maximum value for typeof(expr):
1761 gfc_trans_integer_select (gfc_code * code)
1771 gfc_start_block (&block);
1773 /* Calculate the switch expression. */
1774 gfc_init_se (&se, NULL);
1775 gfc_conv_expr_val (&se, code->expr1);
1776 gfc_add_block_to_block (&block, &se.pre);
1778 end_label = gfc_build_label_decl (NULL_TREE);
1780 gfc_init_block (&body);
1782 for (c = code->block; c; c = c->block)
1784 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1789 /* Assume it's the default case. */
1790 low = high = NULL_TREE;
1794 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1797 /* If there's only a lower bound, set the high bound to the
1798 maximum value of the case expression. */
1800 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1805 /* Three cases are possible here:
1807 1) There is no lower bound, e.g. CASE (:N).
1808 2) There is a lower bound .NE. high bound, that is
1809 a case range, e.g. CASE (N:M) where M>N (we make
1810 sure that M>N during type resolution).
1811 3) There is a lower bound, and it has the same value
1812 as the high bound, e.g. CASE (N:N). This is our
1813 internal representation of CASE(N).
1815 In the first and second case, we need to set a value for
1816 high. In the third case, we don't because the GCC middle
1817 end represents a single case value by just letting high be
1818 a NULL_TREE. We can't do that because we need to be able
1819 to represent unbounded cases. */
1823 && mpz_cmp (cp->low->value.integer,
1824 cp->high->value.integer) != 0))
1825 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1828 /* Unbounded case. */
1830 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1833 /* Build a label. */
1834 label = gfc_build_label_decl (NULL_TREE);
1836 /* Add this case label.
1837 Add parameter 'label', make it match GCC backend. */
1838 tmp = build_case_label (low, high, label);
1839 gfc_add_expr_to_block (&body, tmp);
1842 /* Add the statements for this case. */
1843 tmp = gfc_trans_code (c->next);
1844 gfc_add_expr_to_block (&body, tmp);
1846 /* Break to the end of the construct. */
1847 tmp = build1_v (GOTO_EXPR, end_label);
1848 gfc_add_expr_to_block (&body, tmp);
1851 tmp = gfc_finish_block (&body);
1852 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1853 gfc_add_expr_to_block (&block, tmp);
1855 tmp = build1_v (LABEL_EXPR, end_label);
1856 gfc_add_expr_to_block (&block, tmp);
1858 return gfc_finish_block (&block);
1862 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1864 There are only two cases possible here, even though the standard
1865 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1866 .FALSE., and DEFAULT.
1868 We never generate more than two blocks here. Instead, we always
1869 try to eliminate the DEFAULT case. This way, we can translate this
1870 kind of SELECT construct to a simple
1874 expression in GENERIC. */
1877 gfc_trans_logical_select (gfc_code * code)
1880 gfc_code *t, *f, *d;
1885 /* Assume we don't have any cases at all. */
1888 /* Now see which ones we actually do have. We can have at most two
1889 cases in a single case list: one for .TRUE. and one for .FALSE.
1890 The default case is always separate. If the cases for .TRUE. and
1891 .FALSE. are in the same case list, the block for that case list
1892 always executed, and we don't generate code a COND_EXPR. */
1893 for (c = code->block; c; c = c->block)
1895 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1899 if (cp->low->value.logical == 0) /* .FALSE. */
1901 else /* if (cp->value.logical != 0), thus .TRUE. */
1909 /* Start a new block. */
1910 gfc_start_block (&block);
1912 /* Calculate the switch expression. We always need to do this
1913 because it may have side effects. */
1914 gfc_init_se (&se, NULL);
1915 gfc_conv_expr_val (&se, code->expr1);
1916 gfc_add_block_to_block (&block, &se.pre);
1918 if (t == f && t != NULL)
1920 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1921 translate the code for these cases, append it to the current
1923 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1927 tree true_tree, false_tree, stmt;
1929 true_tree = build_empty_stmt (input_location);
1930 false_tree = build_empty_stmt (input_location);
1932 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1933 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1934 make the missing case the default case. */
1935 if (t != NULL && f != NULL)
1945 /* Translate the code for each of these blocks, and append it to
1946 the current block. */
1948 true_tree = gfc_trans_code (t->next);
1951 false_tree = gfc_trans_code (f->next);
1953 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1954 se.expr, true_tree, false_tree);
1955 gfc_add_expr_to_block (&block, stmt);
1958 return gfc_finish_block (&block);
1962 /* The jump table types are stored in static variables to avoid
1963 constructing them from scratch every single time. */
1964 static GTY(()) tree select_struct[2];
1966 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1967 Instead of generating compares and jumps, it is far simpler to
1968 generate a data structure describing the cases in order and call a
1969 library subroutine that locates the right case.
1970 This is particularly true because this is the only case where we
1971 might have to dispose of a temporary.
1972 The library subroutine returns a pointer to jump to or NULL if no
1973 branches are to be taken. */
1976 gfc_trans_character_select (gfc_code *code)
1978 tree init, end_label, tmp, type, case_num, label, fndecl;
1979 stmtblock_t block, body;
1984 VEC(constructor_elt,gc) *inits = NULL;
1986 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1988 /* The jump table types are stored in static variables to avoid
1989 constructing them from scratch every single time. */
1990 static tree ss_string1[2], ss_string1_len[2];
1991 static tree ss_string2[2], ss_string2_len[2];
1992 static tree ss_target[2];
1994 cp = code->block->ext.block.case_list;
1995 while (cp->left != NULL)
1998 /* Generate the body */
1999 gfc_start_block (&block);
2000 gfc_init_se (&expr1se, NULL);
2001 gfc_conv_expr_reference (&expr1se, code->expr1);
2003 gfc_add_block_to_block (&block, &expr1se.pre);
2005 end_label = gfc_build_label_decl (NULL_TREE);
2007 gfc_init_block (&body);
2009 /* Attempt to optimize length 1 selects. */
2010 if (integer_onep (expr1se.string_length))
2012 for (d = cp; d; d = d->right)
2017 gcc_assert (d->low->expr_type == EXPR_CONSTANT
2018 && d->low->ts.type == BT_CHARACTER);
2019 if (d->low->value.character.length > 1)
2021 for (i = 1; i < d->low->value.character.length; i++)
2022 if (d->low->value.character.string[i] != ' ')
2024 if (i != d->low->value.character.length)
2026 if (optimize && d->high && i == 1)
2028 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2029 && d->high->ts.type == BT_CHARACTER);
2030 if (d->high->value.character.length > 1
2031 && (d->low->value.character.string[0]
2032 == d->high->value.character.string[0])
2033 && d->high->value.character.string[1] != ' '
2034 && ((d->low->value.character.string[1] < ' ')
2035 == (d->high->value.character.string[1]
2045 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2046 && d->high->ts.type == BT_CHARACTER);
2047 if (d->high->value.character.length > 1)
2049 for (i = 1; i < d->high->value.character.length; i++)
2050 if (d->high->value.character.string[i] != ' ')
2052 if (i != d->high->value.character.length)
2059 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
2061 for (c = code->block; c; c = c->block)
2063 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2069 /* Assume it's the default case. */
2070 low = high = NULL_TREE;
2074 /* CASE ('ab') or CASE ('ab':'az') will never match
2075 any length 1 character. */
2076 if (cp->low->value.character.length > 1
2077 && cp->low->value.character.string[1] != ' ')
2080 if (cp->low->value.character.length > 0)
2081 r = cp->low->value.character.string[0];
2084 low = build_int_cst (ctype, r);
2086 /* If there's only a lower bound, set the high bound
2087 to the maximum value of the case expression. */
2089 high = TYPE_MAX_VALUE (ctype);
2095 || (cp->low->value.character.string[0]
2096 != cp->high->value.character.string[0]))
2098 if (cp->high->value.character.length > 0)
2099 r = cp->high->value.character.string[0];
2102 high = build_int_cst (ctype, r);
2105 /* Unbounded case. */
2107 low = TYPE_MIN_VALUE (ctype);
2110 /* Build a label. */
2111 label = gfc_build_label_decl (NULL_TREE);
2113 /* Add this case label.
2114 Add parameter 'label', make it match GCC backend. */
2115 tmp = build_case_label (low, high, label);
2116 gfc_add_expr_to_block (&body, tmp);
2119 /* Add the statements for this case. */
2120 tmp = gfc_trans_code (c->next);
2121 gfc_add_expr_to_block (&body, tmp);
2123 /* Break to the end of the construct. */
2124 tmp = build1_v (GOTO_EXPR, end_label);
2125 gfc_add_expr_to_block (&body, tmp);
2128 tmp = gfc_string_to_single_character (expr1se.string_length,
2130 code->expr1->ts.kind);
2131 case_num = gfc_create_var (ctype, "case_num");
2132 gfc_add_modify (&block, case_num, tmp);
2134 gfc_add_block_to_block (&block, &expr1se.post);
2136 tmp = gfc_finish_block (&body);
2137 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2138 gfc_add_expr_to_block (&block, tmp);
2140 tmp = build1_v (LABEL_EXPR, end_label);
2141 gfc_add_expr_to_block (&block, tmp);
2143 return gfc_finish_block (&block);
2147 if (code->expr1->ts.kind == 1)
2149 else if (code->expr1->ts.kind == 4)
2154 if (select_struct[k] == NULL)
2157 select_struct[k] = make_node (RECORD_TYPE);
2159 if (code->expr1->ts.kind == 1)
2160 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2161 else if (code->expr1->ts.kind == 4)
2162 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2167 #define ADD_FIELD(NAME, TYPE) \
2168 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2169 get_identifier (stringize(NAME)), \
2173 ADD_FIELD (string1, pchartype);
2174 ADD_FIELD (string1_len, gfc_charlen_type_node);
2176 ADD_FIELD (string2, pchartype);
2177 ADD_FIELD (string2_len, gfc_charlen_type_node);
2179 ADD_FIELD (target, integer_type_node);
2182 gfc_finish_type (select_struct[k]);
2186 for (d = cp; d; d = d->right)
2189 for (c = code->block; c; c = c->block)
2191 for (d = c->ext.block.case_list; d; d = d->next)
2193 label = gfc_build_label_decl (NULL_TREE);
2194 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2196 : build_int_cst (integer_type_node, d->n),
2198 gfc_add_expr_to_block (&body, tmp);
2201 tmp = gfc_trans_code (c->next);
2202 gfc_add_expr_to_block (&body, tmp);
2204 tmp = build1_v (GOTO_EXPR, end_label);
2205 gfc_add_expr_to_block (&body, tmp);
2208 /* Generate the structure describing the branches */
2209 for (d = cp; d; d = d->right)
2211 VEC(constructor_elt,gc) *node = NULL;
2213 gfc_init_se (&se, NULL);
2217 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2218 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2222 gfc_conv_expr_reference (&se, d->low);
2224 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2225 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2228 if (d->high == NULL)
2230 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2231 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2235 gfc_init_se (&se, NULL);
2236 gfc_conv_expr_reference (&se, d->high);
2238 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2239 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2242 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2243 build_int_cst (integer_type_node, d->n));
2245 tmp = build_constructor (select_struct[k], node);
2246 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2249 type = build_array_type (select_struct[k],
2250 build_index_type (size_int (n-1)));
2252 init = build_constructor (type, inits);
2253 TREE_CONSTANT (init) = 1;
2254 TREE_STATIC (init) = 1;
2255 /* Create a static variable to hold the jump table. */
2256 tmp = gfc_create_var (type, "jumptable");
2257 TREE_CONSTANT (tmp) = 1;
2258 TREE_STATIC (tmp) = 1;
2259 TREE_READONLY (tmp) = 1;
2260 DECL_INITIAL (tmp) = init;
2263 /* Build the library call */
2264 init = gfc_build_addr_expr (pvoid_type_node, init);
2266 if (code->expr1->ts.kind == 1)
2267 fndecl = gfor_fndecl_select_string;
2268 else if (code->expr1->ts.kind == 4)
2269 fndecl = gfor_fndecl_select_string_char4;
2273 tmp = build_call_expr_loc (input_location,
2275 build_int_cst (gfc_charlen_type_node, n),
2276 expr1se.expr, expr1se.string_length);
2277 case_num = gfc_create_var (integer_type_node, "case_num");
2278 gfc_add_modify (&block, case_num, tmp);
2280 gfc_add_block_to_block (&block, &expr1se.post);
2282 tmp = gfc_finish_block (&body);
2283 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2284 gfc_add_expr_to_block (&block, tmp);
2286 tmp = build1_v (LABEL_EXPR, end_label);
2287 gfc_add_expr_to_block (&block, tmp);
2289 return gfc_finish_block (&block);
2293 /* Translate the three variants of the SELECT CASE construct.
2295 SELECT CASEs with INTEGER case expressions can be translated to an
2296 equivalent GENERIC switch statement, and for LOGICAL case
2297 expressions we build one or two if-else compares.
2299 SELECT CASEs with CHARACTER case expressions are a whole different
2300 story, because they don't exist in GENERIC. So we sort them and
2301 do a binary search at runtime.
2303 Fortran has no BREAK statement, and it does not allow jumps from
2304 one case block to another. That makes things a lot easier for
2308 gfc_trans_select (gfc_code * code)
2314 gcc_assert (code && code->expr1);
2315 gfc_init_block (&block);
2317 /* Build the exit label and hang it in. */
2318 exit_label = gfc_build_label_decl (NULL_TREE);
2319 code->exit_label = exit_label;
2321 /* Empty SELECT constructs are legal. */
2322 if (code->block == NULL)
2323 body = build_empty_stmt (input_location);
2325 /* Select the correct translation function. */
2327 switch (code->expr1->ts.type)
2330 body = gfc_trans_logical_select (code);
2334 body = gfc_trans_integer_select (code);
2338 body = gfc_trans_character_select (code);
2342 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2346 /* Build everything together. */
2347 gfc_add_expr_to_block (&block, body);
2348 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2350 return gfc_finish_block (&block);
2354 /* Traversal function to substitute a replacement symtree if the symbol
2355 in the expression is the same as that passed. f == 2 signals that
2356 that variable itself is not to be checked - only the references.
2357 This group of functions is used when the variable expression in a
2358 FORALL assignment has internal references. For example:
2359 FORALL (i = 1:4) p(p(i)) = i
2360 The only recourse here is to store a copy of 'p' for the index
2363 static gfc_symtree *new_symtree;
2364 static gfc_symtree *old_symtree;
2367 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2369 if (expr->expr_type != EXPR_VARIABLE)
2374 else if (expr->symtree->n.sym == sym)
2375 expr->symtree = new_symtree;
2381 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2383 gfc_traverse_expr (e, sym, forall_replace, f);
2387 forall_restore (gfc_expr *expr,
2388 gfc_symbol *sym ATTRIBUTE_UNUSED,
2389 int *f ATTRIBUTE_UNUSED)
2391 if (expr->expr_type != EXPR_VARIABLE)
2394 if (expr->symtree == new_symtree)
2395 expr->symtree = old_symtree;
2401 forall_restore_symtree (gfc_expr *e)
2403 gfc_traverse_expr (e, NULL, forall_restore, 0);
2407 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2412 gfc_symbol *new_sym;
2413 gfc_symbol *old_sym;
2417 /* Build a copy of the lvalue. */
2418 old_symtree = c->expr1->symtree;
2419 old_sym = old_symtree->n.sym;
2420 e = gfc_lval_expr_from_sym (old_sym);
2421 if (old_sym->attr.dimension)
2423 gfc_init_se (&tse, NULL);
2424 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2425 gfc_add_block_to_block (pre, &tse.pre);
2426 gfc_add_block_to_block (post, &tse.post);
2427 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2429 if (e->ts.type != BT_CHARACTER)
2431 /* Use the variable offset for the temporary. */
2432 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2433 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
2438 gfc_init_se (&tse, NULL);
2439 gfc_init_se (&rse, NULL);
2440 gfc_conv_expr (&rse, e);
2441 if (e->ts.type == BT_CHARACTER)
2443 tse.string_length = rse.string_length;
2444 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2446 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2448 gfc_add_block_to_block (pre, &tse.pre);
2449 gfc_add_block_to_block (post, &tse.post);
2453 tmp = gfc_typenode_for_spec (&e->ts);
2454 tse.expr = gfc_create_var (tmp, "temp");
2457 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2458 e->expr_type == EXPR_VARIABLE, true);
2459 gfc_add_expr_to_block (pre, tmp);
2463 /* Create a new symbol to represent the lvalue. */
2464 new_sym = gfc_new_symbol (old_sym->name, NULL);
2465 new_sym->ts = old_sym->ts;
2466 new_sym->attr.referenced = 1;
2467 new_sym->attr.temporary = 1;
2468 new_sym->attr.dimension = old_sym->attr.dimension;
2469 new_sym->attr.flavor = old_sym->attr.flavor;
2471 /* Use the temporary as the backend_decl. */
2472 new_sym->backend_decl = tse.expr;
2474 /* Create a fake symtree for it. */
2476 new_symtree = gfc_new_symtree (&root, old_sym->name);
2477 new_symtree->n.sym = new_sym;
2478 gcc_assert (new_symtree == root);
2480 /* Go through the expression reference replacing the old_symtree
2482 forall_replace_symtree (c->expr1, old_sym, 2);
2484 /* Now we have made this temporary, we might as well use it for
2485 the right hand side. */
2486 forall_replace_symtree (c->expr2, old_sym, 1);
2490 /* Handles dependencies in forall assignments. */
2492 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2499 lsym = c->expr1->symtree->n.sym;
2500 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2502 /* Now check for dependencies within the 'variable'
2503 expression itself. These are treated by making a complete
2504 copy of variable and changing all the references to it
2505 point to the copy instead. Note that the shallow copy of
2506 the variable will not suffice for derived types with
2507 pointer components. We therefore leave these to their
2509 if (lsym->ts.type == BT_DERIVED
2510 && lsym->ts.u.derived->attr.pointer_comp)
2514 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2516 forall_make_variable_temp (c, pre, post);
2520 /* Substrings with dependencies are treated in the same
2522 if (c->expr1->ts.type == BT_CHARACTER
2524 && c->expr2->expr_type == EXPR_VARIABLE
2525 && lsym == c->expr2->symtree->n.sym)
2527 for (lref = c->expr1->ref; lref; lref = lref->next)
2528 if (lref->type == REF_SUBSTRING)
2530 for (rref = c->expr2->ref; rref; rref = rref->next)
2531 if (rref->type == REF_SUBSTRING)
2535 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2537 forall_make_variable_temp (c, pre, post);
2546 cleanup_forall_symtrees (gfc_code *c)
2548 forall_restore_symtree (c->expr1);
2549 forall_restore_symtree (c->expr2);
2550 free (new_symtree->n.sym);
2555 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2556 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2557 indicates whether we should generate code to test the FORALLs mask
2558 array. OUTER is the loop header to be used for initializing mask
2561 The generated loop format is:
2562 count = (end - start + step) / step
2575 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2576 int mask_flag, stmtblock_t *outer)
2584 tree var, start, end, step;
2587 /* Initialize the mask index outside the FORALL nest. */
2588 if (mask_flag && forall_tmp->mask)
2589 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2591 iter = forall_tmp->this_loop;
2592 nvar = forall_tmp->nvar;
2593 for (n = 0; n < nvar; n++)
2596 start = iter->start;
2600 exit_label = gfc_build_label_decl (NULL_TREE);
2601 TREE_USED (exit_label) = 1;
2603 /* The loop counter. */
2604 count = gfc_create_var (TREE_TYPE (var), "count");
2606 /* The body of the loop. */
2607 gfc_init_block (&block);
2609 /* The exit condition. */
2610 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2611 count, build_int_cst (TREE_TYPE (count), 0));
2612 tmp = build1_v (GOTO_EXPR, exit_label);
2613 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2614 cond, tmp, build_empty_stmt (input_location));
2615 gfc_add_expr_to_block (&block, tmp);
2617 /* The main loop body. */
2618 gfc_add_expr_to_block (&block, body);
2620 /* Increment the loop variable. */
2621 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2623 gfc_add_modify (&block, var, tmp);
2625 /* Advance to the next mask element. Only do this for the
2627 if (n == 0 && mask_flag && forall_tmp->mask)
2629 tree maskindex = forall_tmp->maskindex;
2630 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2631 maskindex, gfc_index_one_node);
2632 gfc_add_modify (&block, maskindex, tmp);
2635 /* Decrement the loop counter. */
2636 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2637 build_int_cst (TREE_TYPE (var), 1));
2638 gfc_add_modify (&block, count, tmp);
2640 body = gfc_finish_block (&block);
2642 /* Loop var initialization. */
2643 gfc_init_block (&block);
2644 gfc_add_modify (&block, var, start);
2647 /* Initialize the loop counter. */
2648 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2650 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2652 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2654 gfc_add_modify (&block, count, tmp);
2656 /* The loop expression. */
2657 tmp = build1_v (LOOP_EXPR, body);
2658 gfc_add_expr_to_block (&block, tmp);
2660 /* The exit label. */
2661 tmp = build1_v (LABEL_EXPR, exit_label);
2662 gfc_add_expr_to_block (&block, tmp);
2664 body = gfc_finish_block (&block);
2671 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2672 is nonzero, the body is controlled by all masks in the forall nest.
2673 Otherwise, the innermost loop is not controlled by it's mask. This
2674 is used for initializing that mask. */
2677 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2682 forall_info *forall_tmp;
2683 tree mask, maskindex;
2685 gfc_start_block (&header);
2687 forall_tmp = nested_forall_info;
2688 while (forall_tmp != NULL)
2690 /* Generate body with masks' control. */
2693 mask = forall_tmp->mask;
2694 maskindex = forall_tmp->maskindex;
2696 /* If a mask was specified make the assignment conditional. */
2699 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2700 body = build3_v (COND_EXPR, tmp, body,
2701 build_empty_stmt (input_location));
2704 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2705 forall_tmp = forall_tmp->prev_nest;
2709 gfc_add_expr_to_block (&header, body);
2710 return gfc_finish_block (&header);
2714 /* Allocate data for holding a temporary array. Returns either a local
2715 temporary array or a pointer variable. */
2718 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2725 if (INTEGER_CST_P (size))
2726 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2727 size, gfc_index_one_node);
2731 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2732 type = build_array_type (elem_type, type);
2733 if (gfc_can_put_var_on_stack (bytesize))
2735 gcc_assert (INTEGER_CST_P (size));
2736 tmpvar = gfc_create_var (type, "temp");
2741 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2742 *pdata = convert (pvoid_type_node, tmpvar);
2744 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2745 gfc_add_modify (pblock, tmpvar, tmp);
2751 /* Generate codes to copy the temporary to the actual lhs. */
2754 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2755 tree count1, tree wheremask, bool invert)
2759 stmtblock_t block, body;
2765 lss = gfc_walk_expr (expr);
2767 if (lss == gfc_ss_terminator)
2769 gfc_start_block (&block);
2771 gfc_init_se (&lse, NULL);
2773 /* Translate the expression. */
2774 gfc_conv_expr (&lse, expr);
2776 /* Form the expression for the temporary. */
2777 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2779 /* Use the scalar assignment as is. */
2780 gfc_add_block_to_block (&block, &lse.pre);
2781 gfc_add_modify (&block, lse.expr, tmp);
2782 gfc_add_block_to_block (&block, &lse.post);
2784 /* Increment the count1. */
2785 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2786 count1, gfc_index_one_node);
2787 gfc_add_modify (&block, count1, tmp);
2789 tmp = gfc_finish_block (&block);
2793 gfc_start_block (&block);
2795 gfc_init_loopinfo (&loop1);
2796 gfc_init_se (&rse, NULL);
2797 gfc_init_se (&lse, NULL);
2799 /* Associate the lss with the loop. */
2800 gfc_add_ss_to_loop (&loop1, lss);
2802 /* Calculate the bounds of the scalarization. */
2803 gfc_conv_ss_startstride (&loop1);
2804 /* Setup the scalarizing loops. */
2805 gfc_conv_loop_setup (&loop1, &expr->where);
2807 gfc_mark_ss_chain_used (lss, 1);
2809 /* Start the scalarized loop body. */
2810 gfc_start_scalarized_body (&loop1, &body);
2812 /* Setup the gfc_se structures. */
2813 gfc_copy_loopinfo_to_se (&lse, &loop1);
2816 /* Form the expression of the temporary. */
2817 if (lss != gfc_ss_terminator)
2818 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2819 /* Translate expr. */
2820 gfc_conv_expr (&lse, expr);
2822 /* Use the scalar assignment. */
2823 rse.string_length = lse.string_length;
2824 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2826 /* Form the mask expression according to the mask tree list. */
2829 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2831 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2832 TREE_TYPE (wheremaskexpr),
2834 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2836 build_empty_stmt (input_location));
2839 gfc_add_expr_to_block (&body, tmp);
2841 /* Increment count1. */
2842 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2843 count1, gfc_index_one_node);
2844 gfc_add_modify (&body, count1, tmp);
2846 /* Increment count3. */
2849 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2850 gfc_array_index_type, count3,
2851 gfc_index_one_node);
2852 gfc_add_modify (&body, count3, tmp);
2855 /* Generate the copying loops. */
2856 gfc_trans_scalarizing_loops (&loop1, &body);
2857 gfc_add_block_to_block (&block, &loop1.pre);
2858 gfc_add_block_to_block (&block, &loop1.post);
2859 gfc_cleanup_loop (&loop1);
2861 tmp = gfc_finish_block (&block);
2867 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2868 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2869 and should not be freed. WHEREMASK is the conditional execution mask
2870 whose sense may be inverted by INVERT. */
2873 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2874 tree count1, gfc_ss *lss, gfc_ss *rss,
2875 tree wheremask, bool invert)
2877 stmtblock_t block, body1;
2884 gfc_start_block (&block);
2886 gfc_init_se (&rse, NULL);
2887 gfc_init_se (&lse, NULL);
2889 if (lss == gfc_ss_terminator)
2891 gfc_init_block (&body1);
2892 gfc_conv_expr (&rse, expr2);
2893 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2897 /* Initialize the loop. */
2898 gfc_init_loopinfo (&loop);
2900 /* We may need LSS to determine the shape of the expression. */
2901 gfc_add_ss_to_loop (&loop, lss);
2902 gfc_add_ss_to_loop (&loop, rss);
2904 gfc_conv_ss_startstride (&loop);
2905 gfc_conv_loop_setup (&loop, &expr2->where);
2907 gfc_mark_ss_chain_used (rss, 1);
2908 /* Start the loop body. */
2909 gfc_start_scalarized_body (&loop, &body1);
2911 /* Translate the expression. */
2912 gfc_copy_loopinfo_to_se (&rse, &loop);
2914 gfc_conv_expr (&rse, expr2);
2916 /* Form the expression of the temporary. */
2917 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2920 /* Use the scalar assignment. */
2921 lse.string_length = rse.string_length;
2922 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2923 expr2->expr_type == EXPR_VARIABLE, true);
2925 /* Form the mask expression according to the mask tree list. */
2928 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2930 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2931 TREE_TYPE (wheremaskexpr),
2933 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2935 build_empty_stmt (input_location));
2938 gfc_add_expr_to_block (&body1, tmp);
2940 if (lss == gfc_ss_terminator)
2942 gfc_add_block_to_block (&block, &body1);
2944 /* Increment count1. */
2945 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2946 count1, gfc_index_one_node);
2947 gfc_add_modify (&block, count1, tmp);
2951 /* Increment count1. */
2952 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2953 count1, gfc_index_one_node);
2954 gfc_add_modify (&body1, count1, tmp);
2956 /* Increment count3. */
2959 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2960 gfc_array_index_type,
2961 count3, gfc_index_one_node);
2962 gfc_add_modify (&body1, count3, tmp);
2965 /* Generate the copying loops. */
2966 gfc_trans_scalarizing_loops (&loop, &body1);
2968 gfc_add_block_to_block (&block, &loop.pre);
2969 gfc_add_block_to_block (&block, &loop.post);
2971 gfc_cleanup_loop (&loop);
2972 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2973 as tree nodes in SS may not be valid in different scope. */
2976 tmp = gfc_finish_block (&block);
2981 /* Calculate the size of temporary needed in the assignment inside forall.
2982 LSS and RSS are filled in this function. */
2985 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2986 stmtblock_t * pblock,
2987 gfc_ss **lss, gfc_ss **rss)
2995 *lss = gfc_walk_expr (expr1);
2998 size = gfc_index_one_node;
2999 if (*lss != gfc_ss_terminator)
3001 gfc_init_loopinfo (&loop);
3003 /* Walk the RHS of the expression. */
3004 *rss = gfc_walk_expr (expr2);
3005 if (*rss == gfc_ss_terminator)
3006 /* The rhs is scalar. Add a ss for the expression. */
3007 *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
3009 /* Associate the SS with the loop. */
3010 gfc_add_ss_to_loop (&loop, *lss);
3011 /* We don't actually need to add the rhs at this point, but it might
3012 make guessing the loop bounds a bit easier. */
3013 gfc_add_ss_to_loop (&loop, *rss);
3015 /* We only want the shape of the expression, not rest of the junk
3016 generated by the scalarizer. */
3017 loop.array_parameter = 1;
3019 /* Calculate the bounds of the scalarization. */
3020 save_flag = gfc_option.rtcheck;
3021 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
3022 gfc_conv_ss_startstride (&loop);
3023 gfc_option.rtcheck = save_flag;
3024 gfc_conv_loop_setup (&loop, &expr2->where);
3026 /* Figure out how many elements we need. */
3027 for (i = 0; i < loop.dimen; i++)
3029 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3030 gfc_array_index_type,
3031 gfc_index_one_node, loop.from[i]);
3032 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3033 gfc_array_index_type, tmp, loop.to[i]);
3034 size = fold_build2_loc (input_location, MULT_EXPR,
3035 gfc_array_index_type, size, tmp);
3037 gfc_add_block_to_block (pblock, &loop.pre);
3038 size = gfc_evaluate_now (size, pblock);
3039 gfc_add_block_to_block (pblock, &loop.post);
3041 /* TODO: write a function that cleans up a loopinfo without freeing
3042 the SS chains. Currently a NOP. */
3049 /* Calculate the overall iterator number of the nested forall construct.
3050 This routine actually calculates the number of times the body of the
3051 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3052 that by the expression INNER_SIZE. The BLOCK argument specifies the
3053 block in which to calculate the result, and the optional INNER_SIZE_BODY
3054 argument contains any statements that need to executed (inside the loop)
3055 to initialize or calculate INNER_SIZE. */
3058 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
3059 stmtblock_t *inner_size_body, stmtblock_t *block)
3061 forall_info *forall_tmp = nested_forall_info;
3065 /* We can eliminate the innermost unconditional loops with constant
3067 if (INTEGER_CST_P (inner_size))
3070 && !forall_tmp->mask
3071 && INTEGER_CST_P (forall_tmp->size))
3073 inner_size = fold_build2_loc (input_location, MULT_EXPR,
3074 gfc_array_index_type,
3075 inner_size, forall_tmp->size);
3076 forall_tmp = forall_tmp->prev_nest;
3079 /* If there are no loops left, we have our constant result. */
3084 /* Otherwise, create a temporary variable to compute the result. */
3085 number = gfc_create_var (gfc_array_index_type, "num");
3086 gfc_add_modify (block, number, gfc_index_zero_node);
3088 gfc_start_block (&body);
3089 if (inner_size_body)
3090 gfc_add_block_to_block (&body, inner_size_body);
3092 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3093 gfc_array_index_type, number, inner_size);
3096 gfc_add_modify (&body, number, tmp);
3097 tmp = gfc_finish_block (&body);
3099 /* Generate loops. */
3100 if (forall_tmp != NULL)
3101 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3103 gfc_add_expr_to_block (block, tmp);
3109 /* Allocate temporary for forall construct. SIZE is the size of temporary
3110 needed. PTEMP1 is returned for space free. */
3113 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
3120 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
3121 if (!integer_onep (unit))
3122 bytesize = fold_build2_loc (input_location, MULT_EXPR,
3123 gfc_array_index_type, size, unit);
3128 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
3131 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3136 /* Allocate temporary for forall construct according to the information in
3137 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3138 assignment inside forall. PTEMP1 is returned for space free. */
3141 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3142 tree inner_size, stmtblock_t * inner_size_body,
3143 stmtblock_t * block, tree * ptemp1)
3147 /* Calculate the total size of temporary needed in forall construct. */
3148 size = compute_overall_iter_number (nested_forall_info, inner_size,
3149 inner_size_body, block);
3151 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3155 /* Handle assignments inside forall which need temporary.
3157 forall (i=start:end:stride; maskexpr)
3160 (where e,f<i> are arbitrary expressions possibly involving i
3161 and there is a dependency between e<i> and f<i>)
3163 masktmp(:) = maskexpr(:)
3168 for (i = start; i <= end; i += stride)
3172 for (i = start; i <= end; i += stride)
3174 if (masktmp[maskindex++])
3175 tmp[count1++] = f<i>
3179 for (i = start; i <= end; i += stride)
3181 if (masktmp[maskindex++])
3182 e<i> = tmp[count1++]
3187 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3188 tree wheremask, bool invert,
3189 forall_info * nested_forall_info,
3190 stmtblock_t * block)
3198 stmtblock_t inner_size_body;
3200 /* Create vars. count1 is the current iterator number of the nested
3202 count1 = gfc_create_var (gfc_array_index_type, "count1");
3204 /* Count is the wheremask index. */
3207 count = gfc_create_var (gfc_array_index_type, "count");
3208 gfc_add_modify (block, count, gfc_index_zero_node);
3213 /* Initialize count1. */
3214 gfc_add_modify (block, count1, gfc_index_zero_node);
3216 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3217 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3218 gfc_init_block (&inner_size_body);
3219 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3222 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3223 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
3225 if (!expr1->ts.u.cl->backend_decl)
3228 gfc_init_se (&tse, NULL);
3229 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3230 expr1->ts.u.cl->backend_decl = tse.expr;
3232 type = gfc_get_character_type_len (gfc_default_character_kind,
3233 expr1->ts.u.cl->backend_decl);
3236 type = gfc_typenode_for_spec (&expr1->ts);
3238 /* Allocate temporary for nested forall construct according to the
3239 information in nested_forall_info and inner_size. */
3240 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3241 &inner_size_body, block, &ptemp1);
3243 /* Generate codes to copy rhs to the temporary . */
3244 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3247 /* Generate body and loops according to the information in
3248 nested_forall_info. */
3249 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3250 gfc_add_expr_to_block (block, tmp);
3253 gfc_add_modify (block, count1, gfc_index_zero_node);
3257 gfc_add_modify (block, count, gfc_index_zero_node);
3259 /* Generate codes to copy the temporary to lhs. */
3260 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3263 /* Generate body and loops according to the information in
3264 nested_forall_info. */
3265 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3266 gfc_add_expr_to_block (block, tmp);
3270 /* Free the temporary. */
3271 tmp = gfc_call_free (ptemp1);
3272 gfc_add_expr_to_block (block, tmp);
3277 /* Translate pointer assignment inside FORALL which need temporary. */
3280 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3281 forall_info * nested_forall_info,
3282 stmtblock_t * block)
3289 gfc_array_info *info;
3296 tree tmp, tmp1, ptemp1;
3298 count = gfc_create_var (gfc_array_index_type, "count");
3299 gfc_add_modify (block, count, gfc_index_zero_node);
3301 inner_size = gfc_index_one_node;
3302 lss = gfc_walk_expr (expr1);
3303 rss = gfc_walk_expr (expr2);
3304 if (lss == gfc_ss_terminator)
3306 type = gfc_typenode_for_spec (&expr1->ts);
3307 type = build_pointer_type (type);
3309 /* Allocate temporary for nested forall construct according to the
3310 information in nested_forall_info and inner_size. */
3311 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3312 inner_size, NULL, block, &ptemp1);
3313 gfc_start_block (&body);
3314 gfc_init_se (&lse, NULL);
3315 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3316 gfc_init_se (&rse, NULL);
3317 rse.want_pointer = 1;
3318 gfc_conv_expr (&rse, expr2);
3319 gfc_add_block_to_block (&body, &rse.pre);
3320 gfc_add_modify (&body, lse.expr,
3321 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3322 gfc_add_block_to_block (&body, &rse.post);
3324 /* Increment count. */
3325 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3326 count, gfc_index_one_node);
3327 gfc_add_modify (&body, count, tmp);
3329 tmp = gfc_finish_block (&body);
3331 /* Generate body and loops according to the information in
3332 nested_forall_info. */
3333 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3334 gfc_add_expr_to_block (block, tmp);
3337 gfc_add_modify (block, count, gfc_index_zero_node);
3339 gfc_start_block (&body);
3340 gfc_init_se (&lse, NULL);
3341 gfc_init_se (&rse, NULL);
3342 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3343 lse.want_pointer = 1;
3344 gfc_conv_expr (&lse, expr1);
3345 gfc_add_block_to_block (&body, &lse.pre);
3346 gfc_add_modify (&body, lse.expr, rse.expr);
3347 gfc_add_block_to_block (&body, &lse.post);
3348 /* Increment count. */
3349 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3350 count, gfc_index_one_node);
3351 gfc_add_modify (&body, count, tmp);
3352 tmp = gfc_finish_block (&body);
3354 /* Generate body and loops according to the information in
3355 nested_forall_info. */
3356 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3357 gfc_add_expr_to_block (block, tmp);
3361 gfc_init_loopinfo (&loop);
3363 /* Associate the SS with the loop. */
3364 gfc_add_ss_to_loop (&loop, rss);
3366 /* Setup the scalarizing loops and bounds. */
3367 gfc_conv_ss_startstride (&loop);
3369 gfc_conv_loop_setup (&loop, &expr2->where);
3371 info = &rss->info->data.array;
3372 desc = info->descriptor;
3374 /* Make a new descriptor. */
3375 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3376 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3377 loop.from, loop.to, 1,
3378 GFC_ARRAY_UNKNOWN, true);
3380 /* Allocate temporary for nested forall construct. */
3381 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3382 inner_size, NULL, block, &ptemp1);
3383 gfc_start_block (&body);
3384 gfc_init_se (&lse, NULL);
3385 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3386 lse.direct_byref = 1;
3387 rss = gfc_walk_expr (expr2);
3388 gfc_conv_expr_descriptor (&lse, expr2, rss);
3390 gfc_add_block_to_block (&body, &lse.pre);
3391 gfc_add_block_to_block (&body, &lse.post);
3393 /* Increment count. */
3394 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3395 count, gfc_index_one_node);
3396 gfc_add_modify (&body, count, tmp);
3398 tmp = gfc_finish_block (&body);
3400 /* Generate body and loops according to the information in
3401 nested_forall_info. */
3402 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3403 gfc_add_expr_to_block (block, tmp);
3406 gfc_add_modify (block, count, gfc_index_zero_node);
3408 parm = gfc_build_array_ref (tmp1, count, NULL);
3409 lss = gfc_walk_expr (expr1);
3410 gfc_init_se (&lse, NULL);
3411 gfc_conv_expr_descriptor (&lse, expr1, lss);
3412 gfc_add_modify (&lse.pre, lse.expr, parm);
3413 gfc_start_block (&body);
3414 gfc_add_block_to_block (&body, &lse.pre);
3415 gfc_add_block_to_block (&body, &lse.post);
3417 /* Increment count. */
3418 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3419 count, gfc_index_one_node);
3420 gfc_add_modify (&body, count, tmp);
3422 tmp = gfc_finish_block (&body);
3424 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3425 gfc_add_expr_to_block (block, tmp);
3427 /* Free the temporary. */
3430 tmp = gfc_call_free (ptemp1);
3431 gfc_add_expr_to_block (block, tmp);
3436 /* FORALL and WHERE statements are really nasty, especially when you nest
3437 them. All the rhs of a forall assignment must be evaluated before the
3438 actual assignments are performed. Presumably this also applies to all the
3439 assignments in an inner where statement. */
3441 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3442 linear array, relying on the fact that we process in the same order in all
3445 forall (i=start:end:stride; maskexpr)
3449 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3451 count = ((end + 1 - start) / stride)
3452 masktmp(:) = maskexpr(:)
3455 for (i = start; i <= end; i += stride)
3457 if (masktmp[maskindex++])
3461 for (i = start; i <= end; i += stride)
3463 if (masktmp[maskindex++])
3467 Note that this code only works when there are no dependencies.
3468 Forall loop with array assignments and data dependencies are a real pain,
3469 because the size of the temporary cannot always be determined before the
3470 loop is executed. This problem is compounded by the presence of nested
3475 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3492 tree cycle_label = NULL_TREE;
3496 gfc_forall_iterator *fa;
3499 gfc_saved_var *saved_vars;
3500 iter_info *this_forall;
3504 /* Do nothing if the mask is false. */
3506 && code->expr1->expr_type == EXPR_CONSTANT
3507 && !code->expr1->value.logical)
3508 return build_empty_stmt (input_location);
3511 /* Count the FORALL index number. */
3512 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3516 /* Allocate the space for var, start, end, step, varexpr. */
3517 var = XCNEWVEC (tree, nvar);
3518 start = XCNEWVEC (tree, nvar);
3519 end = XCNEWVEC (tree, nvar);
3520 step = XCNEWVEC (tree, nvar);
3521 varexpr = XCNEWVEC (gfc_expr *, nvar);
3522 saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3524 /* Allocate the space for info. */
3525 info = XCNEW (forall_info);
3527 gfc_start_block (&pre);
3528 gfc_init_block (&post);
3529 gfc_init_block (&block);
3532 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3534 gfc_symbol *sym = fa->var->symtree->n.sym;
3536 /* Allocate space for this_forall. */
3537 this_forall = XCNEW (iter_info);
3539 /* Create a temporary variable for the FORALL index. */
3540 tmp = gfc_typenode_for_spec (&sym->ts);
3541 var[n] = gfc_create_var (tmp, sym->name);
3542 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3544 /* Record it in this_forall. */
3545 this_forall->var = var[n];
3547 /* Replace the index symbol's backend_decl with the temporary decl. */
3548 sym->backend_decl = var[n];
3550 /* Work out the start, end and stride for the loop. */
3551 gfc_init_se (&se, NULL);
3552 gfc_conv_expr_val (&se, fa->start);
3553 /* Record it in this_forall. */
3554 this_forall->start = se.expr;
3555 gfc_add_block_to_block (&block, &se.pre);
3558 gfc_init_se (&se, NULL);
3559 gfc_conv_expr_val (&se, fa->end);
3560 /* Record it in this_forall. */
3561 this_forall->end = se.expr;
3562 gfc_make_safe_expr (&se);
3563 gfc_add_block_to_block (&block, &se.pre);
3566 gfc_init_se (&se, NULL);
3567 gfc_conv_expr_val (&se, fa->stride);
3568 /* Record it in this_forall. */
3569 this_forall->step = se.expr;
3570 gfc_make_safe_expr (&se);
3571 gfc_add_block_to_block (&block, &se.pre);
3574 /* Set the NEXT field of this_forall to NULL. */
3575 this_forall->next = NULL;
3576 /* Link this_forall to the info construct. */
3577 if (info->this_loop)
3579 iter_info *iter_tmp = info->this_loop;
3580 while (iter_tmp->next != NULL)
3581 iter_tmp = iter_tmp->next;
3582 iter_tmp->next = this_forall;
3585 info->this_loop = this_forall;
3591 /* Calculate the size needed for the current forall level. */
3592 size = gfc_index_one_node;
3593 for (n = 0; n < nvar; n++)
3595 /* size = (end + step - start) / step. */
3596 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3598 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3600 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3602 tmp = convert (gfc_array_index_type, tmp);
3604 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3608 /* Record the nvar and size of current forall level. */
3614 /* If the mask is .true., consider the FORALL unconditional. */
3615 if (code->expr1->expr_type == EXPR_CONSTANT
3616 && code->expr1->value.logical)
3624 /* First we need to allocate the mask. */
3627 /* As the mask array can be very big, prefer compact boolean types. */
3628 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3629 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3630 size, NULL, &block, &pmask);
3631 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3633 /* Record them in the info structure. */
3634 info->maskindex = maskindex;
3639 /* No mask was specified. */
3640 maskindex = NULL_TREE;
3641 mask = pmask = NULL_TREE;
3644 /* Link the current forall level to nested_forall_info. */
3645 info->prev_nest = nested_forall_info;
3646 nested_forall_info = info;
3648 /* Copy the mask into a temporary variable if required.
3649 For now we assume a mask temporary is needed. */
3652 /* As the mask array can be very big, prefer compact boolean types. */
3653 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3655 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3657 /* Start of mask assignment loop body. */
3658 gfc_start_block (&body);
3660 /* Evaluate the mask expression. */
3661 gfc_init_se (&se, NULL);
3662 gfc_conv_expr_val (&se, code->expr1);
3663 gfc_add_block_to_block (&body, &se.pre);
3665 /* Store the mask. */
3666 se.expr = convert (mask_type, se.expr);
3668 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3669 gfc_add_modify (&body, tmp, se.expr);
3671 /* Advance to the next mask element. */
3672 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3673 maskindex, gfc_index_one_node);
3674 gfc_add_modify (&body, maskindex, tmp);
3676 /* Generate the loops. */
3677 tmp = gfc_finish_block (&body);
3678 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3679 gfc_add_expr_to_block (&block, tmp);
3682 if (code->op == EXEC_DO_CONCURRENT)
3684 gfc_init_block (&body);
3685 cycle_label = gfc_build_label_decl (NULL_TREE);
3686 code->cycle_label = cycle_label;
3687 tmp = gfc_trans_code (code->block->next);
3688 gfc_add_expr_to_block (&body, tmp);
3690 if (TREE_USED (cycle_label))
3692 tmp = build1_v (LABEL_EXPR, cycle_label);
3693 gfc_add_expr_to_block (&body, tmp);
3696 tmp = gfc_finish_block (&body);
3697 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3698 gfc_add_expr_to_block (&block, tmp);
3702 c = code->block->next;
3704 /* TODO: loop merging in FORALL statements. */
3705 /* Now that we've got a copy of the mask, generate the assignment loops. */
3711 /* A scalar or array assignment. DO the simple check for
3712 lhs to rhs dependencies. These make a temporary for the
3713 rhs and form a second forall block to copy to variable. */
3714 need_temp = check_forall_dependencies(c, &pre, &post);
3716 /* Temporaries due to array assignment data dependencies introduce
3717 no end of problems. */
3719 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3720 nested_forall_info, &block);
3723 /* Use the normal assignment copying routines. */
3724 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3726 /* Generate body and loops. */
3727 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3729 gfc_add_expr_to_block (&block, tmp);
3732 /* Cleanup any temporary symtrees that have been made to deal
3733 with dependencies. */
3735 cleanup_forall_symtrees (c);
3740 /* Translate WHERE or WHERE construct nested in FORALL. */
3741 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3744 /* Pointer assignment inside FORALL. */
3745 case EXEC_POINTER_ASSIGN:
3746 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3748 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3749 nested_forall_info, &block);
3752 /* Use the normal assignment copying routines. */
3753 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3755 /* Generate body and loops. */
3756 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3758 gfc_add_expr_to_block (&block, tmp);
3763 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3764 gfc_add_expr_to_block (&block, tmp);
3767 /* Explicit subroutine calls are prevented by the frontend but interface
3768 assignments can legitimately produce them. */
3769 case EXEC_ASSIGN_CALL:
3770 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3771 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3772 gfc_add_expr_to_block (&block, tmp);
3783 /* Restore the original index variables. */
3784 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3785 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3787 /* Free the space for var, start, end, step, varexpr. */
3795 for (this_forall = info->this_loop; this_forall;)
3797 iter_info *next = this_forall->next;
3802 /* Free the space for this forall_info. */
3807 /* Free the temporary for the mask. */
3808 tmp = gfc_call_free (pmask);
3809 gfc_add_expr_to_block (&block, tmp);
3812 pushdecl (maskindex);
3814 gfc_add_block_to_block (&pre, &block);
3815 gfc_add_block_to_block (&pre, &post);
3817 return gfc_finish_block (&pre);
3821 /* Translate the FORALL statement or construct. */
3823 tree gfc_trans_forall (gfc_code * code)
3825 return gfc_trans_forall_1 (code, NULL);
3829 /* Translate the DO CONCURRENT construct. */
3831 tree gfc_trans_do_concurrent (gfc_code * code)
3833 return gfc_trans_forall_1 (code, NULL);
3837 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3838 If the WHERE construct is nested in FORALL, compute the overall temporary
3839 needed by the WHERE mask expression multiplied by the iterator number of
3841 ME is the WHERE mask expression.
3842 MASK is the current execution mask upon input, whose sense may or may
3843 not be inverted as specified by the INVERT argument.
3844 CMASK is the updated execution mask on output, or NULL if not required.
3845 PMASK is the pending execution mask on output, or NULL if not required.
3846 BLOCK is the block in which to place the condition evaluation loops. */
3849 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3850 tree mask, bool invert, tree cmask, tree pmask,
3851 tree mask_type, stmtblock_t * block)
3856 stmtblock_t body, body1;
3857 tree count, cond, mtmp;
3860 gfc_init_loopinfo (&loop);
3862 lss = gfc_walk_expr (me);
3863 rss = gfc_walk_expr (me);
3865 /* Variable to index the temporary. */
3866 count = gfc_create_var (gfc_array_index_type, "count");
3867 /* Initialize count. */
3868 gfc_add_modify (block, count, gfc_index_zero_node);
3870 gfc_start_block (&body);
3872 gfc_init_se (&rse, NULL);
3873 gfc_init_se (&lse, NULL);
3875 if (lss == gfc_ss_terminator)
3877 gfc_init_block (&body1);
3881 /* Initialize the loop. */
3882 gfc_init_loopinfo (&loop);
3884 /* We may need LSS to determine the shape of the expression. */
3885 gfc_add_ss_to_loop (&loop, lss);
3886 gfc_add_ss_to_loop (&loop, rss);
3888 gfc_conv_ss_startstride (&loop);
3889 gfc_conv_loop_setup (&loop, &me->where);
3891 gfc_mark_ss_chain_used (rss, 1);
3892 /* Start the loop body. */
3893 gfc_start_scalarized_body (&loop, &body1);
3895 /* Translate the expression. */
3896 gfc_copy_loopinfo_to_se (&rse, &loop);
3898 gfc_conv_expr (&rse, me);
3901 /* Variable to evaluate mask condition. */
3902 cond = gfc_create_var (mask_type, "cond");
3903 if (mask && (cmask || pmask))
3904 mtmp = gfc_create_var (mask_type, "mask");
3905 else mtmp = NULL_TREE;
3907 gfc_add_block_to_block (&body1, &lse.pre);
3908 gfc_add_block_to_block (&body1, &rse.pre);
3910 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3912 if (mask && (cmask || pmask))
3914 tmp = gfc_build_array_ref (mask, count, NULL);
3916 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3917 gfc_add_modify (&body1, mtmp, tmp);
3922 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3925 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3927 gfc_add_modify (&body1, tmp1, tmp);
3932 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3933 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3935 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3937 gfc_add_modify (&body1, tmp1, tmp);
3940 gfc_add_block_to_block (&body1, &lse.post);
3941 gfc_add_block_to_block (&body1, &rse.post);
3943 if (lss == gfc_ss_terminator)
3945 gfc_add_block_to_block (&body, &body1);
3949 /* Increment count. */
3950 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3951 count, gfc_index_one_node);
3952 gfc_add_modify (&body1, count, tmp1);
3954 /* Generate the copying loops. */
3955 gfc_trans_scalarizing_loops (&loop, &body1);
3957 gfc_add_block_to_block (&body, &loop.pre);
3958 gfc_add_block_to_block (&body, &loop.post);
3960 gfc_cleanup_loop (&loop);
3961 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3962 as tree nodes in SS may not be valid in different scope. */
3965 tmp1 = gfc_finish_block (&body);
3966 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3967 if (nested_forall_info != NULL)
3968 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3970 gfc_add_expr_to_block (block, tmp1);
3974 /* Translate an assignment statement in a WHERE statement or construct
3975 statement. The MASK expression is used to control which elements
3976 of EXPR1 shall be assigned. The sense of MASK is specified by
3980 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3981 tree mask, bool invert,
3982 tree count1, tree count2,
3988 gfc_ss *lss_section;
3995 tree index, maskexpr;
3997 /* A defined assignment. */
3998 if (cnext && cnext->resolved_sym)
3999 return gfc_trans_call (cnext, true, mask, count1, invert);
4002 /* TODO: handle this special case.
4003 Special case a single function returning an array. */
4004 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
4006 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
4012 /* Assignment of the form lhs = rhs. */
4013 gfc_start_block (&block);
4015 gfc_init_se (&lse, NULL);
4016 gfc_init_se (&rse, NULL);
4019 lss = gfc_walk_expr (expr1);
4022 /* In each where-assign-stmt, the mask-expr and the variable being
4023 defined shall be arrays of the same shape. */
4024 gcc_assert (lss != gfc_ss_terminator);
4026 /* The assignment needs scalarization. */
4029 /* Find a non-scalar SS from the lhs. */
4030 while (lss_section != gfc_ss_terminator
4031 && lss_section->info->type != GFC_SS_SECTION)
4032 lss_section = lss_section->next;
4034 gcc_assert (lss_section != gfc_ss_terminator);
4036 /* Initialize the scalarizer. */
4037 gfc_init_loopinfo (&loop);
4040 rss = gfc_walk_expr (expr2);
4041 if (rss == gfc_ss_terminator)
4043 /* The rhs is scalar. Add a ss for the expression. */
4044 rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
4045 rss->info->where = 1;
4048 /* Associate the SS with the loop. */
4049 gfc_add_ss_to_loop (&loop, lss);
4050 gfc_add_ss_to_loop (&loop, rss);
4052 /* Calculate the bounds of the scalarization. */
4053 gfc_conv_ss_startstride (&loop);
4055 /* Resolve any data dependencies in the statement. */
4056 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
4058 /* Setup the scalarizing loops. */
4059 gfc_conv_loop_setup (&loop, &expr2->where);
4061 /* Setup the gfc_se structures. */
4062 gfc_copy_loopinfo_to_se (&lse, &loop);
4063 gfc_copy_loopinfo_to_se (&rse, &loop);
4066 gfc_mark_ss_chain_used (rss, 1);
4067 if (loop.temp_ss == NULL)
4070 gfc_mark_ss_chain_used (lss, 1);
4074 lse.ss = loop.temp_ss;
4075 gfc_mark_ss_chain_used (lss, 3);
4076 gfc_mark_ss_chain_used (loop.temp_ss, 3);
4079 /* Start the scalarized loop body. */
4080 gfc_start_scalarized_body (&loop, &body);
4082 /* Translate the expression. */
4083 gfc_conv_expr (&rse, expr2);
4084 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
4085 gfc_conv_tmp_array_ref (&lse);
4087 gfc_conv_expr (&lse, expr1);
4089 /* Form the mask expression according to the mask. */
4091 maskexpr = gfc_build_array_ref (mask, index, NULL);
4093 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4094 TREE_TYPE (maskexpr), maskexpr);
4096 /* Use the scalar assignment as is. */
4097 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
4098 loop.temp_ss != NULL, false, true);
4100 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
4102 gfc_add_expr_to_block (&body, tmp);
4104 if (lss == gfc_ss_terminator)
4106 /* Increment count1. */
4107 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4108 count1, gfc_index_one_node);
4109 gfc_add_modify (&body, count1, tmp);
4111 /* Use the scalar assignment as is. */
4112 gfc_add_block_to_block (&block, &body);
4116 gcc_assert (lse.ss == gfc_ss_terminator
4117 && rse.ss == gfc_ss_terminator);
4119 if (loop.temp_ss != NULL)
4121 /* Increment count1 before finish the main body of a scalarized
4123 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4124 gfc_array_index_type, count1, gfc_index_one_node);
4125 gfc_add_modify (&body, count1, tmp);
4126 gfc_trans_scalarized_loop_boundary (&loop, &body);
4128 /* We need to copy the temporary to the actual lhs. */
4129 gfc_init_se (&lse, NULL);
4130 gfc_init_se (&rse, NULL);
4131 gfc_copy_loopinfo_to_se (&lse, &loop);
4132 gfc_copy_loopinfo_to_se (&rse, &loop);
4134 rse.ss = loop.temp_ss;
4137 gfc_conv_tmp_array_ref (&rse);
4138 gfc_conv_expr (&lse, expr1);
4140 gcc_assert (lse.ss == gfc_ss_terminator
4141 && rse.ss == gfc_ss_terminator);
4143 /* Form the mask expression according to the mask tree list. */
4145 maskexpr = gfc_build_array_ref (mask, index, NULL);
4147 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4148 TREE_TYPE (maskexpr), maskexpr);
4150 /* Use the scalar assignment as is. */
4151 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
4153 tmp = build3_v (COND_EXPR, maskexpr, tmp,
4154 build_empty_stmt (input_location));
4155 gfc_add_expr_to_block (&body, tmp);
4157 /* Increment count2. */
4158 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4159 gfc_array_index_type, count2,
4160 gfc_index_one_node);
4161 gfc_add_modify (&body, count2, tmp);
4165 /* Increment count1. */
4166 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4167 gfc_array_index_type, count1,
4168 gfc_index_one_node);
4169 gfc_add_modify (&body, count1, tmp);
4172 /* Generate the copying loops. */
4173 gfc_trans_scalarizing_loops (&loop, &body);
4175 /* Wrap the whole thing up. */
4176 gfc_add_block_to_block (&block, &loop.pre);
4177 gfc_add_block_to_block (&block, &loop.post);
4178 gfc_cleanup_loop (&loop);
4181 return gfc_finish_block (&block);
4185 /* Translate the WHERE construct or statement.
4186 This function can be called iteratively to translate the nested WHERE
4187 construct or statement.
4188 MASK is the control mask. */
4191 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
4192 forall_info * nested_forall_info, stmtblock_t * block)
4194 stmtblock_t inner_size_body;
4195 tree inner_size, size;
4204 tree count1, count2;
4208 tree pcmask = NULL_TREE;
4209 tree ppmask = NULL_TREE;
4210 tree cmask = NULL_TREE;
4211 tree pmask = NULL_TREE;
4212 gfc_actual_arglist *arg;
4214 /* the WHERE statement or the WHERE construct statement. */
4215 cblock = code->block;
4217 /* As the mask array can be very big, prefer compact boolean types. */
4218 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
4220 /* Determine which temporary masks are needed. */
4223 /* One clause: No ELSEWHEREs. */
4224 need_cmask = (cblock->next != 0);
4227 else if (cblock->block->block)
4229 /* Three or more clauses: Conditional ELSEWHEREs. */
4233 else if (cblock->next)
4235 /* Two clauses, the first non-empty. */
4237 need_pmask = (mask != NULL_TREE
4238 && cblock->block->next != 0);
4240 else if (!cblock->block->next)
4242 /* Two clauses, both empty. */
4246 /* Two clauses, the first empty, the second non-empty. */
4249 need_cmask = (cblock->block->expr1 != 0);
4258 if (need_cmask || need_pmask)
4260 /* Calculate the size of temporary needed by the mask-expr. */
4261 gfc_init_block (&inner_size_body);
4262 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4263 &inner_size_body, &lss, &rss);
4265 gfc_free_ss_chain (lss);
4266 gfc_free_ss_chain (rss);
4268 /* Calculate the total size of temporary needed. */
4269 size = compute_overall_iter_number (nested_forall_info, inner_size,
4270 &inner_size_body, block);
4272 /* Check whether the size is negative. */
4273 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4274 gfc_index_zero_node);
4275 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4276 cond, gfc_index_zero_node, size);
4277 size = gfc_evaluate_now (size, block);
4279 /* Allocate temporary for WHERE mask if needed. */
4281 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4284 /* Allocate temporary for !mask if needed. */
4286 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4292 /* Each time around this loop, the where clause is conditional
4293 on the value of mask and invert, which are updated at the
4294 bottom of the loop. */
4296 /* Has mask-expr. */
4299 /* Ensure that the WHERE mask will be evaluated exactly once.
4300 If there are no statements in this WHERE/ELSEWHERE clause,
4301 then we don't need to update the control mask (cmask).
4302 If this is the last clause of the WHERE construct, then
4303 we don't need to update the pending control mask (pmask). */
4305 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4307 cblock->next ? cmask : NULL_TREE,
4308 cblock->block ? pmask : NULL_TREE,
4311 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4313 (cblock->next || cblock->block)
4314 ? cmask : NULL_TREE,
4315 NULL_TREE, mask_type, block);
4319 /* It's a final elsewhere-stmt. No mask-expr is present. */
4323 /* The body of this where clause are controlled by cmask with
4324 sense specified by invert. */
4326 /* Get the assignment statement of a WHERE statement, or the first
4327 statement in where-body-construct of a WHERE construct. */
4328 cnext = cblock->next;
4333 /* WHERE assignment statement. */
4334 case EXEC_ASSIGN_CALL:
4336 arg = cnext->ext.actual;
4337 expr1 = expr2 = NULL;
4338 for (; arg; arg = arg->next)
4350 expr1 = cnext->expr1;
4351 expr2 = cnext->expr2;
4353 if (nested_forall_info != NULL)
4355 need_temp = gfc_check_dependency (expr1, expr2, 0);
4356 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4357 gfc_trans_assign_need_temp (expr1, expr2,
4359 nested_forall_info, block);
4362 /* Variables to control maskexpr. */
4363 count1 = gfc_create_var (gfc_array_index_type, "count1");
4364 count2 = gfc_create_var (gfc_array_index_type, "count2");
4365 gfc_add_modify (block, count1, gfc_index_zero_node);
4366 gfc_add_modify (block, count2, gfc_index_zero_node);
4368 tmp = gfc_trans_where_assign (expr1, expr2,
4373 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4375 gfc_add_expr_to_block (block, tmp);
4380 /* Variables to control maskexpr. */
4381 count1 = gfc_create_var (gfc_array_index_type, "count1");
4382 count2 = gfc_create_var (gfc_array_index_type, "count2");
4383 gfc_add_modify (block, count1, gfc_index_zero_node);
4384 gfc_add_modify (block, count2, gfc_index_zero_node);
4386 tmp = gfc_trans_where_assign (expr1, expr2,
4390 gfc_add_expr_to_block (block, tmp);
4395 /* WHERE or WHERE construct is part of a where-body-construct. */
4397 gfc_trans_where_2 (cnext, cmask, invert,
4398 nested_forall_info, block);
4405 /* The next statement within the same where-body-construct. */
4406 cnext = cnext->next;
4408 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4409 cblock = cblock->block;
4410 if (mask == NULL_TREE)
4412 /* If we're the initial WHERE, we can simply invert the sense
4413 of the current mask to obtain the "mask" for the remaining
4420 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4426 /* If we allocated a pending mask array, deallocate it now. */
4429 tmp = gfc_call_free (ppmask);
4430 gfc_add_expr_to_block (block, tmp);
4433 /* If we allocated a current mask array, deallocate it now. */
4436 tmp = gfc_call_free (pcmask);
4437 gfc_add_expr_to_block (block, tmp);
4441 /* Translate a simple WHERE construct or statement without dependencies.
4442 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4443 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4444 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4447 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4449 stmtblock_t block, body;
4450 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4451 tree tmp, cexpr, tstmt, estmt;
4452 gfc_ss *css, *tdss, *tsss;
4453 gfc_se cse, tdse, tsse, edse, esse;
4458 /* Allow the scalarizer to workshare simple where loops. */
4459 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4460 ompws_flags |= OMPWS_SCALARIZER_WS;
4462 cond = cblock->expr1;
4463 tdst = cblock->next->expr1;
4464 tsrc = cblock->next->expr2;
4465 edst = eblock ? eblock->next->expr1 : NULL;
4466 esrc = eblock ? eblock->next->expr2 : NULL;
4468 gfc_start_block (&block);
4469 gfc_init_loopinfo (&loop);
4471 /* Handle the condition. */
4472 gfc_init_se (&cse, NULL);
4473 css = gfc_walk_expr (cond);
4474 gfc_add_ss_to_loop (&loop, css);
4476 /* Handle the then-clause. */
4477 gfc_init_se (&tdse, NULL);
4478 gfc_init_se (&tsse, NULL);
4479 tdss = gfc_walk_expr (tdst);
4480 tsss = gfc_walk_expr (tsrc);
4481 if (tsss == gfc_ss_terminator)
4483 tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
4484 tsss->info->where = 1;
4486 gfc_add_ss_to_loop (&loop, tdss);
4487 gfc_add_ss_to_loop (&loop, tsss);
4491 /* Handle the else clause. */
4492 gfc_init_se (&edse, NULL);
4493 gfc_init_se (&esse, NULL);
4494 edss = gfc_walk_expr (edst);
4495 esss = gfc_walk_expr (esrc);
4496 if (esss == gfc_ss_terminator)
4498 esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
4499 esss->info->where = 1;
4501 gfc_add_ss_to_loop (&loop, edss);
4502 gfc_add_ss_to_loop (&loop, esss);
4505 gfc_conv_ss_startstride (&loop);
4506 gfc_conv_loop_setup (&loop, &tdst->where);
4508 gfc_mark_ss_chain_used (css, 1);
4509 gfc_mark_ss_chain_used (tdss, 1);
4510 gfc_mark_ss_chain_used (tsss, 1);
4513 gfc_mark_ss_chain_used (edss, 1);
4514 gfc_mark_ss_chain_used (esss, 1);
4517 gfc_start_scalarized_body (&loop, &body);
4519 gfc_copy_loopinfo_to_se (&cse, &loop);
4520 gfc_copy_loopinfo_to_se (&tdse, &loop);
4521 gfc_copy_loopinfo_to_se (&tsse, &loop);
4527 gfc_copy_loopinfo_to_se (&edse, &loop);
4528 gfc_copy_loopinfo_to_se (&esse, &loop);
4533 gfc_conv_expr (&cse, cond);
4534 gfc_add_block_to_block (&body, &cse.pre);
4537 gfc_conv_expr (&tsse, tsrc);
4538 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4539 gfc_conv_tmp_array_ref (&tdse);
4541 gfc_conv_expr (&tdse, tdst);
4545 gfc_conv_expr (&esse, esrc);
4546 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4547 gfc_conv_tmp_array_ref (&edse);
4549 gfc_conv_expr (&edse, edst);
4552 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4553 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4555 : build_empty_stmt (input_location);
4556 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4557 gfc_add_expr_to_block (&body, tmp);
4558 gfc_add_block_to_block (&body, &cse.post);
4560 gfc_trans_scalarizing_loops (&loop, &body);
4561 gfc_add_block_to_block (&block, &loop.pre);
4562 gfc_add_block_to_block (&block, &loop.post);
4563 gfc_cleanup_loop (&loop);
4565 return gfc_finish_block (&block);
4568 /* As the WHERE or WHERE construct statement can be nested, we call
4569 gfc_trans_where_2 to do the translation, and pass the initial
4570 NULL values for both the control mask and the pending control mask. */
4573 gfc_trans_where (gfc_code * code)
4579 cblock = code->block;
4581 && cblock->next->op == EXEC_ASSIGN
4582 && !cblock->next->next)
4584 eblock = cblock->block;
4587 /* A simple "WHERE (cond) x = y" statement or block is
4588 dependence free if cond is not dependent upon writing x,
4589 and the source y is unaffected by the destination x. */
4590 if (!gfc_check_dependency (cblock->next->expr1,
4592 && !gfc_check_dependency (cblock->next->expr1,
4593 cblock->next->expr2, 0))
4594 return gfc_trans_where_3 (cblock, NULL);
4596 else if (!eblock->expr1
4599 && eblock->next->op == EXEC_ASSIGN
4600 && !eblock->next->next)
4602 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4603 block is dependence free if cond is not dependent on writes
4604 to x1 and x2, y1 is not dependent on writes to x2, and y2
4605 is not dependent on writes to x1, and both y's are not
4606 dependent upon their own x's. In addition to this, the
4607 final two dependency checks below exclude all but the same
4608 array reference if the where and elswhere destinations
4609 are the same. In short, this is VERY conservative and this
4610 is needed because the two loops, required by the standard
4611 are coalesced in gfc_trans_where_3. */
4612 if (!gfc_check_dependency(cblock->next->expr1,
4614 && !gfc_check_dependency(eblock->next->expr1,
4616 && !gfc_check_dependency(cblock->next->expr1,
4617 eblock->next->expr2, 1)
4618 && !gfc_check_dependency(eblock->next->expr1,
4619 cblock->next->expr2, 1)
4620 && !gfc_check_dependency(cblock->next->expr1,
4621 cblock->next->expr2, 1)
4622 && !gfc_check_dependency(eblock->next->expr1,
4623 eblock->next->expr2, 1)
4624 && !gfc_check_dependency(cblock->next->expr1,
4625 eblock->next->expr1, 0)
4626 && !gfc_check_dependency(eblock->next->expr1,
4627 cblock->next->expr1, 0))
4628 return gfc_trans_where_3 (cblock, eblock);
4632 gfc_start_block (&block);
4634 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4636 return gfc_finish_block (&block);
4640 /* CYCLE a DO loop. The label decl has already been created by
4641 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4642 node at the head of the loop. We must mark the label as used. */
4645 gfc_trans_cycle (gfc_code * code)
4649 cycle_label = code->ext.which_construct->cycle_label;
4650 gcc_assert (cycle_label);
4652 TREE_USED (cycle_label) = 1;
4653 return build1_v (GOTO_EXPR, cycle_label);
4657 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4658 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4662 gfc_trans_exit (gfc_code * code)
4666 exit_label = code->ext.which_construct->exit_label;
4667 gcc_assert (exit_label);
4669 TREE_USED (exit_label) = 1;
4670 return build1_v (GOTO_EXPR, exit_label);
4674 /* Translate the ALLOCATE statement. */
4677 gfc_trans_allocate (gfc_code * code)
4697 if (!code->ext.alloc.list)
4700 stat = tmp = memsz = NULL_TREE;
4701 label_errmsg = label_finish = errmsg = errlen = NULL_TREE;
4703 gfc_init_block (&block);
4704 gfc_init_block (&post);
4706 /* STAT= (and maybe ERRMSG=) is present. */
4710 tree gfc_int4_type_node = gfc_get_int_type (4);
4711 stat = gfc_create_var (gfc_int4_type_node, "stat");
4713 /* ERRMSG= only makes sense with STAT=. */
4716 gfc_init_se (&se, NULL);
4717 gfc_conv_expr_lhs (&se, code->expr2);
4719 errlen = gfc_get_expr_charlen (code->expr2);
4720 errmsg = gfc_build_addr_expr (pchar_type_node, se.expr);
4724 errmsg = null_pointer_node;
4725 errlen = build_int_cst (gfc_charlen_type_node, 0);
4728 /* GOTO destinations. */
4729 label_errmsg = gfc_build_label_decl (NULL_TREE);
4730 label_finish = gfc_build_label_decl (NULL_TREE);
4731 TREE_USED (label_errmsg) = 1;
4732 TREE_USED (label_finish) = 1;
4738 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4740 expr = gfc_copy_expr (al->expr);
4742 if (expr->ts.type == BT_CLASS)
4743 gfc_add_data_component (expr);
4745 gfc_init_se (&se, NULL);
4747 se.want_pointer = 1;
4748 se.descriptor_only = 1;
4749 gfc_conv_expr (&se, expr);
4751 if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen))
4753 /* A scalar or derived type. */
4755 /* Determine allocate size. */
4756 if (al->expr->ts.type == BT_CLASS && code->expr3)
4758 if (code->expr3->ts.type == BT_CLASS)
4760 sz = gfc_copy_expr (code->expr3);
4761 gfc_add_vptr_component (sz);
4762 gfc_add_size_component (sz);
4763 gfc_init_se (&se_sz, NULL);
4764 gfc_conv_expr (&se_sz, sz);
4769 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4771 else if (al->expr->ts.type == BT_CHARACTER
4772 && al->expr->ts.deferred && code->expr3)
4774 if (!code->expr3->ts.u.cl->backend_decl)
4776 /* Convert and use the length expression. */
4777 gfc_init_se (&se_sz, NULL);
4778 if (code->expr3->expr_type == EXPR_VARIABLE
4779 || code->expr3->expr_type == EXPR_CONSTANT)
4781 gfc_conv_expr (&se_sz, code->expr3);
4782 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4784 = gfc_evaluate_now (se_sz.string_length, &se.pre);
4785 gfc_add_block_to_block (&se.pre, &se_sz.post);
4786 memsz = se_sz.string_length;
4788 else if (code->expr3->mold
4789 && code->expr3->ts.u.cl
4790 && code->expr3->ts.u.cl->length)
4792 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
4793 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4794 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4795 gfc_add_block_to_block (&se.pre, &se_sz.post);
4800 /* This is would be inefficient and possibly could
4801 generate wrong code if the result were not stored
4803 if (slen3 == NULL_TREE)
4805 gfc_conv_expr (&se_sz, code->expr3);
4806 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4807 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
4808 gfc_add_block_to_block (&post, &se_sz.post);
4809 slen3 = gfc_evaluate_now (se_sz.string_length,
4816 /* Otherwise use the stored string length. */
4817 memsz = code->expr3->ts.u.cl->backend_decl;
4818 tmp = al->expr->ts.u.cl->backend_decl;
4820 /* Store the string length. */
4821 if (tmp && TREE_CODE (tmp) == VAR_DECL)
4822 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4825 /* Convert to size in bytes, using the character KIND. */
4826 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
4827 tmp = TYPE_SIZE_UNIT (tmp);
4828 memsz = fold_build2_loc (input_location, MULT_EXPR,
4829 TREE_TYPE (tmp), tmp,
4830 fold_convert (TREE_TYPE (tmp), memsz));
4832 else if (al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
4834 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
4835 gfc_init_se (&se_sz, NULL);
4836 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
4837 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4838 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4839 gfc_add_block_to_block (&se.pre, &se_sz.post);
4840 /* Store the string length. */
4841 tmp = al->expr->ts.u.cl->backend_decl;
4842 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4844 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4845 tmp = TYPE_SIZE_UNIT (tmp);
4846 memsz = fold_build2_loc (input_location, MULT_EXPR,
4847 TREE_TYPE (tmp), tmp,
4848 fold_convert (TREE_TYPE (se_sz.expr),
4851 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4852 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4854 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4856 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4858 memsz = se.string_length;
4860 /* Convert to size in bytes, using the character KIND. */
4861 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4862 tmp = TYPE_SIZE_UNIT (tmp);
4863 memsz = fold_build2_loc (input_location, MULT_EXPR,
4864 TREE_TYPE (tmp), tmp,
4865 fold_convert (TREE_TYPE (tmp), memsz));
4868 /* Allocate - for non-pointers with re-alloc checking. */
4869 if (gfc_expr_attr (expr).allocatable)
4870 gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
4871 stat, errmsg, errlen, expr);
4873 gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);
4875 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4877 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4878 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4879 gfc_add_expr_to_block (&se.pre, tmp);
4883 gfc_add_block_to_block (&block, &se.pre);
4885 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
4888 /* The coarray library already sets the errmsg. */
4889 if (gfc_option.coarray == GFC_FCOARRAY_LIB
4890 && gfc_expr_attr (expr).codimension)
4891 tmp = build1_v (GOTO_EXPR, label_finish);
4893 tmp = build1_v (GOTO_EXPR, label_errmsg);
4895 parm = fold_build2_loc (input_location, NE_EXPR,
4896 boolean_type_node, stat,
4897 build_int_cst (TREE_TYPE (stat), 0));
4898 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4899 gfc_unlikely(parm), tmp,
4900 build_empty_stmt (input_location));
4901 gfc_add_expr_to_block (&block, tmp);
4904 if (code->expr3 && !code->expr3->mold)
4906 /* Initialization via SOURCE block
4907 (or static default initializer). */
4908 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4909 if (al->expr->ts.type == BT_CLASS)
4912 gfc_actual_arglist *actual;
4914 gfc_init_se (&call, NULL);
4915 /* Do a polymorphic deep copy. */
4916 actual = gfc_get_actual_arglist ();
4917 actual->expr = gfc_copy_expr (rhs);
4918 if (rhs->ts.type == BT_CLASS)
4919 gfc_add_data_component (actual->expr);
4920 actual->next = gfc_get_actual_arglist ();
4921 actual->next->expr = gfc_copy_expr (al->expr);
4922 gfc_add_data_component (actual->next->expr);
4923 if (rhs->ts.type == BT_CLASS)
4925 ppc = gfc_copy_expr (rhs);
4926 gfc_add_vptr_component (ppc);
4929 ppc = gfc_lval_expr_from_sym (gfc_find_derived_vtab (rhs->ts.u.derived));
4930 gfc_add_component_ref (ppc, "_copy");
4931 gfc_conv_procedure_call (&call, ppc->symtree->n.sym, actual,
4933 gfc_add_expr_to_block (&call.pre, call.expr);
4934 gfc_add_block_to_block (&call.pre, &call.post);
4935 tmp = gfc_finish_block (&call.pre);
4937 else if (expr3 != NULL_TREE)
4939 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4940 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
4941 slen3, expr3, code->expr3->ts.kind);
4946 /* Switch off automatic reallocation since we have just done
4948 int realloc_lhs = gfc_option.flag_realloc_lhs;
4949 gfc_option.flag_realloc_lhs = 0;
4950 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4952 gfc_option.flag_realloc_lhs = realloc_lhs;
4954 gfc_free_expr (rhs);
4955 gfc_add_expr_to_block (&block, tmp);
4957 else if (code->expr3 && code->expr3->mold
4958 && code->expr3->ts.type == BT_CLASS)
4960 /* Default-initialization via MOLD (polymorphic). */
4961 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4963 gfc_add_vptr_component (rhs);
4964 gfc_add_def_init_component (rhs);
4965 gfc_init_se (&dst, NULL);
4966 gfc_init_se (&src, NULL);
4967 gfc_conv_expr (&dst, expr);
4968 gfc_conv_expr (&src, rhs);
4969 gfc_add_block_to_block (&block, &src.pre);
4970 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4971 gfc_add_expr_to_block (&block, tmp);
4972 gfc_free_expr (rhs);
4975 /* Allocation of CLASS entities. */
4976 gfc_free_expr (expr);
4978 if (expr->ts.type == BT_CLASS)
4983 /* Initialize VPTR for CLASS objects. */
4984 lhs = gfc_expr_to_initialize (expr);
4985 gfc_add_vptr_component (lhs);
4987 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4989 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4990 rhs = gfc_copy_expr (code->expr3);
4991 gfc_add_vptr_component (rhs);
4992 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4993 gfc_add_expr_to_block (&block, tmp);
4994 gfc_free_expr (rhs);
4998 /* VPTR is fixed at compile time. */
5002 ts = &code->expr3->ts;
5003 else if (expr->ts.type == BT_DERIVED)
5005 else if (code->ext.alloc.ts.type == BT_DERIVED)
5006 ts = &code->ext.alloc.ts;
5007 else if (expr->ts.type == BT_CLASS)
5008 ts = &CLASS_DATA (expr)->ts;
5012 if (ts->type == BT_DERIVED)
5014 vtab = gfc_find_derived_vtab (ts->u.derived);
5016 gfc_init_se (&lse, NULL);
5017 lse.want_pointer = 1;
5018 gfc_conv_expr (&lse, lhs);
5019 tmp = gfc_build_addr_expr (NULL_TREE,
5020 gfc_get_symbol_decl (vtab));
5021 gfc_add_modify (&block, lse.expr,
5022 fold_convert (TREE_TYPE (lse.expr), tmp));
5025 gfc_free_expr (lhs);
5030 /* STAT (ERRMSG only makes sense with STAT). */
5033 tmp = build1_v (LABEL_EXPR, label_errmsg);
5034 gfc_add_expr_to_block (&block, tmp);
5040 /* A better error message may be possible, but not required. */
5041 const char *msg = "Attempt to allocate an allocated object";
5044 gfc_init_se (&se, NULL);
5045 gfc_conv_expr_lhs (&se, code->expr2);
5047 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
5049 gfc_add_modify (&block, errmsg,
5050 gfc_build_addr_expr (pchar_type_node,
5051 gfc_build_localized_cstring_const (msg)));
5053 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5054 dlen = gfc_get_expr_charlen (code->expr2);
5055 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5058 dlen = build_call_expr_loc (input_location,
5059 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
5060 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
5062 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5063 build_int_cst (TREE_TYPE (stat), 0));
5065 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5067 gfc_add_expr_to_block (&block, tmp);
5070 /* STAT (ERRMSG only makes sense with STAT). */
5073 tmp = build1_v (LABEL_EXPR, label_finish);
5074 gfc_add_expr_to_block (&block, tmp);
5080 gfc_init_se (&se, NULL);
5081 gfc_conv_expr_lhs (&se, code->expr1);
5082 tmp = convert (TREE_TYPE (se.expr), stat);
5083 gfc_add_modify (&block, se.expr, tmp);
5086 gfc_add_block_to_block (&block, &se.post);
5087 gfc_add_block_to_block (&block, &post);
5089 return gfc_finish_block (&block);
5093 /* Translate a DEALLOCATE statement. */
5096 gfc_trans_deallocate (gfc_code *code)
5100 tree apstat, astat, pstat, stat, tmp;
5103 pstat = apstat = stat = astat = tmp = NULL_TREE;
5105 gfc_start_block (&block);
5107 /* Count the number of failed deallocations. If deallocate() was
5108 called with STAT= , then set STAT to the count. If deallocate
5109 was called with ERRMSG, then set ERRMG to a string. */
5110 if (code->expr1 || code->expr2)
5112 tree gfc_int4_type_node = gfc_get_int_type (4);
5114 stat = gfc_create_var (gfc_int4_type_node, "stat");
5115 pstat = gfc_build_addr_expr (NULL_TREE, stat);
5117 /* Running total of possible deallocation failures. */
5118 astat = gfc_create_var (gfc_int4_type_node, "astat");
5119 apstat = gfc_build_addr_expr (NULL_TREE, astat);
5121 /* Initialize astat to 0. */
5122 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
5125 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5127 gfc_expr *expr = gfc_copy_expr (al->expr);
5128 gcc_assert (expr->expr_type == EXPR_VARIABLE);
5130 if (expr->ts.type == BT_CLASS)
5131 gfc_add_data_component (expr);
5133 gfc_init_se (&se, NULL);
5134 gfc_start_block (&se.pre);
5136 se.want_pointer = 1;
5137 se.descriptor_only = 1;
5138 gfc_conv_expr (&se, expr);
5140 if (expr->rank || gfc_expr_attr (expr).codimension)
5142 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
5145 gfc_ref *last = NULL;
5146 for (ref = expr->ref; ref; ref = ref->next)
5147 if (ref->type == REF_COMPONENT)
5150 /* Do not deallocate the components of a derived type
5151 ultimate pointer component. */
5152 if (!(last && last->u.c.component->attr.pointer)
5153 && !(!last && expr->symtree->n.sym->attr.pointer))
5155 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5157 gfc_add_expr_to_block (&se.pre, tmp);
5160 tmp = gfc_array_deallocate (se.expr, pstat, expr);
5161 gfc_add_expr_to_block (&se.pre, tmp);
5165 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5167 gfc_add_expr_to_block (&se.pre, tmp);
5169 /* Set to zero after deallocation. */
5170 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5172 build_int_cst (TREE_TYPE (se.expr), 0));
5173 gfc_add_expr_to_block (&se.pre, tmp);
5175 if (al->expr->ts.type == BT_CLASS)
5177 /* Reset _vptr component to declared type. */
5178 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
5179 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
5180 gfc_add_vptr_component (lhs);
5181 rhs = gfc_lval_expr_from_sym (vtab);
5182 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5183 gfc_add_expr_to_block (&se.pre, tmp);
5184 gfc_free_expr (lhs);
5185 gfc_free_expr (rhs);
5189 /* Keep track of the number of failed deallocations by adding stat
5190 of the last deallocation to the running total. */
5191 if (code->expr1 || code->expr2)
5193 apstat = fold_build2_loc (input_location, PLUS_EXPR,
5194 TREE_TYPE (stat), astat, stat);
5195 gfc_add_modify (&se.pre, astat, apstat);
5198 tmp = gfc_finish_block (&se.pre);
5199 gfc_add_expr_to_block (&block, tmp);
5200 gfc_free_expr (expr);
5206 gfc_init_se (&se, NULL);
5207 gfc_conv_expr_lhs (&se, code->expr1);
5208 tmp = convert (TREE_TYPE (se.expr), astat);
5209 gfc_add_modify (&block, se.expr, tmp);
5215 /* A better error message may be possible, but not required. */
5216 const char *msg = "Attempt to deallocate an unallocated object";
5217 tree errmsg, slen, dlen;
5219 gfc_init_se (&se, NULL);
5220 gfc_conv_expr_lhs (&se, code->expr2);
5222 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
5224 gfc_add_modify (&block, errmsg,
5225 gfc_build_addr_expr (pchar_type_node,
5226 gfc_build_localized_cstring_const (msg)));
5228 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5229 dlen = gfc_get_expr_charlen (code->expr2);
5230 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5233 dlen = build_call_expr_loc (input_location,
5234 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
5235 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
5237 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat,
5238 build_int_cst (TREE_TYPE (astat), 0));
5240 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5242 gfc_add_expr_to_block (&block, tmp);
5245 return gfc_finish_block (&block);
5248 #include "gt-fortran-trans-stmt.h"