1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
40 typedef struct iter_info
/* NOTE(review): struct members are elided in this view; 'next' links the
   per-dimension iterator records used by FORALL translation.  */
46 struct iter_info *next;
50 typedef struct forall_info
/* Link back to the enclosing FORALL nesting level, if any.  */
57 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation needs the surrounding FORALL
   context, so it is declared ahead of its definition below.  */
61 static void gfc_trans_where_2 (gfc_code *, tree, bool,
62 forall_info *, stmtblock_t *);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code * code)
/* code->here is the statement label attached to this statement; emit the
   corresponding GENERIC label so GOTOs can target it.  */
69 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables that appeared in an ASSIGN statement carry the auxiliary
   string-length/address pair this function strips down to.  */
80 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
81 gfc_conv_expr (se, expr);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se->expr) == COMPONENT_REF)
84 se->expr = TREE_OPERAND (se->expr, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se->expr) == INDIRECT_REF)
87 se->expr = TREE_OPERAND (se->expr, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code * code)
102 /* Start a new block. */
103 gfc_init_se (&se, NULL);
104 gfc_start_block (&se.pre);
105 gfc_conv_label_variable (&se, code->expr1);
/* The ASSIGN variable carries two hidden fields: the format string length
   and the target address; fetch both decls.  */
107 len = GFC_DECL_STRING_LEN (se.expr);
108 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
110 label_tree = gfc_get_label_decl (code->label1);
112 if (code->label1->defined == ST_LABEL_TARGET)
/* Assigning a branch target: store its address and flag the length
   as -1 so an assigned GOTO can distinguish it from a FORMAT label.  */
114 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
115 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT statement; store the format string
   itself together with its character length.  */
119 gfc_expr *format = code->label1->format;
121 label_len = format->value.character.length;
122 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
123 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
124 format->value.character.string);
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 gfc_add_modify (&se.pre, len, len_tree);
129 gfc_add_modify (&se.pre, addr, label_tree);
131 return gfc_finish_block (&se.pre);
134 /* Translate a GOTO statement. */
137 gfc_trans_goto (gfc_code * code)
139 locus loc = code->loc;
/* An ordinary (non-assigned) GOTO maps directly to a GOTO_EXPR.  */
145 if (code->label1 != NULL)
146 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO: the target lives in the hidden address field set up by
   gfc_trans_label_assign.  */
149 gfc_init_se (&se, NULL);
150 gfc_start_block (&se.pre);
151 gfc_conv_label_variable (&se, code->expr1);
/* A stored length of -1 marks a branch target (see label assignment);
   anything else means the variable holds a FORMAT label — a runtime error
   here.  */
152 tmp = GFC_DECL_STRING_LEN (se.expr);
153 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
154 build_int_cst (TREE_TYPE (tmp), -1));
155 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
156 "Assigned label is not a target label");
158 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
166 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 gfc_trans_entry (gfc_code * code)
177 return build1_v (LABEL_EXPR, code->ext.entry->label);
181 /* Replace a gfc_ss structure by another both in the gfc_se struct
182 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
183 to replace a variable ss by the corresponding temporary. */
186 replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
188 gfc_ss **sess, **loopss;
190 /* The old_ss is a ss for a single variable. */
191 gcc_assert (old_ss->info->type == GFC_SS_SECTION);
/* Splice NEW_SS into the se chain in place of OLD_SS; the assert verifies
   OLD_SS was actually found before the terminator.  */
193 for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
196 gcc_assert (*sess != gfc_ss_terminator);
199 new_ss->next = old_ss->next;
/* Same replacement in the scalarizer loop's ss chain.  */
202 for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
203 loopss = &((*loopss)->loop_chain))
204 if (*loopss == old_ss)
206 gcc_assert (*loopss != gfc_ss_terminator);
209 new_ss->loop_chain = old_ss->loop_chain;
210 new_ss->loop = old_ss->loop;
/* OLD_SS is now unreferenced from both chains and can be released.  */
212 gfc_free_ss (old_ss);
216 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
217 elemental subroutines. Make temporaries for output arguments if any such
218 dependencies are found. Output arguments are chosen because internal_unpack
219 can be used, as is, to copy the result back to the variable. */
221 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
222 gfc_symbol * sym, gfc_actual_arglist * arg,
223 gfc_dep_check check_variable)
225 gfc_actual_arglist *arg0;
227 gfc_formal_arglist *formal;
/* No scalarization chain means no array arguments, hence nothing that
   could carry a dependency.  */
235 if (loopse->ss == NULL)
240 formal = sym->formal;
242 /* Loop over all the arguments testing for dependencies. */
243 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
249 /* Obtain the info structure for the current argument. */
250 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
251 if (ss->info->expr == e)
254 /* If there is a dependency, create a temporary and use it
255 instead of the variable. */
256 fsym = formal ? formal->sym : NULL;
257 if (e->expr_type == EXPR_VARIABLE
259 && fsym->attr.intent != INTENT_IN
260 && gfc_check_fncall_dependency (e, fsym->attr.intent,
261 sym, arg0, check_variable))
263 tree initial, temptype;
264 stmtblock_t temp_post;
/* Build a fresh array ss for the temporary and substitute it for the
   variable's ss in both the se and the loop (see replace_ss).  */
267 tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
269 gfc_mark_ss_chain_used (tmp_ss, 1);
270 tmp_ss->info->expr = ss->info->expr;
271 replace_ss (loopse, ss, tmp_ss);
273 /* Obtain the argument descriptor for unpacking. */
274 gfc_init_se (&parmse, NULL);
275 parmse.want_pointer = 1;
276 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
277 gfc_add_block_to_block (&se->pre, &parmse.pre);
279 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
280 initialize the array temporary with a copy of the values. */
281 if (fsym->attr.intent == INTENT_INOUT
282 || (fsym->ts.type ==BT_DERIVED
283 && fsym->attr.intent == INTENT_OUT))
284 initial = parmse.expr;
285 /* For class expressions, we always initialize with the copy of
287 else if (e->ts.type == BT_CLASS)
288 initial = parmse.expr;
292 if (e->ts.type != BT_CLASS)
294 /* Find the type of the temporary to create; we don't use the type
295 of e itself as this breaks for subcomponent-references in e
296 (where the type of e is that of the final reference, but
297 parmse.expr's type corresponds to the full derived-type). */
298 /* TODO: Fix this somehow so we don't need a temporary of the whole
299 array but instead only the components referenced. */
300 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
301 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
302 temptype = TREE_TYPE (temptype);
303 temptype = gfc_get_element_type (temptype);
307 /* For class arrays signal that the size of the dynamic type has to
308 be obtained from the vtable, using the 'initial' expression. */
309 temptype = NULL_TREE;
311 /* Generate the temporary. Cleaning up the temporary should be the
312 very last thing done, so we add the code to a new block and add it
313 to se->post as last instructions. */
314 size = gfc_create_var (gfc_array_index_type, NULL);
315 data = gfc_create_var (pvoid_type_node, NULL);
316 gfc_init_block (&temp_post);
317 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
318 temptype, initial, false, true,
319 false, &arg->expr->where);
320 gfc_add_modify (&se->pre, size, tmp);
321 tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
322 gfc_add_modify (&se->pre, data, tmp);
324 /* Update other ss' delta. */
325 gfc_set_delta (loopse->loop);
327 /* Copy the result back using unpack..... */
328 if (e->ts.type != BT_CLASS)
329 tmp = build_call_expr_loc (input_location,
330 gfor_fndecl_in_unpack, 2, parmse.expr, data);
333 /* ... except for class results where the copy is
335 tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
336 tmp = gfc_conv_descriptor_data_get (tmp);
337 tmp = build_call_expr_loc (input_location,
338 builtin_decl_explicit (BUILT_IN_MEMCPY),
/* The copy-back runs in se->post, i.e. after the elemental call.  */
341 gfc_add_expr_to_block (&se->post, tmp);
343 /* parmse.pre is already added above. */
344 gfc_add_block_to_block (&se->post, &parmse.post);
345 gfc_add_block_to_block (&se->post, &temp_post);
351 /* Get the interface symbol for the procedure corresponding to the given call.
352 We can't get the procedure symbol directly as we have to handle the case
353 of (deferred) type-bound procedures. */
356 get_proc_ifc_for_call (gfc_code *c)
360 gcc_assert (c->op == EXEC_ASSIGN_CALL || c->op == EXEC_CALL);
/* Prefer the interface derived from the call expression itself (handles
   type-bound procedures); fall back to the resolved symbol.  */
362 sym = gfc_get_proc_ifc_for_expr (c->expr1);
364 /* Fall back/last resort try. */
366 sym = c->resolved_sym;
372 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
375 gfc_trans_call (gfc_code * code, bool dependency_check,
376 tree mask, tree count1, bool invert)
380 int has_alternate_specifier;
381 gfc_dep_check check_variable;
382 tree index = NULL_TREE;
383 tree maskexpr = NULL_TREE;
386 /* A CALL starts a new block because the actual arguments may have to
387 be evaluated first. */
388 gfc_init_se (&se, NULL);
389 gfc_start_block (&se.pre);
391 gcc_assert (code->resolved_sym);
/* Walk elemental argument lists to decide whether scalarization is
   needed; ss stays at the terminator for the plain-call case.  */
393 ss = gfc_ss_terminator;
394 if (code->resolved_sym->attr.elemental)
395 ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
396 get_proc_ifc_for_call (code),
399 /* Is not an elemental subroutine call with array valued arguments. */
400 if (ss == gfc_ss_terminator)
403 /* Translate the call. */
404 has_alternate_specifier
405 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
408 /* A subroutine without side-effect, by definition, does nothing! */
409 TREE_SIDE_EFFECTS (se.expr) = 1;
411 /* Chain the pieces together and return the block. */
412 if (has_alternate_specifier)
/* An alternate-return call is followed by a SELECT that dispatches on
   the returned specifier; store the call result into that variable.  */
414 gfc_code *select_code;
416 select_code = code->next;
417 gcc_assert(select_code->op == EXEC_SELECT);
418 sym = select_code->expr1->symtree->n.sym;
419 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
420 if (sym->backend_decl == NULL)
421 sym->backend_decl = gfc_get_symbol_decl (sym);
422 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
425 gfc_add_expr_to_block (&se.pre, se.expr);
427 gfc_add_block_to_block (&se.pre, &se.post);
432 /* An elemental subroutine call with array valued arguments has
440 /* gfc_walk_elemental_function_args renders the ss chain in the
441 reverse order to the actual argument order. */
442 ss = gfc_reverse_ss (ss);
444 /* Initialize the loop. */
445 gfc_init_se (&loopse, NULL);
446 gfc_init_loopinfo (&loop);
447 gfc_add_ss_to_loop (&loop, ss);
449 gfc_conv_ss_startstride (&loop);
450 /* TODO: gfc_conv_loop_setup generates a temporary for vector
451 subscripts. This could be prevented in the elemental case
452 as temporaries are handled separatedly
453 (below in gfc_conv_elemental_dependencies). */
454 gfc_conv_loop_setup (&loop, &code->expr1->where);
455 gfc_mark_ss_chain_used (ss, 1);
457 /* Convert the arguments, checking for dependencies. */
458 gfc_copy_loopinfo_to_se (&loopse, &loop);
461 /* For operator assignment, do dependency checking. */
462 if (dependency_check)
463 check_variable = ELEM_CHECK_VARIABLE;
465 check_variable = ELEM_DONT_CHECK_VARIABLE;
467 gfc_init_se (&depse, NULL);
468 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
469 code->ext.actual, check_variable);
471 gfc_add_block_to_block (&loop.pre, &depse.pre);
472 gfc_add_block_to_block (&loop.post, &depse.post);
474 /* Generate the loop body. */
475 gfc_start_scalarized_body (&loop, &body);
476 gfc_init_block (&block);
480 /* Form the mask expression according to the mask. */
482 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* 'invert' flips the mask sense (used for WHERE/ELSEWHERE bodies).  */
484 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
485 TREE_TYPE (maskexpr), maskexpr);
488 /* Add the subroutine call to the block. */
489 gfc_conv_procedure_call (&loopse, code->resolved_sym,
490 code->ext.actual, code->expr1, NULL);
/* Under a mask, guard the call and bump count1, the mask index.  */
494 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
495 build_empty_stmt (input_location));
496 gfc_add_expr_to_block (&loopse.pre, tmp);
497 tmp = fold_build2_loc (input_location, PLUS_EXPR,
498 gfc_array_index_type,
499 count1, gfc_index_one_node);
500 gfc_add_modify (&loopse.pre, count1, tmp);
503 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
505 gfc_add_block_to_block (&block, &loopse.pre);
506 gfc_add_block_to_block (&block, &loopse.post);
508 /* Finish up the loop block and the loop. */
509 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
510 gfc_trans_scalarizing_loops (&loop, &body);
511 gfc_add_block_to_block (&se.pre, &loop.pre);
512 gfc_add_block_to_block (&se.pre, &loop.post);
513 gfc_add_block_to_block (&se.pre, &se.post);
514 gfc_cleanup_loop (&loop);
517 return gfc_finish_block (&se.pre);
521 /* Translate the RETURN statement. */
524 gfc_trans_return (gfc_code * code)
532 /* If code->expr is not NULL, this return statement must appear
533 in a subroutine and current_fake_result_decl has already
536 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: the alternate-return value has nowhere to go; warn
   and fall through to a plain return.  */
539 gfc_warning ("An alternate return at %L without a * dummy argument",
540 &code->expr1->where);
541 return gfc_generate_return ();
544 /* Start a new block for this statement. */
545 gfc_init_se (&se, NULL);
546 gfc_start_block (&se.pre);
548 gfc_conv_expr (&se, code->expr1);
550 /* Note that the actually returned expression is a simple value and
551 does not depend on any pointers or such; thus we can clean-up with
552 se.post before returning. */
553 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
554 result, fold_convert (TREE_TYPE (result),
556 gfc_add_expr_to_block (&se.pre, tmp);
557 gfc_add_block_to_block (&se.pre, &se.post);
559 tmp = gfc_generate_return ();
560 gfc_add_expr_to_block (&se.pre, tmp);
561 return gfc_finish_block (&se.pre);
/* RETURN without an alternate-return expression.  */
564 return gfc_generate_return ();
568 /* Translate the PAUSE statement. We have to translate this statement
569 to a runtime library call. */
572 gfc_trans_pause (gfc_code * code)
574 tree gfc_int4_type_node = gfc_get_int_type (4);
578 /* Start a new block for this statement. */
579 gfc_init_se (&se, NULL);
580 gfc_start_block (&se.pre);
/* Three forms: bare PAUSE (NULL string + 0), PAUSE <integer>, and
   PAUSE <character>, each mapping to its libgfortran entry point.  */
583 if (code->expr1 == NULL)
585 tmp = build_int_cst (gfc_int4_type_node, 0);
586 tmp = build_call_expr_loc (input_location,
587 gfor_fndecl_pause_string, 2,
588 build_int_cst (pchar_type_node, 0), tmp);
590 else if (code->expr1->ts.type == BT_INTEGER)
592 gfc_conv_expr (&se, code->expr1);
593 tmp = build_call_expr_loc (input_location,
594 gfor_fndecl_pause_numeric, 1,
595 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass address and length.  */
599 gfc_conv_expr_reference (&se, code->expr1);
600 tmp = build_call_expr_loc (input_location,
601 gfor_fndecl_pause_string, 2,
602 se.expr, se.string_length);
605 gfc_add_expr_to_block (&se.pre, tmp);
607 gfc_add_block_to_block (&se.pre, &se.post);
609 return gfc_finish_block (&se.pre);
613 /* Translate the STOP statement. We have to translate this statement
614 to a runtime library call. */
617 gfc_trans_stop (gfc_code *code, bool error_stop)
619 tree gfc_int4_type_node = gfc_get_int_type (4);
623 /* Start a new block for this statement. */
624 gfc_init_se (&se, NULL);
625 gfc_start_block (&se.pre);
627 if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop)
629 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
630 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
631 tmp = build_call_expr_loc (input_location, tmp, 0);
632 gfc_add_expr_to_block (&se.pre, tmp);
/* Shut down the coarray runtime before a normal STOP.  */
634 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
635 gfc_add_expr_to_block (&se.pre, tmp);
/* Select the libgfortran entry point by stop-code form (none/integer/
   character) and by whether this is ERROR STOP under -fcoarray=lib.  */
638 if (code->expr1 == NULL)
640 tmp = build_int_cst (gfc_int4_type_node, 0);
641 tmp = build_call_expr_loc (input_location,
643 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
644 ? gfor_fndecl_caf_error_stop_str
645 : gfor_fndecl_error_stop_string)
646 : gfor_fndecl_stop_string,
647 2, build_int_cst (pchar_type_node, 0), tmp);
649 else if (code->expr1->ts.type == BT_INTEGER)
651 gfc_conv_expr (&se, code->expr1);
652 tmp = build_call_expr_loc (input_location,
654 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
655 ? gfor_fndecl_caf_error_stop
656 : gfor_fndecl_error_stop_numeric)
657 : gfor_fndecl_stop_numeric_f08, 1,
658 fold_convert (gfc_int4_type_node, se.expr));
662 gfc_conv_expr_reference (&se, code->expr1);
663 tmp = build_call_expr_loc (input_location,
665 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
666 ? gfor_fndecl_caf_error_stop_str
667 : gfor_fndecl_error_stop_string)
668 : gfor_fndecl_stop_string,
669 2, se.expr, se.string_length);
672 gfc_add_expr_to_block (&se.pre, tmp);
674 gfc_add_block_to_block (&se.pre, &se.post);
676 return gfc_finish_block (&se.pre);
/* Translate LOCK/UNLOCK.  For -fcoarray=single there is no contention, so
   only the STAT= and ACQUIRED_LOCK= side effects need emitting.  */
681 gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
684 tree stat = NULL_TREE, lock_acquired = NULL_TREE;
686 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
687 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
688 if (!code->expr2 && !code->expr4 && gfc_option.coarray != GFC_FCOARRAY_LIB)
691 gfc_init_se (&se, NULL);
692 gfc_start_block (&se.pre);
/* STAT= variable (expr2), if present.  */
696 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
697 gfc_init_se (&argse, NULL);
698 gfc_conv_expr_val (&argse, code->expr2);
/* ACQUIRED_LOCK= variable (expr4), if present.  */
704 gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
705 gfc_init_se (&argse, NULL);
706 gfc_conv_expr_val (&argse, code->expr4);
707 lock_acquired = argse.expr;
/* Single image: the lock always succeeds — STAT is 0 and the lock is
   reported as acquired.  */
710 if (stat != NULL_TREE)
711 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
713 if (lock_acquired != NULL_TREE)
714 gfc_add_modify (&se.pre, lock_acquired,
715 fold_convert (TREE_TYPE (lock_acquired),
718 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL, SYNC IMAGES and SYNC MEMORY, including the STAT=
   and ERRMSG= specifiers and -fcoarray=lib runtime calls.  */
723 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
727 tree images = NULL_TREE, stat = NULL_TREE,
728 errmsg = NULL_TREE, errmsglen = NULL_TREE;
730 /* Short cut: For single images without bound checking or without STAT=,
731 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
732 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
733 && gfc_option.coarray != GFC_FCOARRAY_LIB)
736 gfc_init_se (&se, NULL);
737 gfc_start_block (&se.pre);
/* Scalar image-set of SYNC IMAGES (expr1).  */
739 if (code->expr1 && code->expr1->rank == 0)
741 gfc_init_se (&argse, NULL);
742 gfc_conv_expr_val (&argse, code->expr1);
/* STAT= variable (expr2), if present; otherwise pass a null pointer to
   the runtime.  */
748 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
749 gfc_init_se (&argse, NULL);
750 gfc_conv_expr_val (&argse, code->expr2);
754 stat = null_pointer_node;
756 if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB
757 && type != EXEC_SYNC_MEMORY)
759 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
760 gfc_init_se (&argse, NULL);
761 gfc_conv_expr (&argse, code->expr3);
762 gfc_conv_string_parameter (&argse);
763 errmsg = gfc_build_addr_expr (NULL, argse.expr);
764 errmsglen = argse.string_length;
766 else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
768 errmsg = null_pointer_node;
769 errmsglen = build_int_cst (integer_type_node, 0);
772 /* Check SYNC IMAGES(imageset) for valid image index.
773 FIXME: Add a check for image-set arrays. */
774 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
775 && code->expr1->rank == 0)
/* Single image: only image number 1 is valid.  */
778 if (gfc_option.coarray != GFC_FCOARRAY_LIB)
779 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
780 images, build_int_cst (TREE_TYPE (images), 1));
/* Library coarrays: valid range is 1 .. num_images().  */
784 cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
785 images, gfort_gvar_caf_num_images);
786 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
788 build_int_cst (TREE_TYPE (images), 1));
789 cond = fold_build2_loc (input_location, TRUTH_AND_EXPR,
790 boolean_type_node, cond, cond2);
792 gfc_trans_runtime_check (true, false, cond, &se.pre,
793 &code->expr1->where, "Invalid image number "
795 fold_convert (integer_type_node, se.expr));
798 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
799 image control statements SYNC IMAGES and SYNC ALL. */
800 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
802 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
803 tmp = build_call_expr_loc (input_location, tmp, 0);
804 gfc_add_expr_to_block (&se.pre, tmp);
807 if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
809 /* Set STAT to zero. */
811 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
813 else if (type == EXEC_SYNC_ALL)
815 /* SYNC ALL => stat == null_pointer_node
816 SYNC ALL(stat=s) => stat has an integer type
818 If "stat" has the wrong integer type, use a temp variable of
819 the right type and later cast the result back into "stat". */
820 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
822 if (TREE_TYPE (stat) == integer_type_node)
823 stat = gfc_build_addr_expr (NULL, stat);
825 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
826 3, stat, errmsg, errmsglen);
827 gfc_add_expr_to_block (&se.pre, tmp);
/* Wrong-kind STAT: call with a temporary int and copy back.  */
831 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
833 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
834 3, gfc_build_addr_expr (NULL, tmp_stat),
836 gfc_add_expr_to_block (&se.pre, tmp);
838 gfc_add_modify (&se.pre, stat,
839 fold_convert (TREE_TYPE (stat), tmp_stat));
846 gcc_assert (type == EXEC_SYNC_IMAGES);
/* SYNC IMAGES (*) — len -1 tells the runtime "all images".  */
850 len = build_int_cst (integer_type_node, -1);
851 images = null_pointer_node;
853 else if (code->expr1->rank == 0)
855 len = build_int_cst (integer_type_node, 1);
856 images = gfc_build_addr_expr (NULL_TREE, images);
/* Array image-set: only default-kind integers are supported.  */
861 if (code->expr1->ts.kind != gfc_c_int_kind)
862 gfc_fatal_error ("Sorry, only support for integer kind %d "
863 "implemented for image-set at %L",
864 gfc_c_int_kind, &code->expr1->where);
866 gfc_conv_array_parameter (&se, code->expr1,
867 gfc_walk_expr (code->expr1), true, NULL,
/* Convert the byte size into an element count for the runtime.  */
871 tmp = gfc_typenode_for_spec (&code->expr1->ts);
872 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
873 tmp = gfc_get_element_type (tmp);
875 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
876 TREE_TYPE (len), len,
877 fold_convert (TREE_TYPE (len),
878 TYPE_SIZE_UNIT (tmp)));
879 len = fold_convert (integer_type_node, len);
882 /* SYNC IMAGES(imgs) => stat == null_pointer_node
883 SYNC IMAGES(imgs,stat=s) => stat has an integer type
885 If "stat" has the wrong integer type, use a temp variable of
886 the right type and later cast the result back into "stat". */
887 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
889 if (TREE_TYPE (stat) == integer_type_node)
890 stat = gfc_build_addr_expr (NULL, stat);
892 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
893 5, fold_convert (integer_type_node, len),
894 images, stat, errmsg, errmsglen);
895 gfc_add_expr_to_block (&se.pre, tmp);
/* Wrong-kind STAT: as for SYNC ALL, go via an int temporary.  */
899 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
901 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
902 5, fold_convert (integer_type_node, len),
903 images, gfc_build_addr_expr (NULL, tmp_stat),
905 gfc_add_expr_to_block (&se.pre, tmp);
907 gfc_add_modify (&se.pre, stat,
908 fold_convert (TREE_TYPE (stat), tmp_stat));
912 return gfc_finish_block (&se.pre);
916 /* Generate GENERIC for the IF construct. This function also deals with
917 the simple IF statement, because the front end translates the IF
918 statement into an IF construct.
950 where COND_S is the simplified version of the predicate. PRE_COND_S
951 are the pre side-effects produced by the translation of the
953 We need to build the chain recursively otherwise we run into
954 problems with folding incomplete statements. */
957 gfc_trans_if_1 (gfc_code * code)
964 /* Check for an unconditional ELSE clause. */
966 return gfc_trans_code (code->next);
968 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
969 gfc_init_se (&if_se, NULL);
970 gfc_start_block (&if_se.pre);
972 /* Calculate the IF condition expression. */
973 if (code->expr1->where.lb)
/* Temporarily point diagnostics at the condition's source location.  */
975 gfc_save_backend_locus (&saved_loc);
976 gfc_set_backend_locus (&code->expr1->where);
979 gfc_conv_expr_val (&if_se, code->expr1);
981 if (code->expr1->where.lb)
982 gfc_restore_backend_locus (&saved_loc);
984 /* Translate the THEN clause. */
985 stmt = gfc_trans_code (code->next);
987 /* Translate the ELSE clause. */
989 elsestmt = gfc_trans_if_1 (code->block);
991 elsestmt = build_empty_stmt (input_location);
993 /* Build the condition expression and add it to the condition block. */
994 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
995 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
998 gfc_add_expr_to_block (&if_se.pre, stmt);
1000 /* Finish off this statement. */
1001 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation: wraps the recursive gfc_trans_if_1 chain
   and appends the construct's EXIT label.  */
1005 gfc_trans_if (gfc_code * code)
1010 /* Create exit label so it is available for trans'ing the body code. */
1011 exit_label = gfc_build_label_decl (NULL_TREE);
1012 code->exit_label = exit_label;
1014 /* Translate the actual code in code->block. */
1015 gfc_init_block (&body);
1016 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
1018 /* Add exit label. */
1019 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1021 return gfc_finish_block (&body);
1025 /* Translate an arithmetic IF expression.
1027 IF (cond) label1, label2, label3 translates to
1039 An optimized version can be generated in case of equal labels.
1040 E.g., if label1 is equal to label2, we can translate it to
1049 gfc_trans_arithmetic_if (gfc_code * code)
1057 /* Start a new block. */
1058 gfc_init_se (&se, NULL);
1059 gfc_start_block (&se.pre);
1061 /* Pre-evaluate COND. */
1062 gfc_conv_expr_val (&se, code->expr1);
1063 se.expr = gfc_evaluate_now (se.expr, &se.pre);
1065 /* Build something to compare with. */
1066 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
1068 if (code->label1->value != code->label2->value)
1070 /* If (cond < 0) take branch1 else take branch2.
1071 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1072 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1073 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
1075 if (code->label1->value != code->label3->value)
1076 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
/* label1 == label3: any nonzero value goes to label1, so test NE.  */
1079 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1082 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1083 tmp, branch1, branch2);
/* label1 == label2: the negative/zero cases collapse to one jump.  */
1086 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1088 if (code->label1->value != code->label3->value
1089 && code->label2->value != code->label3->value)
1091 /* if (cond <= 0) take branch1 else take branch2. */
1092 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
1093 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1095 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1096 tmp, branch1, branch2);
1099 /* Append the COND_EXPR to the evaluation of COND, and return. */
1100 gfc_add_expr_to_block (&se.pre, branch1);
1101 return gfc_finish_block (&se.pre);
1105 /* Translate a CRITICAL block. */
1107 gfc_trans_critical (gfc_code *code)
1112 gfc_start_block (&block);
/* With library coarrays, bracket the body with the runtime's critical
   enter/leave calls; single-image builds need no locking.  */
1114 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1116 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_critical, 0);
1117 gfc_add_expr_to_block (&block, tmp);
1120 tmp = gfc_trans_code (code->block->next);
1121 gfc_add_expr_to_block (&block, tmp);
1123 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1125 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_end_critical,
1127 gfc_add_expr_to_block (&block, tmp);
1131 return gfc_finish_block (&block);
1135 /* Do proper initialization for ASSOCIATE names. */
1138 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1144 gcc_assert (sym->assoc);
1145 e = sym->assoc->target;
/* CLASS scalars/array refs get descriptor treatment of their own below.  */
1147 class_target = (e->expr_type == EXPR_VARIABLE)
1148 && (gfc_is_class_scalar_expr (e)
1149 || gfc_is_class_array_ref (e, NULL));
1151 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1152 to array temporary) for arrays with either unknown shape or if associating
1154 if (sym->attr.dimension && !class_target
1155 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1161 desc = sym->backend_decl;
1163 /* If association is to an expression, evaluate it and create temporary.
1164 Otherwise, get descriptor of target for pointer assignment. */
1165 gfc_init_se (&se, NULL);
1166 ss = gfc_walk_expr (e);
1167 if (sym->assoc->variable)
/* direct_byref makes the descriptor conversion write straight into the
   associate-name's own descriptor.  */
1169 se.direct_byref = 1;
1172 gfc_conv_expr_descriptor (&se, e, ss);
1174 /* If we didn't already do the pointer assignment, set associate-name
1175 descriptor to the one generated for the temporary. */
1176 if (!sym->assoc->variable)
1180 gfc_add_modify (&se.pre, desc, se.expr);
1182 /* The generated descriptor has lower bound zero (as array
1183 temporary), shift bounds so we get lower bounds of 1. */
1184 for (dim = 0; dim < e->rank; ++dim)
1185 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1186 dim, gfc_index_one_node);
1189 /* Done, register stuff as init / cleanup code. */
1190 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1191 gfc_finish_block (&se.post));
1194 /* CLASS arrays just need the descriptor to be directly assigned. */
1195 else if (class_target && sym->attr.dimension)
1199 gfc_init_se (&se, NULL);
1200 se.descriptor_only = 1;
1201 gfc_conv_expr (&se, e);
1203 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
1204 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));
1206 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
1208 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1209 gfc_finish_block (&se.post));
1212 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1213 else if (gfc_is_associate_pointer (sym))
1217 gcc_assert (!sym->attr.dimension);
1219 gfc_init_se (&se, NULL);
1220 gfc_conv_expr (&se, e);
1222 tmp = TREE_TYPE (sym->backend_decl);
1223 tmp = gfc_build_addr_expr (tmp, se.expr);
1224 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1226 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1227 gfc_finish_block (&se.post));
1230 /* Do a simple assignment. This is for scalar expressions, where we
1231 can simply use expression assignment. */
1236 lhs = gfc_lval_expr_from_sym (sym);
1237 tmp = gfc_trans_assignment (lhs, e, false, true);
1238 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1243 /* Translate a BLOCK construct. This is basically what we would do for a
1247 gfc_trans_block_construct (gfc_code* code)
1251 gfc_wrapped_block block;
1254 gfc_association_list *ass;
/* The BLOCK construct has its own namespace with a synthetic proc_name
   symbol holding its local declarations.  */
1256 ns = code->ext.block.ns;
1258 sym = ns->proc_name;
1261 /* Process local variables. */
1262 gcc_assert (!sym->tlink);
1264 gfc_process_block_locals (ns);
1266 /* Generate code including exit-label. */
1267 gfc_init_block (&body);
1268 exit_label = gfc_build_label_decl (NULL_TREE);
1269 code->exit_label = exit_label;
1270 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1271 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1273 /* Finish everything. */
1274 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1275 gfc_trans_deferred_vars (sym, &block);
/* ASSOCIATE names attached to this BLOCK get their init/cleanup added to
   the wrapped block (see trans_associate_var).  */
1276 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1277 trans_associate_var (ass->st->n.sym, &block);
1279 return gfc_finish_wrapped_block (&block);
1283 /* Translate the simple DO construct. This is where the loop variable has
1284 integer type and step +-1. We can't use this in the general case
1285 because integer overflow and floating point errors could give incorrect
1287 We translate a do loop from:
1289 DO dovar = from, to, step
1295 [Evaluate loop bounds and step]
1297 if ((step > 0) ? (dovar <= to) : (dovar => to))
1303 cond = (dovar == to);
1305 if (cond) goto end_label;
1310 This helps the optimizers by avoiding the extra induction variable
1311 used in the general case. */
/* NOTE(review): listing elides some original lines (return type, local
   declarations, closing braces).  Only comments were added below.  */
1314 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1315 tree from, tree to, tree step, tree exit_cond)
1321 tree saved_dovar = NULL;
1326 type = TREE_TYPE (dovar);
1328 loc = code->ext.iterator->start->where.lb->location;
1330 /* Initialize the DO variable: dovar = from. */
1331 gfc_add_modify_loc (loc, pblock, dovar,
1332 fold_convert (TREE_TYPE(dovar), from));
1334 /* Save value for do-tinkering checking. */
/* With -fcheck=do, keep a shadow copy of the loop variable so we can
   detect user modification of it inside the loop body.  */
1335 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1337 saved_dovar = gfc_create_var (type, ".saved_dovar");
1338 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1341 /* Cycle and exit statements are implemented with gotos. */
1342 cycle_label = gfc_build_label_decl (NULL_TREE);
1343 exit_label = gfc_build_label_decl (NULL_TREE);
1345 /* Put the labels where they can be found later. See gfc_trans_do(). */
1346 code->cycle_label = cycle_label;
1347 code->exit_label = exit_label;
1350 gfc_start_block (&body);
1352 /* Main loop body. */
1353 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1354 gfc_add_expr_to_block (&body, tmp);
1356 /* Label for cycle statements (if needed). */
/* TREE_USED is set by gfc_trans_cycle when a CYCLE statement was
   translated; only then do we need to emit the label.  */
1357 if (TREE_USED (cycle_label))
1359 tmp = build1_v (LABEL_EXPR, cycle_label);
1360 gfc_add_expr_to_block (&body, tmp);
1363 /* Check whether someone has modified the loop variable. */
1364 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1366 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1367 dovar, saved_dovar);
1368 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1369 "Loop variable has been modified");
1372 /* Exit the loop if there is an I/O result condition or error. */
1375 tmp = build1_v (GOTO_EXPR, exit_label);
1376 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1378 build_empty_stmt (loc));
1379 gfc_add_expr_to_block (&body, tmp);
1382 /* Evaluate the loop condition. */
/* Test (dovar == to) BEFORE the increment; this avoids overflowing
   dovar on the final iteration of a loop that ends at the type's
   extreme value.  */
1383 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1385 cond = gfc_evaluate_now_loc (loc, cond, &body);
1387 /* Increment the loop variable. */
1388 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1389 gfc_add_modify_loc (loc, &body, dovar, tmp);
1391 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1392 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1394 /* The loop exit. */
1395 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1396 TREE_USED (exit_label) = 1;
1397 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1398 cond, tmp, build_empty_stmt (loc));
1399 gfc_add_expr_to_block (&body, tmp);
1401 /* Finish the loop body. */
1402 tmp = gfc_finish_block (&body);
1403 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1405 /* Only execute the loop if the number of iterations is positive. */
/* step is a compile-time constant here (+1 or -1), so its sign picks
   the direction of the entry guard.  */
1406 if (tree_int_cst_sgn (step) > 0)
1407 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1410 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1412 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1413 build_empty_stmt (loc));
1414 gfc_add_expr_to_block (pblock, tmp);
1416 /* Add the exit label. */
1417 tmp = build1_v (LABEL_EXPR, exit_label);
1418 gfc_add_expr_to_block (pblock, tmp);
1420 return gfc_finish_block (pblock);
1423 /* Translate the DO construct. This obviously is one of the most
1424 important ones to get right with any compiler, but especially
1427 We special case some loop forms as described in gfc_trans_simple_do.
1428 For other cases we implement them with a separate loop count,
1429 as described in the standard.
1431 We translate a do loop from:
1433 DO dovar = from, to, step
1439 [evaluate loop bounds and step]
1440 empty = (step > 0 ? to < from : to > from);
1441 countm1 = (to - from) / step;
1443 if (empty) goto exit_label;
1449 if (countm1 ==0) goto exit_label;
1454 countm1 is an unsigned integer. It is equal to the loop count minus one,
1455 because the loop count itself can overflow. */
/* NOTE(review): the numbered listing omits several original lines
   (declarations, braces, some continuation lines).  Code text is left
   byte-identical; only comments were added.  */
1458 gfc_trans_do (gfc_code * code, tree exit_cond)
1462 tree saved_dovar = NULL;
1478 gfc_start_block (&block);
1480 loc = code->ext.iterator->start->where.lb->location;
1482 /* Evaluate all the expressions in the iterator. */
1483 gfc_init_se (&se, NULL);
1484 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1485 gfc_add_block_to_block (&block, &se.pre);
1487 type = TREE_TYPE (dovar);
/* Bounds and step are evaluated exactly once, up front, per the
   Fortran standard's DO semantics.  */
1489 gfc_init_se (&se, NULL);
1490 gfc_conv_expr_val (&se, code->ext.iterator->start);
1491 gfc_add_block_to_block (&block, &se.pre);
1492 from = gfc_evaluate_now (se.expr, &block);
1494 gfc_init_se (&se, NULL);
1495 gfc_conv_expr_val (&se, code->ext.iterator->end);
1496 gfc_add_block_to_block (&block, &se.pre);
1497 to = gfc_evaluate_now (se.expr, &block);
1499 gfc_init_se (&se, NULL);
1500 gfc_conv_expr_val (&se, code->ext.iterator->step);
1501 gfc_add_block_to_block (&block, &se.pre);
1502 step = gfc_evaluate_now (se.expr, &block);
/* -fcheck=do: a zero step would make the loop-count division below
   undefined, so diagnose it at run time.  */
1504 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1506 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1507 build_zero_cst (type));
1508 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1509 "DO step value is zero");
1512 /* Special case simple loops. */
1513 if (TREE_CODE (type) == INTEGER_TYPE
1514 && (integer_onep (step)
1515 || tree_int_cst_equal (step, integer_minus_one_node)))
1516 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1518 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1519 build_zero_cst (type));
/* countm1 lives in an unsigned type so that to-from cannot overflow.  */
1521 if (TREE_CODE (type) == INTEGER_TYPE)
1522 utype = unsigned_type_for (type);
1524 utype = unsigned_type_for (gfc_array_index_type);
1525 countm1 = gfc_create_var (utype, "countm1");
1527 /* Cycle and exit statements are implemented with gotos. */
1528 cycle_label = gfc_build_label_decl (NULL_TREE);
1529 exit_label = gfc_build_label_decl (NULL_TREE);
1530 TREE_USED (exit_label) = 1;
1532 /* Put these labels where they can be found later. */
1533 code->cycle_label = cycle_label;
1534 code->exit_label = exit_label;
1536 /* Initialize the DO variable: dovar = from. */
1537 gfc_add_modify (&block, dovar, from);
1539 /* Save value for do-tinkering checking. */
1540 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1542 saved_dovar = gfc_create_var (type, ".saved_dovar");
1543 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1546 /* Initialize loop count and jump to exit label if the loop is empty.
1547 This code is executed before we enter the loop body. We generate:
1548 step_sign = sign(1,step);
1559 countm1 = (to*step_sign - from*step_sign) / (step*step_sign); */
1563 if (TREE_CODE (type) == INTEGER_TYPE)
1565 tree pos, neg, step_sign, to2, from2, step2;
1567 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1569 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1570 build_int_cst (TREE_TYPE (step), 0));
1571 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1572 build_int_cst (type, -1),
1573 build_int_cst (type, 1));
/* Empty-loop test for the integer case: jump straight to exit_label
   if no iteration would execute for the given step direction.  */
1575 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1576 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1577 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1579 build_empty_stmt (loc));
1581 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1583 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1584 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1586 build_empty_stmt (loc));
1587 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1588 pos_step, pos, neg);
1590 gfc_add_expr_to_block (&block, tmp);
1592 /* Calculate the loop count. to-from can overflow, so
1593 we cast to unsigned. */
/* Multiplying through by step_sign normalizes to a counting-up loop
   so one unsigned division computes the iteration count.  */
1595 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1596 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1597 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1598 step2 = fold_convert (utype, step2);
1599 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1600 tmp = fold_convert (utype, tmp);
1601 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1602 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1603 gfc_add_expr_to_block (&block, tmp);
1607 /* TODO: We could use the same width as the real type.
1608 This would probably cause more problems that it solves
1609 when we implement "long double" types. */
/* REAL-typed DO variable (deleted feature kept for legacy code):
   compute the trip count with a real division, then truncate.  */
1611 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1612 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1613 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1614 gfc_add_modify (&block, countm1, tmp);
1616 /* We need a special check for empty loops:
1617 empty = (step > 0 ? to < from : to > from); */
1618 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1619 fold_build2_loc (loc, LT_EXPR,
1620 boolean_type_node, to, from),
1621 fold_build2_loc (loc, GT_EXPR,
1622 boolean_type_node, to, from));
1623 /* If the loop is empty, go directly to the exit label. */
1624 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1625 build1_v (GOTO_EXPR, exit_label),
1626 build_empty_stmt (input_location));
1627 gfc_add_expr_to_block (&block, tmp);
1631 gfc_start_block (&body);
1633 /* Main loop body. */
1634 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1635 gfc_add_expr_to_block (&body, tmp);
1637 /* Label for cycle statements (if needed). */
1638 if (TREE_USED (cycle_label))
1640 tmp = build1_v (LABEL_EXPR, cycle_label);
1641 gfc_add_expr_to_block (&body, tmp);
1644 /* Check whether someone has modified the loop variable. */
1645 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1647 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1649 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1650 "Loop variable has been modified");
1653 /* Exit the loop if there is an I/O result condition or error. */
1656 tmp = build1_v (GOTO_EXPR, exit_label);
1657 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1659 build_empty_stmt (input_location));
1660 gfc_add_expr_to_block (&body, tmp);
1663 /* Increment the loop variable. */
1664 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1665 gfc_add_modify_loc (loc, &body, dovar, tmp);
1667 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1668 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1670 /* End with the loop condition. Loop until countm1 == 0. */
/* Test BEFORE decrementing: countm1 is unsigned, so decrementing
   past zero would wrap around instead of terminating.  */
1671 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1672 build_int_cst (utype, 0));
1673 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1674 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1675 cond, tmp, build_empty_stmt (loc));
1676 gfc_add_expr_to_block (&body, tmp);
1678 /* Decrement the loop count. */
1679 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1680 build_int_cst (utype, 1));
1681 gfc_add_modify_loc (loc, &body, countm1, tmp);
1683 /* End of loop body. */
1684 tmp = gfc_finish_block (&body);
1686 /* The for loop itself. */
1687 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1688 gfc_add_expr_to_block (&block, tmp);
1690 /* Add the exit label. */
1691 tmp = build1_v (LABEL_EXPR, exit_label);
1692 gfc_add_expr_to_block (&block, tmp);
1694 return gfc_finish_block (&block);
1698 /* Translate the DO WHILE construct.
1711 if (! cond) goto exit_label;
1717 Because the evaluation of the exit condition `cond' may have side
1718 effects, we can't do much for empty loop bodies. The backend optimizers
1719 should be smart enough to eliminate any dead loops. */
/* NOTE(review): listing elides the return type, declarations and braces;
   code below is byte-identical, only comments added.  */
1722 gfc_trans_do_while (gfc_code * code)
1730 /* Everything we build here is part of the loop body. */
1731 gfc_start_block (&block);
1733 /* Cycle and exit statements are implemented with gotos. */
1734 cycle_label = gfc_build_label_decl (NULL_TREE);
1735 exit_label = gfc_build_label_decl (NULL_TREE);
1737 /* Put the labels where they can be found later. See gfc_trans_do(). */
1738 code->cycle_label = cycle_label;
1739 code->exit_label = exit_label;
1741 /* Create a GIMPLE version of the exit condition. */
/* The condition is re-evaluated each iteration (top-of-loop test),
   which is why it is built inside the loop body block.  */
1742 gfc_init_se (&cond, NULL);
1743 gfc_conv_expr_val (&cond, code->expr1);
1744 gfc_add_block_to_block (&block, &cond.pre);
1745 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1746 TRUTH_NOT_EXPR, TREE_TYPE (cond.expr), cond.expr);
1748 /* Build "IF (! cond) GOTO exit_label". */
1749 tmp = build1_v (GOTO_EXPR, exit_label);
1750 TREE_USED (exit_label) = 1;
1751 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1752 void_type_node, cond.expr, tmp,
1753 build_empty_stmt (code->expr1->where.lb->location));
1754 gfc_add_expr_to_block (&block, tmp);
1756 /* The main body of the loop. */
1757 tmp = gfc_trans_code (code->block->next);
1758 gfc_add_expr_to_block (&block, tmp);
1760 /* Label for cycle statements (if needed). */
1761 if (TREE_USED (cycle_label))
1763 tmp = build1_v (LABEL_EXPR, cycle_label);
1764 gfc_add_expr_to_block (&block, tmp);
1767 /* End of loop body. */
1768 tmp = gfc_finish_block (&block);
1770 gfc_init_block (&block);
1771 /* Build the loop. */
1772 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1773 void_type_node, tmp);
1774 gfc_add_expr_to_block (&block, tmp);
1776 /* Add the exit label. */
1777 tmp = build1_v (LABEL_EXPR, exit_label);
1778 gfc_add_expr_to_block (&block, tmp);
1780 return gfc_finish_block (&block);
1784 /* Translate the SELECT CASE construct for INTEGER case expressions,
1785 without killing all potential optimizations. The problem is that
1786 Fortran allows unbounded cases, but the back-end does not, so we
1787 need to intercept those before we enter the equivalent SWITCH_EXPR
1790 For example, we translate this,
1793 CASE (:100,101,105:115)
1803 to the GENERIC equivalent,
1807 case (minimum value for typeof(expr) ... 100:
1813 case 200 ... (maximum value for typeof(expr):
/* NOTE(review): listing elides declarations and braces; code below is
   byte-identical, only comments added.  */
1830 gfc_trans_integer_select (gfc_code * code)
1840 gfc_start_block (&block);
1842 /* Calculate the switch expression. */
1843 gfc_init_se (&se, NULL);
1844 gfc_conv_expr_val (&se, code->expr1);
1845 gfc_add_block_to_block (&block, &se.pre);
1847 end_label = gfc_build_label_decl (NULL_TREE);
1849 gfc_init_block (&body);
/* Outer loop: one gfc_code block per CASE statement; inner loop: the
   comma-separated case ranges attached to that CASE.  */
1851 for (c = code->block; c; c = c->block)
1853 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1858 /* Assume it's the default case. */
1859 low = high = NULL_TREE;
1863 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1866 /* If there's only a lower bound, set the high bound to the
1867 maximum value of the case expression. */
1869 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1874 /* Three cases are possible here:
1876 1) There is no lower bound, e.g. CASE (:N).
1877 2) There is a lower bound .NE. high bound, that is
1878 a case range, e.g. CASE (N:M) where M>N (we make
1879 sure that M>N during type resolution).
1880 3) There is a lower bound, and it has the same value
1881 as the high bound, e.g. CASE (N:N). This is our
1882 internal representation of CASE(N).
1884 In the first and second case, we need to set a value for
1885 high. In the third case, we don't because the GCC middle
1886 end represents a single case value by just letting high be
1887 a NULL_TREE. We can't do that because we need to be able
1888 to represent unbounded cases. */
1892 && mpz_cmp (cp->low->value.integer,
1893 cp->high->value.integer) != 0))
1894 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1897 /* Unbounded case. */
1899 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1902 /* Build a label. */
1903 label = gfc_build_label_decl (NULL_TREE);
1905 /* Add this case label.
1906 Add parameter 'label', make it match GCC backend. */
1907 tmp = build_case_label (low, high, label);
1908 gfc_add_expr_to_block (&body, tmp);
1911 /* Add the statements for this case. */
1912 tmp = gfc_trans_code (c->next);
1913 gfc_add_expr_to_block (&body, tmp);
1915 /* Break to the end of the construct. */
/* Fortran has no fall-through between CASE blocks, so every case ends
   with an explicit jump past the switch.  */
1916 tmp = build1_v (GOTO_EXPR, end_label);
1917 gfc_add_expr_to_block (&body, tmp);
1920 tmp = gfc_finish_block (&body);
1921 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1922 gfc_add_expr_to_block (&block, tmp);
1924 tmp = build1_v (LABEL_EXPR, end_label);
1925 gfc_add_expr_to_block (&block, tmp);
1927 return gfc_finish_block (&block);
1931 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1933 There are only two cases possible here, even though the standard
1934 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1935 .FALSE., and DEFAULT.
1937 We never generate more than two blocks here. Instead, we always
1938 try to eliminate the DEFAULT case. This way, we can translate this
1939 kind of SELECT construct to a simple
1943 expression in GENERIC. */
/* NOTE(review): listing elides declarations and braces; code below is
   byte-identical, only comments added.  */
1946 gfc_trans_logical_select (gfc_code * code)
1949 gfc_code *t, *f, *d;
1954 /* Assume we don't have any cases at all. */
1957 /* Now see which ones we actually do have. We can have at most two
1958 cases in a single case list: one for .TRUE. and one for .FALSE.
1959 The default case is always separate. If the cases for .TRUE. and
1960 .FALSE. are in the same case list, the block for that case list
1961 always executed, and we don't generate code a COND_EXPR. */
1962 for (c = code->block; c; c = c->block)
1964 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1968 if (cp->low->value.logical == 0) /* .FALSE. */
1970 else /* if (cp->value.logical != 0), thus .TRUE. */
1978 /* Start a new block. */
1979 gfc_start_block (&block);
1981 /* Calculate the switch expression. We always need to do this
1982 because it may have side effects. */
1983 gfc_init_se (&se, NULL);
1984 gfc_conv_expr_val (&se, code->expr1);
1985 gfc_add_block_to_block (&block, &se.pre);
/* t == f means .TRUE. and .FALSE. share one case list: that block
   runs unconditionally and no COND_EXPR is needed.  */
1987 if (t == f && t != NULL)
1989 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1990 translate the code for these cases, append it to the current
1992 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1996 tree true_tree, false_tree, stmt;
1998 true_tree = build_empty_stmt (input_location);
1999 false_tree = build_empty_stmt (input_location);
2001 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2002 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2003 make the missing case the default case. */
2004 if (t != NULL && f != NULL)
2014 /* Translate the code for each of these blocks, and append it to
2015 the current block. */
2017 true_tree = gfc_trans_code (t->next);
2020 false_tree = gfc_trans_code (f->next);
2022 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2023 se.expr, true_tree, false_tree);
2024 gfc_add_expr_to_block (&block, stmt);
2027 return gfc_finish_block (&block);
2031 /* The jump table types are stored in static variables to avoid
2032 constructing them from scratch every single time. */
/* Index 0 / 1 correspond to the two supported character kinds
   (kind=1 and kind=4) — see gfc_trans_character_select.  */
2033 static GTY(()) tree select_struct[2];
2035 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2036 Instead of generating compares and jumps, it is far simpler to
2037 generate a data structure describing the cases in order and call a
2038 library subroutine that locates the right case.
2039 This is particularly true because this is the only case where we
2040 might have to dispose of a temporary.
2041 The library subroutine returns a pointer to jump to or NULL if no
2042 branches are to be taken. */
/* NOTE(review): listing elides many original lines (declarations,
   braces, some statements).  Code below is byte-identical; only
   comments were added.  */
2045 gfc_trans_character_select (gfc_code *code)
2047 tree init, end_label, tmp, type, case_num, label, fndecl;
2048 stmtblock_t block, body;
2053 VEC(constructor_elt,gc) *inits = NULL;
2055 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
2057 /* The jump table types are stored in static variables to avoid
2058 constructing them from scratch every single time. */
2059 static tree ss_string1[2], ss_string1_len[2];
2060 static tree ss_string2[2], ss_string2_len[2];
2061 static tree ss_target[2];
2063 cp = code->block->ext.block.case_list;
2064 while (cp->left != NULL)
2067 /* Generate the body */
2068 gfc_start_block (&block);
2069 gfc_init_se (&expr1se, NULL);
2070 gfc_conv_expr_reference (&expr1se, code->expr1);
2072 gfc_add_block_to_block (&block, &expr1se.pre);
2074 end_label = gfc_build_label_decl (NULL_TREE);
2076 gfc_init_block (&body);
2078 /* Attempt to optimize length 1 selects. */
/* Fast path: if the select expression has length 1, every case can be
   reduced to a single-character comparison and emitted as an integer
   SWITCH_EXPR — but only if no case label distinguishes strings beyond
   the first character.  The scan below verifies that.  */
2079 if (integer_onep (expr1se.string_length))
2081 for (d = cp; d; d = d->right)
2086 gcc_assert (d->low->expr_type == EXPR_CONSTANT
2087 && d->low->ts.type == BT_CHARACTER);
2088 if (d->low->value.character.length > 1)
2090 for (i = 1; i < d->low->value.character.length; i++)
2091 if (d->low->value.character.string[i] != ' ')
2093 if (i != d->low->value.character.length)
2095 if (optimize && d->high && i == 1)
2097 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2098 && d->high->ts.type == BT_CHARACTER);
2099 if (d->high->value.character.length > 1
2100 && (d->low->value.character.string[0]
2101 == d->high->value.character.string[0])
2102 && d->high->value.character.string[1] != ' '
2103 && ((d->low->value.character.string[1] < ' ')
2104 == (d->high->value.character.string[1]
2114 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2115 && d->high->ts.type == BT_CHARACTER);
2116 if (d->high->value.character.length > 1)
2118 for (i = 1; i < d->high->value.character.length; i++)
2119 if (d->high->value.character.string[i] != ' ')
2121 if (i != d->high->value.character.length)
/* The fast path proper: build the case labels from first characters
   and switch on the single character of the select expression.  */
2128 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
2130 for (c = code->block; c; c = c->block)
2132 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2138 /* Assume it's the default case. */
2139 low = high = NULL_TREE;
2143 /* CASE ('ab') or CASE ('ab':'az') will never match
2144 any length 1 character. */
2145 if (cp->low->value.character.length > 1
2146 && cp->low->value.character.string[1] != ' ')
2149 if (cp->low->value.character.length > 0)
2150 r = cp->low->value.character.string[0];
2153 low = build_int_cst (ctype, r);
2155 /* If there's only a lower bound, set the high bound
2156 to the maximum value of the case expression. */
2158 high = TYPE_MAX_VALUE (ctype);
2164 || (cp->low->value.character.string[0]
2165 != cp->high->value.character.string[0]))
2167 if (cp->high->value.character.length > 0)
2168 r = cp->high->value.character.string[0];
2171 high = build_int_cst (ctype, r);
2174 /* Unbounded case. */
2176 low = TYPE_MIN_VALUE (ctype);
2179 /* Build a label. */
2180 label = gfc_build_label_decl (NULL_TREE);
2182 /* Add this case label.
2183 Add parameter 'label', make it match GCC backend. */
2184 tmp = build_case_label (low, high, label);
2185 gfc_add_expr_to_block (&body, tmp);
2188 /* Add the statements for this case. */
2189 tmp = gfc_trans_code (c->next);
2190 gfc_add_expr_to_block (&body, tmp);
2192 /* Break to the end of the construct. */
2193 tmp = build1_v (GOTO_EXPR, end_label);
2194 gfc_add_expr_to_block (&body, tmp);
2197 tmp = gfc_string_to_single_character (expr1se.string_length,
2199 code->expr1->ts.kind);
2200 case_num = gfc_create_var (ctype, "case_num");
2201 gfc_add_modify (&block, case_num, tmp);
2203 gfc_add_block_to_block (&block, &expr1se.post);
2205 tmp = gfc_finish_block (&body);
2206 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2207 gfc_add_expr_to_block (&block, tmp);
2209 tmp = build1_v (LABEL_EXPR, end_label);
2210 gfc_add_expr_to_block (&block, tmp);
2212 return gfc_finish_block (&block);
/* General path: build a static jump table of (string1, string2, target)
   records and call the runtime's select_string routine to pick the
   case number, then switch on that number.  */
2216 if (code->expr1->ts.kind == 1)
2218 else if (code->expr1->ts.kind == 4)
/* Lazily build the RECORD_TYPE describing one jump-table entry for
   this character kind; cached in select_struct[k] across calls.  */
2223 if (select_struct[k] == NULL)
2226 select_struct[k] = make_node (RECORD_TYPE);
2228 if (code->expr1->ts.kind == 1)
2229 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2230 else if (code->expr1->ts.kind == 4)
2231 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2236 #define ADD_FIELD(NAME, TYPE) \
2237 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2238 get_identifier (stringize(NAME)), \
2242 ADD_FIELD (string1, pchartype);
2243 ADD_FIELD (string1_len, gfc_charlen_type_node);
2245 ADD_FIELD (string2, pchartype);
2246 ADD_FIELD (string2_len, gfc_charlen_type_node);
2248 ADD_FIELD (target, integer_type_node);
2251 gfc_finish_type (select_struct[k]);
2255 for (d = cp; d; d = d->right)
2258 for (c = code->block; c; c = c->block)
2260 for (d = c->ext.block.case_list; d; d = d->next)
2262 label = gfc_build_label_decl (NULL_TREE);
2263 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2265 : build_int_cst (integer_type_node, d->n),
2267 gfc_add_expr_to_block (&body, tmp);
2270 tmp = gfc_trans_code (c->next);
2271 gfc_add_expr_to_block (&body, tmp);
2273 tmp = build1_v (GOTO_EXPR, end_label);
2274 gfc_add_expr_to_block (&body, tmp);
2277 /* Generate the structure describing the branches */
2278 for (d = cp; d; d = d->right)
2280 VEC(constructor_elt,gc) *node = NULL;
2282 gfc_init_se (&se, NULL);
/* A NULL low/high bound is encoded as a null pointer with zero
   length; the runtime treats that as an unbounded endpoint.  */
2286 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2287 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2291 gfc_conv_expr_reference (&se, d->low);
2293 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2294 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2297 if (d->high == NULL)
2299 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2300 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2304 gfc_init_se (&se, NULL);
2305 gfc_conv_expr_reference (&se, d->high);
2307 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2308 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2311 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2312 build_int_cst (integer_type_node, d->n));
2314 tmp = build_constructor (select_struct[k], node);
2315 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2318 type = build_array_type (select_struct[k],
2319 build_index_type (size_int (n-1)));
2321 init = build_constructor (type, inits);
2322 TREE_CONSTANT (init) = 1;
2323 TREE_STATIC (init) = 1;
2324 /* Create a static variable to hold the jump table. */
2325 tmp = gfc_create_var (type, "jumptable");
2326 TREE_CONSTANT (tmp) = 1;
2327 TREE_STATIC (tmp) = 1;
2328 TREE_READONLY (tmp) = 1;
2329 DECL_INITIAL (tmp) = init;
2332 /* Build the library call */
2333 init = gfc_build_addr_expr (pvoid_type_node, init);
2335 if (code->expr1->ts.kind == 1)
2336 fndecl = gfor_fndecl_select_string;
2337 else if (code->expr1->ts.kind == 4)
2338 fndecl = gfor_fndecl_select_string_char4;
2342 tmp = build_call_expr_loc (input_location,
2344 build_int_cst (gfc_charlen_type_node, n),
2345 expr1se.expr, expr1se.string_length);
2346 case_num = gfc_create_var (integer_type_node, "case_num");
2347 gfc_add_modify (&block, case_num, tmp);
2349 gfc_add_block_to_block (&block, &expr1se.post);
2351 tmp = gfc_finish_block (&body);
2352 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2353 gfc_add_expr_to_block (&block, tmp);
2355 tmp = build1_v (LABEL_EXPR, end_label);
2356 gfc_add_expr_to_block (&block, tmp);
2358 return gfc_finish_block (&block);
2362 /* Translate the three variants of the SELECT CASE construct.
2364 SELECT CASEs with INTEGER case expressions can be translated to an
2365 equivalent GENERIC switch statement, and for LOGICAL case
2366 expressions we build one or two if-else compares.
2368 SELECT CASEs with CHARACTER case expressions are a whole different
2369 story, because they don't exist in GENERIC. So we sort them and
2370 do a binary search at runtime.
2372 Fortran has no BREAK statement, and it does not allow jumps from
2373 one case block to another. That makes things a lot easier for
/* NOTE(review): listing elides declarations, braces and the switch
   scaffolding; code below is byte-identical, only comments added.  */
2377 gfc_trans_select (gfc_code * code)
2383 gcc_assert (code && code->expr1);
2384 gfc_init_block (&block);
2386 /* Build the exit label and hang it in. */
2387 exit_label = gfc_build_label_decl (NULL_TREE);
2388 code->exit_label = exit_label;
2390 /* Empty SELECT constructs are legal. */
2391 if (code->block == NULL)
2392 body = build_empty_stmt (input_location);
2394 /* Select the correct translation function. */
/* Dispatch on the type of the case expression; resolution has already
   rejected anything but LOGICAL, INTEGER and CHARACTER.  */
2396 switch (code->expr1->ts.type)
2399 body = gfc_trans_logical_select (code);
2403 body = gfc_trans_integer_select (code);
2407 body = gfc_trans_character_select (code);
2411 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2415 /* Build everything together. */
2416 gfc_add_expr_to_block (&block, body);
2417 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2419 return gfc_finish_block (&block);
2423 /* Traversal function to substitute a replacement symtree if the symbol
2424 in the expression is the same as that passed. f == 2 signals that
2425 that variable itself is not to be checked - only the references.
2426 This group of functions is used when the variable expression in a
2427 FORALL assignment has internal references. For example:
2428 FORALL (i = 1:4) p(p(i)) = i
2429 The only recourse here is to store a copy of 'p' for the index
/* File-scope state shared by forall_replace/forall_restore: the
   original symtree and the temporary one substituted for it.  */
2432 static gfc_symtree *new_symtree;
2433 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: redirect references to SYM to the
   temporary's symtree.  NOTE(review): listing elides the return type,
   braces and return statements of this function.  */
2436 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2438 if (expr->expr_type != EXPR_VARIABLE)
2443 else if (expr->symtree->n.sym == sym)
2444 expr->symtree = new_symtree;
/* Walk expression E, replacing references to SYM with the temporary
   symtree (see forall_replace above).  F is forwarded as the traversal
   flag: 2 skips the root variable itself, 1 checks everything.  */
2450 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2452 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback for gfc_traverse_expr: undo forall_replace by restoring the
   original symtree wherever the temporary one was substituted.  */
2456 forall_restore (gfc_expr *expr,
2457 gfc_symbol *sym ATTRIBUTE_UNUSED,
2458 int *f ATTRIBUTE_UNUSED)
2460 if (expr->expr_type != EXPR_VARIABLE)
2463 if (expr->symtree == new_symtree)
2464 expr->symtree = old_symtree;
/* Walk expression E and restore the original symtree everywhere the
   temporary was substituted (inverse of forall_replace_symtree).  */
2470 forall_restore_symtree (gfc_expr *e)
2472 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the lvalue of a FORALL assignment whose
   variable expression references itself (e.g. p(p(i)) = i), then
   redirect both sides of the assignment to the copy.  Init code goes
   to PRE, cleanup to POST.  NOTE(review): listing elides declarations
   and braces; code below is byte-identical, only comments added.  */
2476 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2481 gfc_symbol *new_sym;
2482 gfc_symbol *old_sym;
2486 /* Build a copy of the lvalue. */
2487 old_symtree = c->expr1->symtree;
2488 old_sym = old_symtree->n.sym;
2489 e = gfc_lval_expr_from_sym (old_sym);
2490 if (old_sym->attr.dimension)
2492 gfc_init_se (&tse, NULL);
/* Array case: copy the whole array into a temporary descriptor.  */
2493 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2494 gfc_add_block_to_block (pre, &tse.pre);
2495 gfc_add_block_to_block (post, &tse.post);
2496 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2498 if (e->ts.type != BT_CHARACTER)
2500 /* Use the variable offset for the temporary. */
2501 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2502 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: evaluate the variable once and assign it to a scalar
   (or character) temporary.  */
2507 gfc_init_se (&tse, NULL);
2508 gfc_init_se (&rse, NULL);
2509 gfc_conv_expr (&rse, e);
2510 if (e->ts.type == BT_CHARACTER)
2512 tse.string_length = rse.string_length;
2513 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2515 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2517 gfc_add_block_to_block (pre, &tse.pre);
2518 gfc_add_block_to_block (post, &tse.post);
2522 tmp = gfc_typenode_for_spec (&e->ts);
2523 tse.expr = gfc_create_var (tmp, "temp");
2526 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2527 e->expr_type == EXPR_VARIABLE, true);
2528 gfc_add_expr_to_block (pre, tmp);
2532 /* Create a new symbol to represent the lvalue. */
2533 new_sym = gfc_new_symbol (old_sym->name, NULL);
2534 new_sym->ts = old_sym->ts;
2535 new_sym->attr.referenced = 1;
2536 new_sym->attr.temporary = 1;
2537 new_sym->attr.dimension = old_sym->attr.dimension;
2538 new_sym->attr.flavor = old_sym->attr.flavor;
2540 /* Use the temporary as the backend_decl. */
2541 new_sym->backend_decl = tse.expr;
2543 /* Create a fake symtree for it. */
2545 new_symtree = gfc_new_symtree (&root, old_sym->name);
2546 new_symtree->n.sym = new_sym;
2547 gcc_assert (new_symtree == root);
2549 /* Go through the expression reference replacing the old_symtree
2551 forall_replace_symtree (c->expr1, old_sym, 2);
2553 /* Now we have made this temporary, we might as well use it for
2554 the right hand side. */
/* f == 1 here: on the RHS the variable itself must be replaced too,
   unlike the LHS root (f == 2) which stays the assignment target.  */
2555 forall_replace_symtree (c->expr2, old_sym, 1);
2559 /* Handles dependencies in forall assignments. */
/* Check the assignment C for lhs/rhs and lhs-internal dependencies and,
   where one is found, replace the lhs by a temporary via
   forall_make_variable_temp.  Returns the gfc_check_dependency result so
   the caller knows whether a temporary is still needed.  */
2561 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2568 lsym = c->expr1->symtree->n.sym;
2569 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2571 /* Now check for dependencies within the 'variable'
2572 expression itself. These are treated by making a complete
2573 copy of variable and changing all the references to it
2574 point to the copy instead. Note that the shallow copy of
2575 the variable will not suffice for derived types with
2576 pointer components. We therefore leave these to their
2578 if (lsym->ts.type == BT_DERIVED
2579 && lsym->ts.u.derived->attr.pointer_comp)
/* Derived types with pointer components are excluded: a shallow copy
   would share the pointed-to data.  */
2583 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2585 forall_make_variable_temp (c, pre, post);
2589 /* Substrings with dependencies are treated in the same
2591 if (c->expr1->ts.type == BT_CHARACTER
2593 && c->expr2->expr_type == EXPR_VARIABLE
2594 && lsym == c->expr2->symtree->n.sym)
/* Same character variable on both sides: look for overlapping
   substring references.  */
2596 for (lref = c->expr1->ref; lref; lref = lref->next)
2597 if (lref->type == REF_SUBSTRING)
2599 for (rref = c->expr2->ref; rref; rref = rref->next)
2600 if (rref->type == REF_SUBSTRING)
/* A temporary is only needed when the rhs substring starts before the
   lhs substring.  */
2604 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2606 forall_make_variable_temp (c, pre, post);
/* Undo the symtree replacement performed for dependency handling: restore
   the original symtrees in both sides of C and release the fake symbol.  */
2615 cleanup_forall_symtrees (gfc_code *c)
2617 forall_restore_symtree (c->expr1);
2618 forall_restore_symtree (c->expr2);
/* Free the temporary symbol created by forall_make_variable_temp.  */
2619 free (new_symtree->n.sym);
2624 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2625 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2626 indicates whether we should generate code to test the FORALLs mask
2627 array. OUTER is the loop header to be used for initializing mask
2630 The generated loop format is:
2631 count = (end - start + step) / step
/* Wrap BODY in the iteration loops of one FORALL level FORALL_TMP.
   Each index gets a down-counting iteration count so the exit test is a
   simple compare against zero; MASK_FLAG selects whether the mask index
   is also advanced; OUTER receives the mask-index initialization.  */
2644 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2645 int mask_flag, stmtblock_t *outer)
2653 tree var, start, end, step;
2656 /* Initialize the mask index outside the FORALL nest. */
2657 if (mask_flag && forall_tmp->mask)
2658 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2660 iter = forall_tmp->this_loop;
2661 nvar = forall_tmp->nvar;
/* Build the loops innermost-first, wrapping the current BODY each time.  */
2662 for (n = 0; n < nvar; n++)
2665 start = iter->start;
2669 exit_label = gfc_build_label_decl (NULL_TREE);
2670 TREE_USED (exit_label) = 1;
2672 /* The loop counter. */
2673 count = gfc_create_var (TREE_TYPE (var), "count");
2675 /* The body of the loop. */
2676 gfc_init_block (&block);
2678 /* The exit condition: leave the loop once count <= 0. */
2679 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2680 count, build_int_cst (TREE_TYPE (count), 0));
2681 tmp = build1_v (GOTO_EXPR, exit_label);
2682 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2683 cond, tmp, build_empty_stmt (input_location));
2684 gfc_add_expr_to_block (&block, tmp);
2686 /* The main loop body. */
2687 gfc_add_expr_to_block (&block, body);
2689 /* Increment the loop variable. */
2690 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2692 gfc_add_modify (&block, var, tmp);
2694 /* Advance to the next mask element. Only do this for the
2696 if (n == 0 && mask_flag && forall_tmp->mask)
/* The mask is linear, so only the innermost loop (n == 0) steps it.  */
2698 tree maskindex = forall_tmp->maskindex;
2699 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2700 maskindex, gfc_index_one_node);
2701 gfc_add_modify (&block, maskindex, tmp);
2704 /* Decrement the loop counter. */
2705 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2706 build_int_cst (TREE_TYPE (var), 1));
2707 gfc_add_modify (&block, count, tmp);
2709 body = gfc_finish_block (&block);
2711 /* Loop var initialization. */
2712 gfc_init_block (&block);
2713 gfc_add_modify (&block, var, start);
2716 /* Initialize the loop counter: count = (end + step - start) / step. */
2717 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2719 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2721 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2723 gfc_add_modify (&block, count, tmp);
2725 /* The loop expression. */
2726 tmp = build1_v (LOOP_EXPR, body);
2727 gfc_add_expr_to_block (&block, tmp);
2729 /* The exit label. */
2730 tmp = build1_v (LABEL_EXPR, exit_label);
2731 gfc_add_expr_to_block (&block, tmp);
2733 body = gfc_finish_block (&block);
2740 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2741 is nonzero, the body is controlled by all masks in the forall nest.
2742 Otherwise, the innermost loop is not controlled by its mask. This
2743 is used for initializing that mask. */
/* Wrap BODY in the loops of every FORALL level in NESTED_FORALL_INFO,
   innermost first, applying each level's mask as a COND_EXPR guard.  */
2746 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2751 forall_info *forall_tmp;
2752 tree mask, maskindex;
2754 gfc_start_block (&header);
2756 forall_tmp = nested_forall_info;
/* Walk outward through the nest via prev_nest, wrapping as we go.  */
2757 while (forall_tmp != NULL)
2759 /* Generate body with masks' control. */
2762 mask = forall_tmp->mask;
2763 maskindex = forall_tmp->maskindex;
2765 /* If a mask was specified make the assignment conditional. */
2768 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2769 body = build3_v (COND_EXPR, tmp, body,
2770 build_empty_stmt (input_location));
2773 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2774 forall_tmp = forall_tmp->prev_nest;
2778 gfc_add_expr_to_block (&header, body);
2779 return gfc_finish_block (&header);
2783 /* Allocate data for holding a temporary array. Returns either a local
2784 temporary array or a pointer variable. */
/* Allocate BYTESIZE bytes for an array of SIZE elements of ELEM_TYPE,
   either on the stack (when the size is a compile-time constant that
   fits) or via malloc.  *PDATA receives the pointer to free later.  */
2787 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2794 if (INTEGER_CST_P (size))
/* Constant size: build the range [0, size-1] for a fixed array type.  */
2795 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2796 size, gfc_index_one_node);
2800 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2801 type = build_array_type (elem_type, type);
2802 if (gfc_can_put_var_on_stack (bytesize))
2804 gcc_assert (INTEGER_CST_P (size));
2805 tmpvar = gfc_create_var (type, "temp");
/* Heap fallback: allocate with malloc and remember the raw pointer.  */
2810 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2811 *pdata = convert (pvoid_type_node, tmpvar);
2813 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2814 gfc_add_modify (pblock, tmpvar, tmp);
2820 /* Generate codes to copy the temporary to the actual lhs. */
/* Generate the copy-back loop that moves the temporary TMP1 (indexed by
   COUNT1) into the actual lhs EXPR, optionally guarded by WHEREMASK
   (indexed by COUNT3, sense inverted when INVERT is set).  */
2823 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2824 tree count1, tree wheremask, bool invert)
2828 stmtblock_t block, body;
2834 lss = gfc_walk_expr (expr);
2836 if (lss == gfc_ss_terminator)
/* Scalar lhs: a single assignment from the temporary, no scalarizer.  */
2838 gfc_start_block (&block);
2840 gfc_init_se (&lse, NULL);
2842 /* Translate the expression. */
2843 gfc_conv_expr (&lse, expr);
2845 /* Form the expression for the temporary. */
2846 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2848 /* Use the scalar assignment as is. */
2849 gfc_add_block_to_block (&block, &lse.pre);
2850 gfc_add_modify (&block, lse.expr, tmp);
2851 gfc_add_block_to_block (&block, &lse.post);
2853 /* Increment the count1. */
2854 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2855 count1, gfc_index_one_node);
2856 gfc_add_modify (&block, count1, tmp);
2858 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop over the lhs shape.  */
2862 gfc_start_block (&block);
2864 gfc_init_loopinfo (&loop1);
2865 gfc_init_se (&rse, NULL);
2866 gfc_init_se (&lse, NULL);
2868 /* Associate the lss with the loop. */
2869 gfc_add_ss_to_loop (&loop1, lss);
2871 /* Calculate the bounds of the scalarization. */
2872 gfc_conv_ss_startstride (&loop1);
2873 /* Setup the scalarizing loops. */
2874 gfc_conv_loop_setup (&loop1, &expr->where);
2876 gfc_mark_ss_chain_used (lss, 1);
2878 /* Start the scalarized loop body. */
2879 gfc_start_scalarized_body (&loop1, &body);
2881 /* Setup the gfc_se structures. */
2882 gfc_copy_loopinfo_to_se (&lse, &loop1);
2885 /* Form the expression of the temporary. */
2886 if (lss != gfc_ss_terminator)
2887 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2888 /* Translate expr. */
2889 gfc_conv_expr (&lse, expr);
2891 /* Use the scalar assignment. */
2892 rse.string_length = lse.string_length;
2893 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2895 /* Form the mask expression according to the mask tree list. */
2898 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT flips the sense of the WHERE mask (ELSEWHERE handling).  */
2900 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2901 TREE_TYPE (wheremaskexpr),
2903 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2905 build_empty_stmt (input_location));
2908 gfc_add_expr_to_block (&body, tmp);
2910 /* Increment count1. */
2911 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2912 count1, gfc_index_one_node);
2913 gfc_add_modify (&body, count1, tmp);
2915 /* Increment count3. */
2918 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2919 gfc_array_index_type, count3,
2920 gfc_index_one_node);
2921 gfc_add_modify (&body, count3, tmp);
2924 /* Generate the copying loops. */
2925 gfc_trans_scalarizing_loops (&loop1, &body);
2926 gfc_add_block_to_block (&block, &loop1.pre);
2927 gfc_add_block_to_block (&block, &loop1.post);
2928 gfc_cleanup_loop (&loop1);
2930 tmp = gfc_finish_block (&block);
2936 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2937 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2938 and should not be freed. WHEREMASK is the conditional execution mask
2939 whose sense may be inverted by INVERT. */
/* Generate the loop that evaluates rhs EXPR2 into the temporary TMP1
   (indexed by COUNT1).  LSS/RSS come from compute_inner_temp_size and
   must not be freed here; WHEREMASK/INVERT guard each element store.  */
2942 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2943 tree count1, gfc_ss *lss, gfc_ss *rss,
2944 tree wheremask, bool invert)
2946 stmtblock_t block, body1;
2953 gfc_start_block (&block);
2955 gfc_init_se (&rse, NULL);
2956 gfc_init_se (&lse, NULL);
2958 if (lss == gfc_ss_terminator)
/* Scalar assignment: evaluate the rhs once into the temporary.  */
2960 gfc_init_block (&body1);
2961 gfc_conv_expr (&rse, expr2);
2962 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
/* Array assignment: scalarize over the loop shape.  */
2966 /* Initialize the loop. */
2967 gfc_init_loopinfo (&loop);
2969 /* We may need LSS to determine the shape of the expression. */
2970 gfc_add_ss_to_loop (&loop, lss);
2971 gfc_add_ss_to_loop (&loop, rss);
2973 gfc_conv_ss_startstride (&loop);
2974 gfc_conv_loop_setup (&loop, &expr2->where);
2976 gfc_mark_ss_chain_used (rss, 1);
2977 /* Start the loop body. */
2978 gfc_start_scalarized_body (&loop, &body1);
2980 /* Translate the expression. */
2981 gfc_copy_loopinfo_to_se (&rse, &loop);
2983 gfc_conv_expr (&rse, expr2);
2985 /* Form the expression of the temporary. */
2986 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2989 /* Use the scalar assignment. */
2990 lse.string_length = rse.string_length;
2991 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2992 expr2->expr_type == EXPR_VARIABLE, true);
2994 /* Form the mask expression according to the mask tree list. */
2997 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT flips the mask's sense for ELSEWHERE-style guards.  */
2999 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3000 TREE_TYPE (wheremaskexpr),
3002 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
3004 build_empty_stmt (input_location));
3007 gfc_add_expr_to_block (&body1, tmp);
3009 if (lss == gfc_ss_terminator)
/* Scalar path: emit the body directly into the outer block.  */
3011 gfc_add_block_to_block (&block, &body1);
3013 /* Increment count1. */
3014 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
3015 count1, gfc_index_one_node);
3016 gfc_add_modify (&block, count1, tmp);
/* Array path: bump the counters inside the scalarized body.  */
3020 /* Increment count1. */
3021 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3022 count1, gfc_index_one_node);
3023 gfc_add_modify (&body1, count1, tmp);
3025 /* Increment count3. */
3028 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3029 gfc_array_index_type,
3030 count3, gfc_index_one_node);
3031 gfc_add_modify (&body1, count3, tmp);
3034 /* Generate the copying loops. */
3035 gfc_trans_scalarizing_loops (&loop, &body1);
3037 gfc_add_block_to_block (&block, &loop.pre);
3038 gfc_add_block_to_block (&block, &loop.post);
3040 gfc_cleanup_loop (&loop);
3041 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3042 as tree nodes in SS may not be valid in different scope. */
3045 tmp = gfc_finish_block (&block);
3050 /* Calculate the size of temporary needed in the assignment inside forall.
3051 LSS and RSS are filled in this function. */
/* Compute the number of elements the inner temporary needs for the
   assignment EXPR1 = EXPR2 inside a FORALL.  Fills *LSS and *RSS with
   the walked SS chains for reuse by the copy loops; the size expression
   is evaluated into PBLOCK.  */
3054 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
3055 stmtblock_t * pblock,
3056 gfc_ss **lss, gfc_ss **rss)
3064 *lss = gfc_walk_expr (expr1);
/* A scalar lhs needs exactly one element.  */
3067 size = gfc_index_one_node;
3068 if (*lss != gfc_ss_terminator)
3070 gfc_init_loopinfo (&loop);
3072 /* Walk the RHS of the expression. */
3073 *rss = gfc_walk_expr (expr2);
3074 if (*rss == gfc_ss_terminator)
3075 /* The rhs is scalar. Add a ss for the expression. */
3076 *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
3078 /* Associate the SS with the loop. */
3079 gfc_add_ss_to_loop (&loop, *lss);
3080 /* We don't actually need to add the rhs at this point, but it might
3081 make guessing the loop bounds a bit easier. */
3082 gfc_add_ss_to_loop (&loop, *rss);
3084 /* We only want the shape of the expression, not rest of the junk
3085 generated by the scalarizer. */
3086 loop.array_parameter = 1;
3088 /* Calculate the bounds of the scalarization. */
/* Bounds checking is suppressed here: only the shape matters, and the
   real checks are emitted by the actual assignment loops.  */
3089 save_flag = gfc_option.rtcheck;
3090 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
3091 gfc_conv_ss_startstride (&loop);
3092 gfc_option.rtcheck = save_flag;
3093 gfc_conv_loop_setup (&loop, &expr2->where);
3095 /* Figure out how many elements we need. */
3096 for (i = 0; i < loop.dimen; i++)
/* size *= (to - from + 1) for each scalarized dimension.  */
3098 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3099 gfc_array_index_type,
3100 gfc_index_one_node, loop.from[i]);
3101 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3102 gfc_array_index_type, tmp, loop.to[i]);
3103 size = fold_build2_loc (input_location, MULT_EXPR,
3104 gfc_array_index_type, size, tmp);
3106 gfc_add_block_to_block (pblock, &loop.pre);
3107 size = gfc_evaluate_now (size, pblock);
3108 gfc_add_block_to_block (pblock, &loop.post);
3110 /* TODO: write a function that cleans up a loopinfo without freeing
3111 the SS chains. Currently a NOP. */
3118 /* Calculate the overall iterator number of the nested forall construct.
3119 This routine actually calculates the number of times the body of the
3120 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3121 that by the expression INNER_SIZE. The BLOCK argument specifies the
3122 block in which to calculate the result, and the optional INNER_SIZE_BODY
3123 argument contains any statements that need to executed (inside the loop)
3124 to initialize or calculate INNER_SIZE. */
/* Compute the total iteration count of the FORALL nest times INNER_SIZE.
   Constant, unmasked levels are folded at compile time; the remainder is
   accumulated at run time in a counter driven by the nest's loops.  */
3127 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
3128 stmtblock_t *inner_size_body, stmtblock_t *block)
3130 forall_info *forall_tmp = nested_forall_info;
3134 /* We can eliminate the innermost unconditional loops with constant
3136 if (INTEGER_CST_P (inner_size))
/* Fold each constant-size, maskless level into inner_size directly.  */
3139 && !forall_tmp->mask
3140 && INTEGER_CST_P (forall_tmp->size))
3142 inner_size = fold_build2_loc (input_location, MULT_EXPR,
3143 gfc_array_index_type,
3144 inner_size, forall_tmp->size);
3145 forall_tmp = forall_tmp->prev_nest;
3148 /* If there are no loops left, we have our constant result. */
3153 /* Otherwise, create a temporary variable to compute the result. */
3154 number = gfc_create_var (gfc_array_index_type, "num");
3155 gfc_add_modify (block, number, gfc_index_zero_node);
3157 gfc_start_block (&body);
3158 if (inner_size_body)
3159 gfc_add_block_to_block (&body, inner_size_body);
/* Inside the remaining loops: num += inner_size.  */
3161 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3162 gfc_array_index_type, number, inner_size);
3165 gfc_add_modify (&body, number, tmp);
3166 tmp = gfc_finish_block (&body);
3168 /* Generate loops. */
3169 if (forall_tmp != NULL)
3170 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3172 gfc_add_expr_to_block (block, tmp);
3178 /* Allocate temporary for forall construct. SIZE is the size of temporary
3179 needed. PTEMP1 is returned for space free. */
/* Allocate a temporary of SIZE elements of TYPE for a FORALL construct.
   *PTEMP1 receives the raw pointer to free afterwards (or NULL for a
   stack temporary).  Returns the dereferenced array expression.  */
3182 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
3189 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
/* Skip the multiply when the element size is one byte.  */
3190 if (!integer_onep (unit))
3191 bytesize = fold_build2_loc (input_location, MULT_EXPR,
3192 gfc_array_index_type, size, unit);
3197 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
3200 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3205 /* Allocate temporary for forall construct according to the information in
3206 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3207 assignment inside forall. PTEMP1 is returned for space free. */
/* Allocate a temporary sized for the whole FORALL nest: the overall
   iteration count of NESTED_FORALL_INFO times INNER_SIZE elements of
   TYPE.  *PTEMP1 receives the pointer to free later.  */
3210 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3211 tree inner_size, stmtblock_t * inner_size_body,
3212 stmtblock_t * block, tree * ptemp1)
3216 /* Calculate the total size of temporary needed in forall construct. */
3217 size = compute_overall_iter_number (nested_forall_info, inner_size,
3218 inner_size_body, block);
3220 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3224 /* Handle assignments inside forall which need temporary.
3226 forall (i=start:end:stride; maskexpr)
3229 (where e,f<i> are arbitrary expressions possibly involving i
3230 and there is a dependency between e<i> and f<i>)
3232 masktmp(:) = maskexpr(:)
3237 for (i = start; i <= end; i += stride)
3241 for (i = start; i <= end; i += stride)
3243 if (masktmp[maskindex++])
3244 tmp[count1++] = f<i>
3248 for (i = start; i <= end; i += stride)
3250 if (masktmp[maskindex++])
3251 e<i> = tmp[count1++]
/* Translate EXPR1 = EXPR2 inside a FORALL when a temporary is required:
   first a loop nest copies the rhs into a linear temporary, then a second
   identical nest copies the temporary into the lhs.  WHEREMASK/INVERT
   guard the element transfers for WHERE handling.  */
3256 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3257 tree wheremask, bool invert,
3258 forall_info * nested_forall_info,
3259 stmtblock_t * block)
3267 stmtblock_t inner_size_body;
3269 /* Create vars. count1 is the current iterator number of the nested
3271 count1 = gfc_create_var (gfc_array_index_type, "count1");
3273 /* Count is the wheremask index. */
3276 count = gfc_create_var (gfc_array_index_type, "count");
3277 gfc_add_modify (block, count, gfc_index_zero_node);
3282 /* Initialize count1. */
3283 gfc_add_modify (block, count1, gfc_index_zero_node);
3285 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3286 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3287 gfc_init_block (&inner_size_body);
3288 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3291 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3292 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
/* Character lhs: make sure the length has a backend decl first.  */
3294 if (!expr1->ts.u.cl->backend_decl)
3297 gfc_init_se (&tse, NULL);
3298 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3299 expr1->ts.u.cl->backend_decl = tse.expr;
3301 type = gfc_get_character_type_len (gfc_default_character_kind,
3302 expr1->ts.u.cl->backend_decl);
3305 type = gfc_typenode_for_spec (&expr1->ts);
3307 /* Allocate temporary for nested forall construct according to the
3308 information in nested_forall_info and inner_size. */
3309 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3310 &inner_size_body, block, &ptemp1);
3312 /* Generate codes to copy rhs to the temporary . */
3313 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3316 /* Generate body and loops according to the information in
3317 nested_forall_info. */
3318 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3319 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second pass over the nest.  */
3322 gfc_add_modify (block, count1, gfc_index_zero_node);
3326 gfc_add_modify (block, count, gfc_index_zero_node);
3328 /* Generate codes to copy the temporary to lhs. */
3329 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3332 /* Generate body and loops according to the information in
3333 nested_forall_info. */
3334 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3335 gfc_add_expr_to_block (block, tmp);
3339 /* Free the temporary. */
3340 tmp = gfc_call_free (ptemp1);
3341 gfc_add_expr_to_block (block, tmp);
3346 /* Translate pointer assignment inside FORALL which need temporary. */
/* Translate EXPR1 => EXPR2 inside a FORALL when a temporary is required:
   evaluate all the rhs pointers (or descriptors) into a linear temporary
   in a first pass over the nest, then assign them to the lhs in a second
   pass.  Scalar pointers and array-descriptor pointers take separate
   paths.  */
3349 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3350 forall_info * nested_forall_info,
3351 stmtblock_t * block)
3358 gfc_array_info *info;
3365 tree tmp, tmp1, ptemp1;
3367 count = gfc_create_var (gfc_array_index_type, "count");
3368 gfc_add_modify (block, count, gfc_index_zero_node);
3370 inner_size = gfc_index_one_node;
3371 lss = gfc_walk_expr (expr1);
3372 rss = gfc_walk_expr (expr2);
3373 if (lss == gfc_ss_terminator)
/* Scalar pointer assignment: the temporary holds one pointer per
   iteration of the nest.  */
3375 type = gfc_typenode_for_spec (&expr1->ts);
3376 type = build_pointer_type (type);
3378 /* Allocate temporary for nested forall construct according to the
3379 information in nested_forall_info and inner_size. */
3380 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3381 inner_size, NULL, block, &ptemp1);
/* Pass 1: store the address of the rhs into the temporary.  */
3382 gfc_start_block (&body);
3383 gfc_init_se (&lse, NULL);
3384 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3385 gfc_init_se (&rse, NULL);
3386 rse.want_pointer = 1;
3387 gfc_conv_expr (&rse, expr2);
3388 gfc_add_block_to_block (&body, &rse.pre);
3389 gfc_add_modify (&body, lse.expr,
3390 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3391 gfc_add_block_to_block (&body, &rse.post);
3393 /* Increment count. */
3394 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3395 count, gfc_index_one_node);
3396 gfc_add_modify (&body, count, tmp);
3398 tmp = gfc_finish_block (&body);
3400 /* Generate body and loops according to the information in
3401 nested_forall_info. */
3402 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3403 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the nest, assigning the saved pointers to the lhs.  */
3406 gfc_add_modify (block, count, gfc_index_zero_node);
3408 gfc_start_block (&body);
3409 gfc_init_se (&lse, NULL);
3410 gfc_init_se (&rse, NULL);
3411 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3412 lse.want_pointer = 1;
3413 gfc_conv_expr (&lse, expr1);
3414 gfc_add_block_to_block (&body, &lse.pre);
3415 gfc_add_modify (&body, lse.expr, rse.expr);
3416 gfc_add_block_to_block (&body, &lse.post);
3417 /* Increment count. */
3418 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3419 count, gfc_index_one_node);
3420 gfc_add_modify (&body, count, tmp);
3421 tmp = gfc_finish_block (&body);
3423 /* Generate body and loops according to the information in
3424 nested_forall_info. */
3425 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3426 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: the temporary holds one descriptor per
   iteration of the nest.  */
3430 gfc_init_loopinfo (&loop);
3432 /* Associate the SS with the loop. */
3433 gfc_add_ss_to_loop (&loop, rss);
3435 /* Setup the scalarizing loops and bounds. */
3436 gfc_conv_ss_startstride (&loop);
3438 gfc_conv_loop_setup (&loop, &expr2->where);
3440 info = &rss->info->data.array;
3441 desc = info->descriptor;
3443 /* Make a new descriptor. */
3444 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3445 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3446 loop.from, loop.to, 1,
3447 GFC_ARRAY_UNKNOWN, true);
3449 /* Allocate temporary for nested forall construct. */
3450 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3451 inner_size, NULL, block, &ptemp1);
/* Pass 1: build each rhs descriptor directly into the temporary via
   direct_byref.  */
3452 gfc_start_block (&body);
3453 gfc_init_se (&lse, NULL);
3454 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3455 lse.direct_byref = 1;
3456 rss = gfc_walk_expr (expr2);
3457 gfc_conv_expr_descriptor (&lse, expr2, rss);
3459 gfc_add_block_to_block (&body, &lse.pre);
3460 gfc_add_block_to_block (&body, &lse.post);
3462 /* Increment count. */
3463 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3464 count, gfc_index_one_node);
3465 gfc_add_modify (&body, count, tmp);
3467 tmp = gfc_finish_block (&body);
3469 /* Generate body and loops according to the information in
3470 nested_forall_info. */
3471 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3472 gfc_add_expr_to_block (block, tmp);
/* Pass 2: assign each saved descriptor to the lhs descriptor.  */
3475 gfc_add_modify (block, count, gfc_index_zero_node);
3477 parm = gfc_build_array_ref (tmp1, count, NULL);
3478 lss = gfc_walk_expr (expr1);
3479 gfc_init_se (&lse, NULL);
3480 gfc_conv_expr_descriptor (&lse, expr1, lss);
3481 gfc_add_modify (&lse.pre, lse.expr, parm);
3482 gfc_start_block (&body);
3483 gfc_add_block_to_block (&body, &lse.pre);
3484 gfc_add_block_to_block (&body, &lse.post);
3486 /* Increment count. */
3487 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3488 count, gfc_index_one_node);
3489 gfc_add_modify (&body, count, tmp);
3491 tmp = gfc_finish_block (&body);
3493 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3494 gfc_add_expr_to_block (block, tmp);
3496 /* Free the temporary. */
3499 tmp = gfc_call_free (ptemp1);
3500 gfc_add_expr_to_block (block, tmp);
3505 /* FORALL and WHERE statements are really nasty, especially when you nest
3506 them. All the rhs of a forall assignment must be evaluated before the
3507 actual assignments are performed. Presumably this also applies to all the
3508 assignments in an inner where statement. */
3510 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3511 linear array, relying on the fact that we process in the same order in all
3514 forall (i=start:end:stride; maskexpr)
3518 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3520 count = ((end + 1 - start) / stride)
3521 masktmp(:) = maskexpr(:)
3524 for (i = start; i <= end; i += stride)
3526 if (masktmp[maskindex++])
3530 for (i = start; i <= end; i += stride)
3532 if (masktmp[maskindex++])
3536 Note that this code only works when there are no dependencies.
3537 Forall loop with array assignments and data dependencies are a real pain,
3538 because the size of the temporary cannot always be determined before the
3539 loop is executed. This problem is compounded by the presence of nested
3544 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3561 tree cycle_label = NULL_TREE;
3565 gfc_forall_iterator *fa;
3568 gfc_saved_var *saved_vars;
3569 iter_info *this_forall;
3573 /* Do nothing if the mask is false. */
3575 && code->expr1->expr_type == EXPR_CONSTANT
3576 && !code->expr1->value.logical)
3577 return build_empty_stmt (input_location);
3580 /* Count the FORALL index number. */
3581 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3585 /* Allocate the space for var, start, end, step, varexpr. */
3586 var = XCNEWVEC (tree, nvar);
3587 start = XCNEWVEC (tree, nvar);
3588 end = XCNEWVEC (tree, nvar);
3589 step = XCNEWVEC (tree, nvar);
3590 varexpr = XCNEWVEC (gfc_expr *, nvar);
3591 saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3593 /* Allocate the space for info. */
3594 info = XCNEW (forall_info);
3596 gfc_start_block (&pre);
3597 gfc_init_block (&post);
3598 gfc_init_block (&block);
3601 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3603 gfc_symbol *sym = fa->var->symtree->n.sym;
3605 /* Allocate space for this_forall. */
3606 this_forall = XCNEW (iter_info);
3608 /* Create a temporary variable for the FORALL index. */
3609 tmp = gfc_typenode_for_spec (&sym->ts);
3610 var[n] = gfc_create_var (tmp, sym->name);
3611 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3613 /* Record it in this_forall. */
3614 this_forall->var = var[n];
3616 /* Replace the index symbol's backend_decl with the temporary decl. */
3617 sym->backend_decl = var[n];
3619 /* Work out the start, end and stride for the loop. */
3620 gfc_init_se (&se, NULL);
3621 gfc_conv_expr_val (&se, fa->start);
3622 /* Record it in this_forall. */
3623 this_forall->start = se.expr;
3624 gfc_add_block_to_block (&block, &se.pre);
3627 gfc_init_se (&se, NULL);
3628 gfc_conv_expr_val (&se, fa->end);
3629 /* Record it in this_forall. */
3630 this_forall->end = se.expr;
3631 gfc_make_safe_expr (&se);
3632 gfc_add_block_to_block (&block, &se.pre);
3635 gfc_init_se (&se, NULL);
3636 gfc_conv_expr_val (&se, fa->stride);
3637 /* Record it in this_forall. */
3638 this_forall->step = se.expr;
3639 gfc_make_safe_expr (&se);
3640 gfc_add_block_to_block (&block, &se.pre);
3643 /* Set the NEXT field of this_forall to NULL. */
3644 this_forall->next = NULL;
3645 /* Link this_forall to the info construct. */
3646 if (info->this_loop)
3648 iter_info *iter_tmp = info->this_loop;
3649 while (iter_tmp->next != NULL)
3650 iter_tmp = iter_tmp->next;
3651 iter_tmp->next = this_forall;
3654 info->this_loop = this_forall;
3660 /* Calculate the size needed for the current forall level. */
3661 size = gfc_index_one_node;
3662 for (n = 0; n < nvar; n++)
3664 /* size = (end + step - start) / step. */
3665 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3667 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3669 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3671 tmp = convert (gfc_array_index_type, tmp);
3673 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3677 /* Record the nvar and size of current forall level. */
3683 /* If the mask is .true., consider the FORALL unconditional. */
3684 if (code->expr1->expr_type == EXPR_CONSTANT
3685 && code->expr1->value.logical)
3693 /* First we need to allocate the mask. */
3696 /* As the mask array can be very big, prefer compact boolean types. */
3697 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3698 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3699 size, NULL, &block, &pmask);
3700 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3702 /* Record them in the info structure. */
3703 info->maskindex = maskindex;
3708 /* No mask was specified. */
3709 maskindex = NULL_TREE;
3710 mask = pmask = NULL_TREE;
3713 /* Link the current forall level to nested_forall_info. */
3714 info->prev_nest = nested_forall_info;
3715 nested_forall_info = info;
3717 /* Copy the mask into a temporary variable if required.
3718 For now we assume a mask temporary is needed. */
3721 /* As the mask array can be very big, prefer compact boolean types. */
3722 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3724 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3726 /* Start of mask assignment loop body. */
3727 gfc_start_block (&body);
3729 /* Evaluate the mask expression. */
3730 gfc_init_se (&se, NULL);
3731 gfc_conv_expr_val (&se, code->expr1);
3732 gfc_add_block_to_block (&body, &se.pre);
3734 /* Store the mask. */
3735 se.expr = convert (mask_type, se.expr);
3737 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3738 gfc_add_modify (&body, tmp, se.expr);
3740 /* Advance to the next mask element. */
3741 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3742 maskindex, gfc_index_one_node);
3743 gfc_add_modify (&body, maskindex, tmp);
3745 /* Generate the loops. */
3746 tmp = gfc_finish_block (&body);
3747 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3748 gfc_add_expr_to_block (&block, tmp);
3751 if (code->op == EXEC_DO_CONCURRENT)
3753 gfc_init_block (&body);
3754 cycle_label = gfc_build_label_decl (NULL_TREE);
3755 code->cycle_label = cycle_label;
3756 tmp = gfc_trans_code (code->block->next);
3757 gfc_add_expr_to_block (&body, tmp);
3759 if (TREE_USED (cycle_label))
3761 tmp = build1_v (LABEL_EXPR, cycle_label);
3762 gfc_add_expr_to_block (&body, tmp);
3765 tmp = gfc_finish_block (&body);
3766 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3767 gfc_add_expr_to_block (&block, tmp);
3771 c = code->block->next;
3773 /* TODO: loop merging in FORALL statements. */
3774 /* Now that we've got a copy of the mask, generate the assignment loops. */
3780 /* A scalar or array assignment. DO the simple check for
3781 lhs to rhs dependencies. These make a temporary for the
3782 rhs and form a second forall block to copy to variable. */
3783 need_temp = check_forall_dependencies(c, &pre, &post);
3785 /* Temporaries due to array assignment data dependencies introduce
3786 no end of problems. */
3788 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3789 nested_forall_info, &block);
3792 /* Use the normal assignment copying routines. */
3793 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3795 /* Generate body and loops. */
3796 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3798 gfc_add_expr_to_block (&block, tmp);
3801 /* Cleanup any temporary symtrees that have been made to deal
3802 with dependencies. */
3804 cleanup_forall_symtrees (c);
3809 /* Translate WHERE or WHERE construct nested in FORALL. */
3810 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3813 /* Pointer assignment inside FORALL. */
3814 case EXEC_POINTER_ASSIGN:
3815 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3817 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3818 nested_forall_info, &block);
3821 /* Use the normal assignment copying routines. */
3822 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3824 /* Generate body and loops. */
3825 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3827 gfc_add_expr_to_block (&block, tmp);
3832 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3833 gfc_add_expr_to_block (&block, tmp);
3836 /* Explicit subroutine calls are prevented by the frontend but interface
3837 assignments can legitimately produce them. */
3838 case EXEC_ASSIGN_CALL:
3839 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3840 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3841 gfc_add_expr_to_block (&block, tmp);
3852 /* Restore the original index variables. */
3853 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3854 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3856 /* Free the space for var, start, end, step, varexpr. */
3864 for (this_forall = info->this_loop; this_forall;)
3866 iter_info *next = this_forall->next;
3871 /* Free the space for this forall_info. */
3876 /* Free the temporary for the mask. */
3877 tmp = gfc_call_free (pmask);
3878 gfc_add_expr_to_block (&block, tmp);
3881 pushdecl (maskindex);
3883 gfc_add_block_to_block (&pre, &block);
3884 gfc_add_block_to_block (&pre, &post);
3886 return gfc_finish_block (&pre);
3890 /* Translate the FORALL statement or construct. */
3892 tree gfc_trans_forall (gfc_code * code)
3894 return gfc_trans_forall_1 (code, NULL);
3898 /* Translate the DO CONCURRENT construct. */
3900 tree gfc_trans_do_concurrent (gfc_code * code)
3902 return gfc_trans_forall_1 (code, NULL);
/* Evaluate the WHERE mask expression, copy its value to a temporary.
   If the WHERE construct is nested in FORALL, compute the overall
   temporary needed by the WHERE mask expression multiplied by the
   iterator number of the nested FORALL.
   ME is the WHERE mask expression.
   MASK is the current execution mask upon input, whose sense may or may
   not be inverted as specified by the INVERT argument.
   CMASK is the updated execution mask on output, or NULL if not required.
   PMASK is the pending execution mask on output, or NULL if not required.
   BLOCK is the block in which to place the condition evaluation loops.  */

/* NOTE(review): this listing appears to have lines elided (missing braces,
   declarations and argument tails); the comments below annotate only the
   code that is visible.  */
gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
			 tree mask, bool invert, tree cmask, tree pmask,
			 tree mask_type, stmtblock_t * block)
  stmtblock_t body, body1;
  tree count, cond, mtmp;

  gfc_init_loopinfo (&loop);

  /* Walk the mask expression twice: one chain feeds the scalarizer's
     "lhs" slot, the other the "rhs" slot.  */
  lss = gfc_walk_expr (me);
  rss = gfc_walk_expr (me);

  /* Variable to index the temporary.  */
  count = gfc_create_var (gfc_array_index_type, "count");
  /* Initialize count.  */
  gfc_add_modify (block, count, gfc_index_zero_node);

  gfc_start_block (&body);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  /* A scalar mask expression needs no scalarization loop.  */
  if (lss == gfc_ss_terminator)
      gfc_init_block (&body1);
      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop, &me->where);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);
      gfc_conv_expr (&rse, me);

  /* Variable to evaluate mask condition.  */
  cond = gfc_create_var (mask_type, "cond");
  /* MTMP caches the (possibly inverted) incoming mask element; only
     needed when there is an incoming mask and some output mask.  */
  if (mask && (cmask || pmask))
    mtmp = gfc_create_var (mask_type, "mask");
  else mtmp = NULL_TREE;

  gfc_add_block_to_block (&body1, &lse.pre);
  gfc_add_block_to_block (&body1, &rse.pre);

  /* cond := mask value for the current element.  */
  gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));

  if (mask && (cmask || pmask))
      /* mtmp := incoming mask element, inverted here — presumably under
	 an INVERT check whose line is elided; TODO confirm.  */
      tmp = gfc_build_array_ref (mask, count, NULL);
      tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
      gfc_add_modify (&body1, mtmp, tmp);

      /* cmask := cond AND incoming mask (second AND operand elided).  */
      tmp1 = gfc_build_array_ref (cmask, count, NULL);
      tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
      gfc_add_modify (&body1, tmp1, tmp);

      /* pmask := NOT cond AND incoming mask.  */
      tmp1 = gfc_build_array_ref (pmask, count, NULL);
      tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
      tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
      gfc_add_modify (&body1, tmp1, tmp);

  gfc_add_block_to_block (&body1, &lse.post);
  gfc_add_block_to_block (&body1, &rse.post);

  if (lss == gfc_ss_terminator)
      /* Scalar mask: splice the single evaluation straight in.  */
      gfc_add_block_to_block (&body, &body1);
      /* Increment count.  */
      tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      count, gfc_index_one_node);
      gfc_add_modify (&body1, count, tmp1);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&body, &loop.pre);
      gfc_add_block_to_block (&body, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
	 as tree nodes in SS may not be valid in different scope.  */

  tmp1 = gfc_finish_block (&body);
  /* If the WHERE construct is inside FORALL, fill the full temporary.  */
  if (nested_forall_info != NULL)
    tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);

  gfc_add_expr_to_block (block, tmp1);
/* Translate an assignment statement in a WHERE statement or construct
   statement.  The MASK expression is used to control which elements
   of EXPR1 shall be assigned.  The sense of MASK is specified by
   the INVERT argument.  */

/* NOTE(review): this listing appears to have lines elided (missing braces,
   declarations and argument tails); the comments below annotate only the
   code that is visible.  */
gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
			tree mask, bool invert,
			tree count1, tree count2,
  gfc_ss *lss_section;
  tree index, maskexpr;

  /* A defined assignment.  */
  if (cnext && cnext->resolved_sym)
    return gfc_trans_call (cnext, true, mask, count1, invert);

  /* TODO: handle this special case.
     Special case a single function returning an array.  */
  if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
      tmp = gfc_trans_arrayfunc_assign (expr1, expr2);

  /* Assignment of the form lhs = rhs.  */
  gfc_start_block (&block);

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);

  lss = gfc_walk_expr (expr1);

  /* In each where-assign-stmt, the mask-expr and the variable being
     defined shall be arrays of the same shape.  */
  gcc_assert (lss != gfc_ss_terminator);

  /* The assignment needs scalarization.  */

  /* Find a non-scalar SS from the lhs.  */
  while (lss_section != gfc_ss_terminator
	 && lss_section->info->type != GFC_SS_SECTION)
    lss_section = lss_section->next;

  gcc_assert (lss_section != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);

  rss = gfc_walk_expr (expr2);
  if (rss == gfc_ss_terminator)
      /* The rhs is scalar.  Add a ss for the expression.  */
      rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
      rss->info->where = 1;

  /* Associate the SS with the loop.  */
  gfc_add_ss_to_loop (&loop, lss);
  gfc_add_ss_to_loop (&loop, rss);

  /* Calculate the bounds of the scalarization.  */
  gfc_conv_ss_startstride (&loop);

  /* Resolve any data dependencies in the statement.  */
  gfc_conv_resolve_dependencies (&loop, lss_section, rss);

  /* Setup the scalarizing loops.  */
  gfc_conv_loop_setup (&loop, &expr2->where);

  /* Setup the gfc_se structures.  */
  gfc_copy_loopinfo_to_se (&lse, &loop);
  gfc_copy_loopinfo_to_se (&rse, &loop);

  gfc_mark_ss_chain_used (rss, 1);
  /* No temporary: assign directly through the lhs chain.  */
  if (loop.temp_ss == NULL)
      gfc_mark_ss_chain_used (lss, 1);
      /* A temporary is required (dependency); route the store through it.  */
      lse.ss = loop.temp_ss;
      gfc_mark_ss_chain_used (lss, 3);
      gfc_mark_ss_chain_used (loop.temp_ss, 3);

  /* Start the scalarized loop body.  */
  gfc_start_scalarized_body (&loop, &body);

  /* Translate the expression.  */
  gfc_conv_expr (&rse, expr2);
  if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
    gfc_conv_tmp_array_ref (&lse);
    gfc_conv_expr (&lse, expr1);

  /* Form the mask expression according to the mask.  */
  maskexpr = gfc_build_array_ref (mask, index, NULL);
  /* Invert the sense of the mask when requested (elided condition).  */
    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
				TREE_TYPE (maskexpr), maskexpr);

  /* Use the scalar assignment as is.  */
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
				 loop.temp_ss != NULL, false, true);
  /* Guard the assignment with the mask element.  */
  tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));

  gfc_add_expr_to_block (&body, tmp);

  if (lss == gfc_ss_terminator)
      /* Increment count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &body);

      gcc_assert (lse.ss == gfc_ss_terminator
		  && rse.ss == gfc_ss_terminator);

      if (loop.temp_ss != NULL)
	  /* Increment count1 before finish the main body of a scalarized
	     loop.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count1, gfc_index_one_node);
	  gfc_add_modify (&body, count1, tmp);
	  gfc_trans_scalarized_loop_boundary (&loop, &body);

	  /* We need to copy the temporary to the actual lhs.  */
	  gfc_init_se (&lse, NULL);
	  gfc_init_se (&rse, NULL);
	  gfc_copy_loopinfo_to_se (&lse, &loop);
	  gfc_copy_loopinfo_to_se (&rse, &loop);

	  rse.ss = loop.temp_ss;

	  gfc_conv_tmp_array_ref (&rse);
	  gfc_conv_expr (&lse, expr1);

	  gcc_assert (lse.ss == gfc_ss_terminator
		      && rse.ss == gfc_ss_terminator);

	  /* Form the mask expression according to the mask tree list.  */
	  maskexpr = gfc_build_array_ref (mask, index, NULL);
	    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
					TREE_TYPE (maskexpr), maskexpr);

	  /* Use the scalar assignment as is.  */
	  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
	  tmp = build3_v (COND_EXPR, maskexpr, tmp,
			  build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&body, tmp);

	  /* Increment count2.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count2,
				 gfc_index_one_node);
	  gfc_add_modify (&body, count2, tmp);

	  /* Increment count1.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count1,
				 gfc_index_one_node);
	  gfc_add_modify (&body, count1, tmp);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body);

      /* Wrap the whole thing up.  */
      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);
      gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
/* Translate the WHERE construct or statement.
   This function can be called iteratively to translate the nested WHERE
   construct or statement.
   MASK is the control mask.  */

/* NOTE(review): this listing appears to have lines elided (missing braces,
   switch labels and argument tails); the comments below annotate only the
   code that is visible.  */
gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
		   forall_info * nested_forall_info, stmtblock_t * block)
  stmtblock_t inner_size_body;
  tree inner_size, size;
  tree count1, count2;
  tree pcmask = NULL_TREE;
  tree ppmask = NULL_TREE;
  tree cmask = NULL_TREE;
  tree pmask = NULL_TREE;
  gfc_actual_arglist *arg;

  /* the WHERE statement or the WHERE construct statement.  */
  cblock = code->block;

  /* As the mask array can be very big, prefer compact boolean types.  */
  mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);

  /* Determine which temporary masks are needed.  */
      /* One clause: No ELSEWHEREs.  */
      need_cmask = (cblock->next != 0);
  else if (cblock->block->block)
      /* Three or more clauses: Conditional ELSEWHEREs.  */
  else if (cblock->next)
      /* Two clauses, the first non-empty.  */
      need_pmask = (mask != NULL_TREE
		    && cblock->block->next != 0);
  else if (!cblock->block->next)
      /* Two clauses, both empty.  */
      /* Two clauses, the first empty, the second non-empty.  */
      need_cmask = (cblock->block->expr1 != 0);

  if (need_cmask || need_pmask)
      /* Calculate the size of temporary needed by the mask-expr.  */
      gfc_init_block (&inner_size_body);
      inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
					    &inner_size_body, &lss, &rss);

      gfc_free_ss_chain (lss);
      gfc_free_ss_chain (rss);

      /* Calculate the total size of temporary needed.  */
      size = compute_overall_iter_number (nested_forall_info, inner_size,
					  &inner_size_body, block);

      /* Check whether the size is negative.  Clamp to zero if so.  */
      cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
			      gfc_index_zero_node);
      size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
			      cond, gfc_index_zero_node, size);
      size = gfc_evaluate_now (size, block);

      /* Allocate temporary for WHERE mask if needed.  */
	cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,

      /* Allocate temporary for !mask if needed.  */
	pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,

  /* Each time around this loop, the where clause is conditional
     on the value of mask and invert, which are updated at the
     bottom of the loop.  */

      /* Has mask-expr.  */

	  /* Ensure that the WHERE mask will be evaluated exactly once.
	     If there are no statements in this WHERE/ELSEWHERE clause,
	     then we don't need to update the control mask (cmask).
	     If this is the last clause of the WHERE construct, then
	     we don't need to update the pending control mask (pmask).  */
	    gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
				     cblock->next ? cmask : NULL_TREE,
				     cblock->block ? pmask : NULL_TREE,
	    gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
				     (cblock->next || cblock->block)
				     ? cmask : NULL_TREE,
				     NULL_TREE, mask_type, block);

	  /* It's a final elsewhere-stmt.  No mask-expr is present.  */

      /* The body of this where clause are controlled by cmask with
	 sense specified by invert.  */

      /* Get the assignment statement of a WHERE statement, or the first
	 statement in where-body-construct of a WHERE construct.  */
      cnext = cblock->next;

	    /* WHERE assignment statement.  */
	    case EXEC_ASSIGN_CALL:
	      /* Defined assignment: locate lhs/rhs among the actual args.  */
	      arg = cnext->ext.actual;
	      expr1 = expr2 = NULL;
	      for (; arg; arg = arg->next)
	      expr1 = cnext->expr1;
	      expr2 = cnext->expr2;
	      if (nested_forall_info != NULL)
		  need_temp = gfc_check_dependency (expr1, expr2, 0);
		  if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
		    gfc_trans_assign_need_temp (expr1, expr2,
						nested_forall_info, block);
		      /* Variables to control maskexpr.  */
		      count1 = gfc_create_var (gfc_array_index_type, "count1");
		      count2 = gfc_create_var (gfc_array_index_type, "count2");
		      gfc_add_modify (block, count1, gfc_index_zero_node);
		      gfc_add_modify (block, count2, gfc_index_zero_node);

		      tmp = gfc_trans_where_assign (expr1, expr2,
		      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
		      gfc_add_expr_to_block (block, tmp);

		  /* Variables to control maskexpr.  */
		  count1 = gfc_create_var (gfc_array_index_type, "count1");
		  count2 = gfc_create_var (gfc_array_index_type, "count2");
		  gfc_add_modify (block, count1, gfc_index_zero_node);
		  gfc_add_modify (block, count2, gfc_index_zero_node);

		  tmp = gfc_trans_where_assign (expr1, expr2,
		  gfc_add_expr_to_block (block, tmp);

	    /* WHERE or WHERE construct is part of a where-body-construct.  */
	      /* Recurse for the nested WHERE, passing the current cmask.  */
	      gfc_trans_where_2 (cnext, cmask, invert,
				 nested_forall_info, block);

	  /* The next statement within the same where-body-construct.  */
	  cnext = cnext->next;

      /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt.  */
      cblock = cblock->block;
      if (mask == NULL_TREE)
	  /* If we're the initial WHERE, we can simply invert the sense
	     of the current mask to obtain the "mask" for the remaining
	     clauses.  */
      /* Otherwise, for nested WHERE's we need to use the pending mask.  */

  /* If we allocated a pending mask array, deallocate it now.  */
      tmp = gfc_call_free (ppmask);
      gfc_add_expr_to_block (block, tmp);

  /* If we allocated a current mask array, deallocate it now.  */
      tmp = gfc_call_free (pcmask);
      gfc_add_expr_to_block (block, tmp);
/* Translate a simple WHERE construct or statement without dependencies.
   CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
   is the mask condition, and EBLOCK if non-NULL is the "else" clause.
   Currently both CBLOCK and EBLOCK are restricted to single assignments.  */

/* NOTE(review): this listing appears to have lines elided (missing braces
   and else-arms); the comments below annotate only the visible code.  */
gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
  stmtblock_t block, body;
  gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
  tree tmp, cexpr, tstmt, estmt;
  gfc_ss *css, *tdss, *tsss;
  gfc_se cse, tdse, tsse, edse, esse;

  /* Allow the scalarizer to workshare simple where loops.  */
  if (ompws_flags & OMPWS_WORKSHARE_FLAG)
    ompws_flags |= OMPWS_SCALARIZER_WS;

  /* Pull the condition and the single then/else assignments apart.  */
  cond = cblock->expr1;
  tdst = cblock->next->expr1;
  tsrc = cblock->next->expr2;
  edst = eblock ? eblock->next->expr1 : NULL;
  esrc = eblock ? eblock->next->expr2 : NULL;

  gfc_start_block (&block);
  gfc_init_loopinfo (&loop);

  /* Handle the condition.  */
  gfc_init_se (&cse, NULL);
  css = gfc_walk_expr (cond);
  gfc_add_ss_to_loop (&loop, css);

  /* Handle the then-clause.  */
  gfc_init_se (&tdse, NULL);
  gfc_init_se (&tsse, NULL);
  tdss = gfc_walk_expr (tdst);
  tsss = gfc_walk_expr (tsrc);
  if (tsss == gfc_ss_terminator)
      /* Scalar rhs: wrap it in a scalar SS so it joins the loop.  */
      tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
      tsss->info->where = 1;
  gfc_add_ss_to_loop (&loop, tdss);
  gfc_add_ss_to_loop (&loop, tsss);

      /* Handle the else clause.  */
      gfc_init_se (&edse, NULL);
      gfc_init_se (&esse, NULL);
      edss = gfc_walk_expr (edst);
      esss = gfc_walk_expr (esrc);
      if (esss == gfc_ss_terminator)
	  esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
	  esss->info->where = 1;
      gfc_add_ss_to_loop (&loop, edss);
      gfc_add_ss_to_loop (&loop, esss);

  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &tdst->where);

  gfc_mark_ss_chain_used (css, 1);
  gfc_mark_ss_chain_used (tdss, 1);
  gfc_mark_ss_chain_used (tsss, 1);

      gfc_mark_ss_chain_used (edss, 1);
      gfc_mark_ss_chain_used (esss, 1);

  gfc_start_scalarized_body (&loop, &body);

  gfc_copy_loopinfo_to_se (&cse, &loop);
  gfc_copy_loopinfo_to_se (&tdse, &loop);
  gfc_copy_loopinfo_to_se (&tsse, &loop);

      gfc_copy_loopinfo_to_se (&edse, &loop);
      gfc_copy_loopinfo_to_se (&esse, &loop);

  gfc_conv_expr (&cse, cond);
  gfc_add_block_to_block (&body, &cse.pre);

  gfc_conv_expr (&tsse, tsrc);
  if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
    gfc_conv_tmp_array_ref (&tdse);
    gfc_conv_expr (&tdse, tdst);

      gfc_conv_expr (&esse, esrc);
      if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
	gfc_conv_tmp_array_ref (&edse);
	gfc_conv_expr (&edse, edst);

  /* Build "if (cond) then-assign else else-assign" for one element; the
     two standard-mandated loops are coalesced into this single loop.  */
  tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
  estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
	 : build_empty_stmt (input_location);
  tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
  gfc_add_expr_to_block (&body, tmp);
  gfc_add_block_to_block (&body, &cse.post);

  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&block, &loop.pre);
  gfc_add_block_to_block (&block, &loop.post);
  gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
/* As the WHERE or WHERE construct statement can be nested, we call
   gfc_trans_where_2 to do the translation, and pass the initial
   NULL values for both the control mask and the pending control mask.  */

/* NOTE(review): this listing appears to have lines elided (missing braces,
   declarations and some conditions); the comments below annotate only the
   visible code.  */
gfc_trans_where (gfc_code * code)
  cblock = code->block;
      && cblock->next->op == EXEC_ASSIGN
      && !cblock->next->next)
      eblock = cblock->block;

	  /* A simple "WHERE (cond) x = y" statement or block is
	     dependence free if cond is not dependent upon writing x,
	     and the source y is unaffected by the destination x.  */
	  if (!gfc_check_dependency (cblock->next->expr1,
	      && !gfc_check_dependency (cblock->next->expr1,
					cblock->next->expr2, 0))
	    return gfc_trans_where_3 (cblock, NULL);

      else if (!eblock->expr1
	       && eblock->next->op == EXEC_ASSIGN
	       && !eblock->next->next)
	  /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
	     block is dependence free if cond is not dependent on writes
	     to x1 and x2, y1 is not dependent on writes to x2, and y2
	     is not dependent on writes to x1, and both y's are not
	     dependent upon their own x's.  In addition to this, the
	     final two dependency checks below exclude all but the same
	     array reference if the where and elswhere destinations
	     are the same.  In short, this is VERY conservative and this
	     is needed because the two loops, required by the standard
	     are coalesced in gfc_trans_where_3.  */
	  if (!gfc_check_dependency(cblock->next->expr1,
	      && !gfc_check_dependency(eblock->next->expr1,
	      && !gfc_check_dependency(cblock->next->expr1,
				       eblock->next->expr2, 1)
	      && !gfc_check_dependency(eblock->next->expr1,
				       cblock->next->expr2, 1)
	      && !gfc_check_dependency(cblock->next->expr1,
				       cblock->next->expr2, 1)
	      && !gfc_check_dependency(eblock->next->expr1,
				       eblock->next->expr2, 1)
	      && !gfc_check_dependency(cblock->next->expr1,
				       eblock->next->expr1, 0)
	      && !gfc_check_dependency(eblock->next->expr1,
				       cblock->next->expr1, 0))
	    return gfc_trans_where_3 (cblock, eblock);

  /* General case: fall back to the full masked translation.  */
  gfc_start_block (&block);

  gfc_trans_where_2 (code, NULL, false, NULL, &block);

  return gfc_finish_block (&block);
4709 /* CYCLE a DO loop. The label decl has already been created by
4710 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4711 node at the head of the loop. We must mark the label as used. */
4714 gfc_trans_cycle (gfc_code * code)
4718 cycle_label = code->ext.which_construct->cycle_label;
4719 gcc_assert (cycle_label);
4721 TREE_USED (cycle_label) = 1;
4722 return build1_v (GOTO_EXPR, cycle_label);
4726 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4727 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4731 gfc_trans_exit (gfc_code * code)
4735 exit_label = code->ext.which_construct->exit_label;
4736 gcc_assert (exit_label);
4738 TREE_USED (exit_label) = 1;
4739 return build1_v (GOTO_EXPR, exit_label);
4743 /* Translate the ALLOCATE statement. */
4746 gfc_trans_allocate (gfc_code * code)
4768 tree memsize = NULL_TREE;
4769 tree classexpr = NULL_TREE;
4771 if (!code->ext.alloc.list)
4774 stat = tmp = memsz = NULL_TREE;
4775 label_errmsg = label_finish = errmsg = errlen = NULL_TREE;
4777 gfc_init_block (&block);
4778 gfc_init_block (&post);
4780 /* STAT= (and maybe ERRMSG=) is present. */
4784 tree gfc_int4_type_node = gfc_get_int_type (4);
4785 stat = gfc_create_var (gfc_int4_type_node, "stat");
4787 /* ERRMSG= only makes sense with STAT=. */
4790 gfc_init_se (&se, NULL);
4791 se.want_pointer = 1;
4792 gfc_conv_expr_lhs (&se, code->expr2);
4794 errlen = se.string_length;
4798 errmsg = null_pointer_node;
4799 errlen = build_int_cst (gfc_charlen_type_node, 0);
4802 /* GOTO destinations. */
4803 label_errmsg = gfc_build_label_decl (NULL_TREE);
4804 label_finish = gfc_build_label_decl (NULL_TREE);
4805 TREE_USED (label_finish) = 0;
4811 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4813 expr = gfc_copy_expr (al->expr);
4815 if (expr->ts.type == BT_CLASS)
4816 gfc_add_data_component (expr);
4818 gfc_init_se (&se, NULL);
4820 se.want_pointer = 1;
4821 se.descriptor_only = 1;
4822 gfc_conv_expr (&se, expr);
4824 /* Evaluate expr3 just once if not a variable. */
4825 if (al == code->ext.alloc.list
4826 && al->expr->ts.type == BT_CLASS
4828 && code->expr3->ts.type == BT_CLASS
4829 && code->expr3->expr_type != EXPR_VARIABLE)
4831 gfc_init_se (&se_sz, NULL);
4832 gfc_conv_expr_reference (&se_sz, code->expr3);
4833 gfc_conv_class_to_class (&se_sz, code->expr3,
4834 code->expr3->ts, false);
4835 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4836 gfc_add_block_to_block (&se.post, &se_sz.post);
4837 classexpr = build_fold_indirect_ref_loc (input_location,
4839 classexpr = gfc_evaluate_now (classexpr, &se.pre);
4840 memsize = gfc_vtable_size_get (classexpr);
4841 memsize = fold_convert (sizetype, memsize);
4845 class_expr = classexpr;
4848 if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, label_finish,
4849 memsz, &nelems, code->expr3))
4851 /* A scalar or derived type. */
4853 /* Determine allocate size. */
4854 if (al->expr->ts.type == BT_CLASS
4856 && memsz == NULL_TREE)
4858 if (code->expr3->ts.type == BT_CLASS)
4860 sz = gfc_copy_expr (code->expr3);
4861 gfc_add_vptr_component (sz);
4862 gfc_add_size_component (sz);
4863 gfc_init_se (&se_sz, NULL);
4864 gfc_conv_expr (&se_sz, sz);
4869 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4871 else if (al->expr->ts.type == BT_CHARACTER
4872 && al->expr->ts.deferred && code->expr3)
4874 if (!code->expr3->ts.u.cl->backend_decl)
4876 /* Convert and use the length expression. */
4877 gfc_init_se (&se_sz, NULL);
4878 if (code->expr3->expr_type == EXPR_VARIABLE
4879 || code->expr3->expr_type == EXPR_CONSTANT)
4881 gfc_conv_expr (&se_sz, code->expr3);
4882 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4884 = gfc_evaluate_now (se_sz.string_length, &se.pre);
4885 gfc_add_block_to_block (&se.pre, &se_sz.post);
4886 memsz = se_sz.string_length;
4888 else if (code->expr3->mold
4889 && code->expr3->ts.u.cl
4890 && code->expr3->ts.u.cl->length)
4892 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
4893 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4894 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4895 gfc_add_block_to_block (&se.pre, &se_sz.post);
4900 /* This is would be inefficient and possibly could
4901 generate wrong code if the result were not stored
4903 if (slen3 == NULL_TREE)
4905 gfc_conv_expr (&se_sz, code->expr3);
4906 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4907 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
4908 gfc_add_block_to_block (&post, &se_sz.post);
4909 slen3 = gfc_evaluate_now (se_sz.string_length,
4916 /* Otherwise use the stored string length. */
4917 memsz = code->expr3->ts.u.cl->backend_decl;
4918 tmp = al->expr->ts.u.cl->backend_decl;
4920 /* Store the string length. */
4921 if (tmp && TREE_CODE (tmp) == VAR_DECL)
4922 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4925 /* Convert to size in bytes, using the character KIND. */
4926 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
4927 tmp = TYPE_SIZE_UNIT (tmp);
4928 memsz = fold_build2_loc (input_location, MULT_EXPR,
4929 TREE_TYPE (tmp), tmp,
4930 fold_convert (TREE_TYPE (tmp), memsz));
4932 else if (al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
4934 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
4935 gfc_init_se (&se_sz, NULL);
4936 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
4937 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4938 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4939 gfc_add_block_to_block (&se.pre, &se_sz.post);
4940 /* Store the string length. */
4941 tmp = al->expr->ts.u.cl->backend_decl;
4942 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4944 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4945 tmp = TYPE_SIZE_UNIT (tmp);
4946 memsz = fold_build2_loc (input_location, MULT_EXPR,
4947 TREE_TYPE (tmp), tmp,
4948 fold_convert (TREE_TYPE (se_sz.expr),
4951 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4952 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4953 else if (memsz == NULL_TREE)
4954 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4956 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4958 memsz = se.string_length;
4960 /* Convert to size in bytes, using the character KIND. */
4961 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4962 tmp = TYPE_SIZE_UNIT (tmp);
4963 memsz = fold_build2_loc (input_location, MULT_EXPR,
4964 TREE_TYPE (tmp), tmp,
4965 fold_convert (TREE_TYPE (tmp), memsz));
4968 /* Allocate - for non-pointers with re-alloc checking. */
4969 if (gfc_expr_attr (expr).allocatable)
4970 gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
4971 stat, errmsg, errlen, label_finish, expr);
4973 gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);
4975 if (al->expr->ts.type == BT_DERIVED
/* NOTE(review): this span is the tail of the ALLOCATE statement translation
   (gfc_trans_allocate); the function's opening lines are outside this chunk,
   and the numbered listing appears to drop some lines (braces, `if' headers),
   so the comments below describe only the logic that is visible here.  */
4976 && expr->ts.u.derived->attr.alloc_comp)
/* Derived type with allocatable components: nullify them so later
   (re)allocation or deallocation sees a defined state.  */
4978 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4979 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4980 gfc_add_expr_to_block (&se.pre, tmp);
4982 else if (al->expr->ts.type == BT_CLASS)
4984 /* With class objects, it is best to play safe and null the
4985 memory because we cannot know if dynamic types have allocatable
4986 components or not. */
4987 tmp = build_call_expr_loc (input_location,
4988 builtin_decl_explicit (BUILT_IN_MEMSET),
4989 3, se.expr, integer_zero_node, memsz);
4990 gfc_add_expr_to_block (&se.pre, tmp);
4994 gfc_add_block_to_block (&block, &se.pre);
4996 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
/* If the runtime STAT value is non-zero, branch to the ERRMSG handling
   label; gfc_unlikely marks the failure path as cold.  */
4999 tmp = build1_v (GOTO_EXPR, label_errmsg);
5000 parm = fold_build2_loc (input_location, NE_EXPR,
5001 boolean_type_node, stat,
5002 build_int_cst (TREE_TYPE (stat), 0));
5003 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5004 gfc_unlikely (parm), tmp,
5005 build_empty_stmt (input_location));
5006 gfc_add_expr_to_block (&block, tmp);
5009 /* We need the vptr of CLASS objects to be initialized. */
5010 e = gfc_copy_expr (al->expr);
5011 if (e->ts.type == BT_CLASS)
5013 gfc_expr *lhs, *rhs;
5016 lhs = gfc_expr_to_initialize (e);
5017 gfc_add_vptr_component (lhs);
5019 if (class_expr != NULL_TREE)
5021 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5022 gfc_init_se (&lse, NULL);
5023 lse.want_pointer = 1;
5024 gfc_conv_expr (&lse, lhs);
5025 tmp = gfc_class_vptr_get (class_expr);
5026 gfc_add_modify (&block, lse.expr,
5027 fold_convert (TREE_TYPE (lse.expr), tmp));
5029 else if (code->expr3 && code->expr3->ts.type == BT_CLASS)
5031 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5032 rhs = gfc_copy_expr (code->expr3);
5033 gfc_add_vptr_component (rhs);
5034 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5035 gfc_add_expr_to_block (&block, tmp);
5036 gfc_free_expr (rhs);
5037 rhs = gfc_expr_to_initialize (e);
5041 /* VPTR is fixed at compile time. */
/* Select the typespec that determines the vtab: the SOURCE=/MOLD=
   expression, the object's own derived type, or the type-spec given
   on the ALLOCATE statement itself.  */
5045 ts = &code->expr3->ts;
5046 else if (e->ts.type == BT_DERIVED)
5048 else if (code->ext.alloc.ts.type == BT_DERIVED)
5049 ts = &code->ext.alloc.ts;
5050 else if (e->ts.type == BT_CLASS)
5051 ts = &CLASS_DATA (e)->ts;
5055 if (ts->type == BT_DERIVED)
/* Point the object's _vptr at the statically known vtab.  */
5057 vtab = gfc_find_derived_vtab (ts->u.derived);
5059 gfc_init_se (&lse, NULL);
5060 lse.want_pointer = 1;
5061 gfc_conv_expr (&lse, lhs);
5062 tmp = gfc_build_addr_expr (NULL_TREE,
5063 gfc_get_symbol_decl (vtab));
5064 gfc_add_modify (&block, lse.expr,
5065 fold_convert (TREE_TYPE (lse.expr), tmp));
5068 gfc_free_expr (lhs);
5073 if (code->expr3 && !code->expr3->mold)
5075 /* Initialization via SOURCE block
5076 (or static default initializer). */
5077 gfc_expr *rhs = gfc_copy_expr (code->expr3);
5078 if (class_expr != NULL_TREE)
/* Run-time polymorphic copy from the already-evaluated class SOURCE.  */
5081 to = TREE_OPERAND (se.expr, 0);
5083 tmp = gfc_copy_class_to_class (class_expr, to, nelems);
5085 else if (al->expr->ts.type == BT_CLASS)
5087 gfc_actual_arglist *actual;
5090 gfc_ref *ref, *dataref;
5092 /* Do a polymorphic deep copy. */
/* Build the actual argument list (source, destination viewed as class)
   for a call to the type-bound `_copy' procedure.  */
5093 actual = gfc_get_actual_arglist ();
5094 actual->expr = gfc_copy_expr (rhs);
5095 if (rhs->ts.type == BT_CLASS)
5096 gfc_add_data_component (actual->expr);
5097 actual->next = gfc_get_actual_arglist ();
5098 actual->next->expr = gfc_copy_expr (al->expr);
5099 actual->next->expr->ts.type = BT_CLASS;
5100 gfc_add_data_component (actual->next->expr);
5103 /* Make sure we go up through the reference chain to
5104 the _data reference, where the arrayspec is found. */
5105 for (ref = actual->next->expr->ref; ref; ref = ref->next)
5106 if (ref->type == REF_COMPONENT
5107 && strcmp (ref->u.c.component->name, "_data") == 0)
5110 if (dataref && dataref->u.c.component->as)
5114 gfc_ref *ref = dataref->next;
5115 ref->u.ar.type = AR_SECTION;
5116 /* We have to set up the array reference to give ranges
5117 in all dimensions and ensure that the end and stride
5118 are set so that the copy can be scalarized. */
5120 for (; dim < dataref->u.c.component->as->rank; dim++)
5122 ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
5123 if (ref->u.ar.end[dim] == NULL)
5125 ref->u.ar.end[dim] = ref->u.ar.start[dim];
5126 temp = gfc_get_int_expr (gfc_default_integer_kind,
5127 &al->expr->where, 1);
5128 ref->u.ar.start[dim] = temp;
5130 temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
5131 gfc_copy_expr (ref->u.ar.start[dim]));
5132 temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
5133 &al->expr->where, 1),
/* Resolve the `_copy' procedure: dynamically through the source's
   _vptr for class sources, statically from the vtab otherwise.  */
5137 if (rhs->ts.type == BT_CLASS)
5139 ppc = gfc_copy_expr (rhs);
5140 gfc_add_vptr_component (ppc);
5143 ppc = gfc_lval_expr_from_sym
5144 (gfc_find_derived_vtab (rhs->ts.u.derived));
5145 gfc_add_component_ref (ppc, "_copy");
5147 ppc_code = gfc_get_code ();
5148 ppc_code->resolved_sym = ppc->symtree->n.sym;
5149 /* Although '_copy' is set to be elemental in class.c, it is
5150 not staying that way. Find out why, sometime.... */
5151 ppc_code->resolved_sym->attr.elemental = 1;
5152 ppc_code->ext.actual = actual;
5153 ppc_code->expr1 = ppc;
5154 ppc_code->op = EXEC_CALL;
5155 /* Since '_copy' is elemental, the scalarizer will take care
5156 of arrays in gfc_trans_call. */
5157 tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
5158 gfc_free_statements (ppc_code);
5160 else if (expr3 != NULL_TREE)
/* Character SOURCE: copy the string into the fresh allocation.  */
5162 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5163 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
5164 slen3, expr3, code->expr3->ts.kind);
5169 /* Switch off automatic reallocation since we have just done
5171 int realloc_lhs = gfc_option.flag_realloc_lhs;
5172 gfc_option.flag_realloc_lhs = 0;
5173 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
5175 gfc_option.flag_realloc_lhs = realloc_lhs;
5177 gfc_free_expr (rhs);
5178 gfc_add_expr_to_block (&block, tmp);
5180 else if (code->expr3 && code->expr3->mold
5181 && code->expr3->ts.type == BT_CLASS)
5183 /* Since the _vptr has already been assigned to the allocate
5184 object, we can use gfc_copy_class_to_class in its
5185 initialization mode. */
5186 tmp = TREE_OPERAND (se.expr, 0);
5187 tmp = gfc_copy_class_to_class (NULL_TREE, tmp, nelems);
5188 gfc_add_expr_to_block (&block, tmp);
5191 gfc_free_expr (expr);
/* STAT/ERRMSG handling: this label is the target of the failure
   GOTOs emitted above.  */
5197 tmp = build1_v (LABEL_EXPR, label_errmsg);
5198 gfc_add_expr_to_block (&block, tmp);
5201 /* ERRMSG - only useful if STAT is present. */
5202 if (code->expr1 && code->expr2)
5204 const char *msg = "Attempt to allocate an allocated object";
5205 tree slen, dlen, errmsg_str;
5206 stmtblock_t errmsg_block;
5208 gfc_init_block (&errmsg_block);
5210 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5211 gfc_add_modify (&errmsg_block, errmsg_str,
5212 gfc_build_addr_expr (pchar_type_node,
5213 gfc_build_localized_cstring_const (msg)));
5215 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5216 dlen = gfc_get_expr_charlen (code->expr2);
5217 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
/* Copy the (possibly truncated) message into the user's ERRMSG
   variable, guarded by STAT != 0.  */
5220 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5221 slen, errmsg_str, gfc_default_character_kind);
5222 dlen = gfc_finish_block (&errmsg_block);
5224 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5225 build_int_cst (TREE_TYPE (stat), 0));
5227 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5229 gfc_add_expr_to_block (&block, tmp);
/* Emit the finish label only if some earlier branch targeted it.  */
5235 if (TREE_USED (label_finish))
5237 tmp = build1_v (LABEL_EXPR, label_finish);
5238 gfc_add_expr_to_block (&block, tmp);
/* Store the runtime status into the user's STAT variable.  */
5241 gfc_init_se (&se, NULL);
5242 gfc_conv_expr_lhs (&se, code->expr1);
5243 tmp = convert (TREE_TYPE (se.expr), stat);
5244 gfc_add_modify (&block, se.expr, tmp);
5247 gfc_add_block_to_block (&block, &se.post);
5248 gfc_add_block_to_block (&block, &post);
5250 return gfc_finish_block (&block);
5254 /* Translate a DEALLOCATE statement. */
/* NOTE(review): the numbered listing appears to omit lines here (the
   declaration line, several `if' headers and braces), so the comments
   below describe only the logic visible in this chunk.

   Builds a GENERIC statement block for DEALLOCATE: a per-object
   deallocation loop, followed by ERRMSG handling and an assignment of
   the runtime status to the STAT variable (code->expr1 = STAT,
   code->expr2 = ERRMSG).  */
5257 gfc_trans_deallocate (gfc_code *code)
5261 tree apstat, pstat, stat, errmsg, errlen, tmp;
5262 tree label_finish, label_errmsg;
5265 pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
5266 label_finish = label_errmsg = NULL_TREE;
5268 gfc_start_block (&block);
5270 /* Count the number of failed deallocations. If deallocate() was
5271 called with STAT= , then set STAT to the count. If deallocate
5272 was called with ERRMSG, then set ERRMG to a string. */
5275 tree gfc_int4_type_node = gfc_get_int_type (4);
5277 stat = gfc_create_var (gfc_int4_type_node, "stat");
5278 pstat = gfc_build_addr_expr (NULL_TREE, stat);
5280 /* GOTO destinations. */
5281 label_errmsg = gfc_build_label_decl (NULL_TREE);
5282 label_finish = gfc_build_label_decl (NULL_TREE);
5283 TREE_USED (label_finish) = 0;
5286 /* Set ERRMSG - only needed if STAT is available. */
5287 if (code->expr1 && code->expr2)
5289 gfc_init_se (&se, NULL);
5290 se.want_pointer = 1;
5291 gfc_conv_expr_lhs (&se, code->expr2);
5293 errlen = se.string_length;
/* Translate each object on the DEALLOCATE list.  */
5296 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5298 gfc_expr *expr = gfc_copy_expr (al->expr);
5299 gcc_assert (expr->expr_type == EXPR_VARIABLE);
5301 if (expr->ts.type == BT_CLASS)
5302 gfc_add_data_component (expr);
5304 gfc_init_se (&se, NULL);
5305 gfc_start_block (&se.pre);
5307 se.want_pointer = 1;
5308 se.descriptor_only = 1;
5309 gfc_conv_expr (&se, expr);
/* Array (or coarray) case: free allocatable components first, then
   the array itself via the descriptor.  */
5311 if (expr->rank || gfc_is_coarray (expr))
5313 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
5316 gfc_ref *last = NULL;
5317 for (ref = expr->ref; ref; ref = ref->next)
5318 if (ref->type == REF_COMPONENT)
5321 /* Do not deallocate the components of a derived type
5322 ultimate pointer component. */
5323 if (!(last && last->u.c.component->attr.pointer)
5324 && !(!last && expr->symtree->n.sym->attr.pointer))
5326 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5328 gfc_add_expr_to_block (&se.pre, tmp);
5331 tmp = gfc_array_deallocate (se.expr, pstat, errmsg, errlen,
5332 label_finish, expr);
5333 gfc_add_expr_to_block (&se.pre, tmp);
/* Scalar case: deallocate with status, then null the pointer.  */
5337 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5339 gfc_add_expr_to_block (&se.pre, tmp);
5341 /* Set to zero after deallocation. */
5342 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5344 build_int_cst (TREE_TYPE (se.expr), 0));
5345 gfc_add_expr_to_block (&se.pre, tmp);
5347 if (al->expr->ts.type == BT_CLASS)
5349 /* Reset _vptr component to declared type. */
5350 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
5351 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
5352 gfc_add_vptr_component (lhs);
5353 rhs = gfc_lval_expr_from_sym (vtab);
5354 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5355 gfc_add_expr_to_block (&se.pre, tmp);
5356 gfc_free_expr (lhs);
5357 gfc_free_expr (rhs);
/* On failure (STAT != 0) branch to the ERRMSG label; gfc_unlikely
   marks the failure path as cold.  */
5365 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5366 build_int_cst (TREE_TYPE (stat), 0));
5367 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5368 gfc_unlikely (cond),
5369 build1_v (GOTO_EXPR, label_errmsg),
5370 build_empty_stmt (input_location));
5371 gfc_add_expr_to_block (&se.pre, tmp);
5374 tmp = gfc_finish_block (&se.pre);
5375 gfc_add_expr_to_block (&block, tmp);
5376 gfc_free_expr (expr);
/* Target of the per-object failure branches above.  */
5381 tmp = build1_v (LABEL_EXPR, label_errmsg);
5382 gfc_add_expr_to_block (&block, tmp);
5385 /* Set ERRMSG - only needed if STAT is available. */
5386 if (code->expr1 && code->expr2)
5388 const char *msg = "Attempt to deallocate an unallocated object";
5389 stmtblock_t errmsg_block;
5390 tree errmsg_str, slen, dlen, cond;
5392 gfc_init_block (&errmsg_block);
5394 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5395 gfc_add_modify (&errmsg_block, errmsg_str,
5396 gfc_build_addr_expr (pchar_type_node,
5397 gfc_build_localized_cstring_const (msg)));
5398 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5399 dlen = gfc_get_expr_charlen (code->expr2);
/* Copy the message into the user's ERRMSG variable, guarded below
   by STAT != 0.  */
5401 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5402 slen, errmsg_str, gfc_default_character_kind);
5403 tmp = gfc_finish_block (&errmsg_block);
5405 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5406 build_int_cst (TREE_TYPE (stat), 0));
5407 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5408 gfc_unlikely (cond), tmp,
5409 build_empty_stmt (input_location));
5411 gfc_add_expr_to_block (&block, tmp);
/* Emit the finish label only when STAT is present and some branch
   targeted the label.  */
5414 if (code->expr1 && TREE_USED (label_finish))
5416 tmp = build1_v (LABEL_EXPR, label_finish);
5417 gfc_add_expr_to_block (&block, tmp);
/* Store the runtime status into the user's STAT variable.  */
5423 gfc_init_se (&se, NULL);
5424 gfc_conv_expr_lhs (&se, code->expr1);
5425 tmp = convert (TREE_TYPE (se.expr), stat);
5426 gfc_add_modify (&block, se.expr, tmp);
5429 return gfc_finish_block (&block);
5432 #include "gt-fortran-trans-stmt.h"