1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
/* Per-iterator bookkeeping, apparently one node per FORALL index,
   chained through NEXT.  (Several member lines are elided in this
   listing.)  */
40 typedef struct iter_info
46 struct iter_info *next;
/* State for one FORALL construct; nested constructs link outward
   through PREV_NEST.  */
50 typedef struct forall_info
57 struct forall_info *prev_nest;
/* Forward declaration: WHERE translation is needed by code that appears
   before its definition later in this file.  */
61 static void gfc_trans_where_2 (gfc_code *, tree, bool,
62 forall_info *, stmtblock_t *);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code * code)
/* CODE->here carries the statement label; emit the matching GCC label
   declaration wrapped in a LABEL_EXPR.  */
69 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
/* On return SE->expr holds the decl whose GFC_DECL_STRING_LEN /
   GFC_DECL_ASSIGN_ADDR auxiliaries the callers below read.  */
78 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only variables marked by a Fortran ASSIGN statement may come here.  */
80 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
81 gfc_conv_expr (se, expr);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se->expr) == COMPONENT_REF)
84 se->expr = TREE_OPERAND (se->expr, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se->expr) == INDIRECT_REF)
87 se->expr = TREE_OPERAND (se->expr, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code * code)
102 /* Start a new block. */
103 gfc_init_se (&se, NULL);
104 gfc_start_block (&se.pre);
105 gfc_conv_label_variable (&se, code->expr1);
/* LEN/ADDR are the auxiliary variables attached to the ASSIGNed
   label variable's decl.  */
107 len = GFC_DECL_STRING_LEN (se.expr);
108 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
110 label_tree = gfc_get_label_decl (code->label1);
/* A branch-target label: store its address and the sentinel length -1
   (gfc_trans_goto below checks for this marker).  */
112 if (code->label1->defined == ST_LABEL_TARGET)
114 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
115 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT: store the format string and its
   real character length instead.  */
119 gfc_expr *format = code->label1->format;
121 label_len = format->value.character.length;
122 len_tree = build_int_cst (gfc_charlen_type_node, label_len);
123 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
124 format->value.character.string);
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 gfc_add_modify (&se.pre, len, len_tree);
129 gfc_add_modify (&se.pre, addr, label_tree);
131 return gfc_finish_block (&se.pre);
134 /* Translate a GOTO statement.  Handles both the plain GOTO (label known
135 at compile time) and the assigned GOTO (label stored at run time by an
136 ASSIGN statement). */
137 gfc_trans_goto (gfc_code * code)
139 locus loc = code->loc;
/* Plain GOTO: jump straight to the statically known label.  */
145 if (code->label1 != NULL)
146 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO.  */
149 gfc_init_se (&se, NULL);
150 gfc_start_block (&se.pre);
151 gfc_conv_label_variable (&se, code->expr1);
/* The stored length is -1 only when a target label (not a FORMAT) was
   assigned -- see gfc_trans_label_assign; check that at run time.  */
152 tmp = GFC_DECL_STRING_LEN (se.expr);
153 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
154 build_int_cst (TREE_TYPE (tmp), -1));
155 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
156 "Assigned label is not a target label");
158 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
/* Emit an indirect jump through the stored label address.  */
166 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 gfc_trans_entry (gfc_code * code)
177 return build1_v (LABEL_EXPR, code->ext.entry->label);
181 /* Replace a gfc_ss structure by another both in the gfc_se struct
182 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
183 to replace a variable ss by the corresponding temporary.  OLD_SS is
184 freed; NEW_SS takes over its position in both chains as well as its
185 loop membership. */
186 replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
188 gfc_ss **sess, **loopss;
190 /* The old_ss is a ss for a single variable. */
191 gcc_assert (old_ss->info->type == GFC_SS_SECTION);
/* Walk the se chain with a pointer-to-pointer so NEW_SS can be spliced
   in place; OLD_SS must be present.  */
193 for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
196 gcc_assert (*sess != gfc_ss_terminator);
199 new_ss->next = old_ss->next;
/* Same splice in the loop's chain.  */
202 for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
203 loopss = &((*loopss)->loop_chain))
204 if (*loopss == old_ss)
206 gcc_assert (*loopss != gfc_ss_terminator);
209 new_ss->loop_chain = old_ss->loop_chain;
210 new_ss->loop = old_ss->loop;
212 gfc_free_ss (old_ss);
216 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
217 elemental subroutines. Make temporaries for output arguments if any such
218 dependencies are found. Output arguments are chosen because internal_unpack
219 can be used, as is, to copy the result back to the variable. */
221 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
222 gfc_symbol * sym, gfc_actual_arglist * arg,
223 gfc_dep_check check_variable)
225 gfc_actual_arglist *arg0;
227 gfc_formal_arglist *formal;
/* Nothing to do for a non-scalarized call.  */
235 if (loopse->ss == NULL)
240 formal = sym->formal;
242 /* Loop over all the arguments testing for dependencies. */
243 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
249 /* Obtain the info structure for the current argument. */
250 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
251 if (ss->info->expr == e)
254 /* If there is a dependency, create a temporary and use it
255 instead of the variable.  NOTE(review): FSYM can be NULL when the
256 formal list is exhausted; the elided condition lines presumably
257 guard the dereferences below -- confirm against the full source. */
256 fsym = formal ? formal->sym : NULL;
257 if (e->expr_type == EXPR_VARIABLE
259 && fsym->attr.intent != INTENT_IN
260 && gfc_check_fncall_dependency (e, fsym->attr.intent,
261 sym, arg0, check_variable))
263 tree initial, temptype;
264 stmtblock_t temp_post;
/* Build the replacement temporary's scalarizer entry and splice it in
   for the original variable's.  */
267 tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
269 gfc_mark_ss_chain_used (tmp_ss, 1);
270 tmp_ss->info->expr = ss->info->expr;
271 replace_ss (loopse, ss, tmp_ss);
273 /* Obtain the argument descriptor for unpacking. */
274 gfc_init_se (&parmse, NULL);
275 parmse.want_pointer = 1;
276 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
277 gfc_add_block_to_block (&se->pre, &parmse.pre);
279 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
280 initialize the array temporary with a copy of the values. */
281 if (fsym->attr.intent == INTENT_INOUT
282 || (fsym->ts.type ==BT_DERIVED
283 && fsym->attr.intent == INTENT_OUT))
284 initial = parmse.expr;
285 /* For class expressions, we always initialize with the copy of
287 else if (e->ts.type == BT_CLASS)
288 initial = parmse.expr;
292 if (e->ts.type != BT_CLASS)
294 /* Find the type of the temporary to create; we don't use the type
295 of e itself as this breaks for subcomponent-references in e
296 (where the type of e is that of the final reference, but
297 parmse.expr's type corresponds to the full derived-type). */
298 /* TODO: Fix this somehow so we don't need a temporary of the whole
299 array but instead only the components referenced. */
300 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
301 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
302 temptype = TREE_TYPE (temptype);
303 temptype = gfc_get_element_type (temptype);
307 /* For class arrays signal that the size of the dynamic type has to
308 be obtained from the vtable, using the 'initial' expression. */
309 temptype = NULL_TREE;
311 /* Generate the temporary. Cleaning up the temporary should be the
312 very last thing done, so we add the code to a new block and add it
313 to se->post as last instructions. */
314 size = gfc_create_var (gfc_array_index_type, NULL);
315 data = gfc_create_var (pvoid_type_node, NULL);
316 gfc_init_block (&temp_post);
317 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
318 temptype, initial, false, true,
319 false, &arg->expr->where);
320 gfc_add_modify (&se->pre, size, tmp);
321 tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
322 gfc_add_modify (&se->pre, data, tmp);
324 /* Update other ss' delta. */
325 gfc_set_delta (loopse->loop);
327 /* Copy the result back using unpack..... */
328 if (e->ts.type != BT_CLASS)
329 tmp = build_call_expr_loc (input_location,
330 gfor_fndecl_in_unpack, 2, parmse.expr, data);
333 /* ... except for class results where the copy is
335 tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
336 tmp = gfc_conv_descriptor_data_get (tmp);
337 tmp = build_call_expr_loc (input_location,
338 builtin_decl_explicit (BUILT_IN_MEMCPY),
/* The copy-back runs after the scalarized loop body.  */
341 gfc_add_expr_to_block (&se->post, tmp);
343 /* parmse.pre is already added above. */
344 gfc_add_block_to_block (&se->post, &parmse.post);
345 gfc_add_block_to_block (&se->post, &temp_post);
351 /* Translate the CALL statement. Builds a call to an F95 subroutine.
352 MASK/COUNT1/INVERT are used when the call is generated inside a
353 masked context (see the maskexpr handling below). */
354 gfc_trans_call (gfc_code * code, bool dependency_check,
355 tree mask, tree count1, bool invert)
359 int has_alternate_specifier;
360 gfc_dep_check check_variable;
361 tree index = NULL_TREE;
362 tree maskexpr = NULL_TREE;
365 /* A CALL starts a new block because the actual arguments may have to
366 be evaluated first. */
367 gfc_init_se (&se, NULL);
368 gfc_start_block (&se.pre);
370 gcc_assert (code->resolved_sym);
372 ss = gfc_ss_terminator;
/* For elemental subroutines, walk the actual arguments; a non-trivial
   chain means at least one argument is array valued.  */
373 if (code->resolved_sym->attr.elemental)
374 ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
375 gfc_get_proc_ifc_for_expr (code->expr1),
378 /* Is not an elemental subroutine call with array valued arguments. */
379 if (ss == gfc_ss_terminator)
382 /* Translate the call. */
383 has_alternate_specifier
384 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
387 /* A subroutine without side-effect, by definition, does nothing! */
388 TREE_SIDE_EFFECTS (se.expr) = 1;
390 /* Chain the pieces together and return the block. */
/* With an alternate return specifier the following SELECT statement
   dispatches on the returned value; store it into that variable.  */
391 if (has_alternate_specifier)
393 gfc_code *select_code;
395 select_code = code->next;
396 gcc_assert(select_code->op == EXEC_SELECT);
397 sym = select_code->expr1->symtree->n.sym;
398 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
399 if (sym->backend_decl == NULL)
400 sym->backend_decl = gfc_get_symbol_decl (sym);
401 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
404 gfc_add_expr_to_block (&se.pre, se.expr);
406 gfc_add_block_to_block (&se.pre, &se.post);
411 /* An elemental subroutine call with array valued arguments has
419 /* gfc_walk_elemental_function_args renders the ss chain in the
420 reverse order to the actual argument order. */
421 ss = gfc_reverse_ss (ss);
423 /* Initialize the loop. */
424 gfc_init_se (&loopse, NULL);
425 gfc_init_loopinfo (&loop);
426 gfc_add_ss_to_loop (&loop, ss);
428 gfc_conv_ss_startstride (&loop);
429 /* TODO: gfc_conv_loop_setup generates a temporary for vector
430 subscripts. This could be prevented in the elemental case
431 as temporaries are handled separatedly
432 (below in gfc_conv_elemental_dependencies). */
433 gfc_conv_loop_setup (&loop, &code->expr1->where);
434 gfc_mark_ss_chain_used (ss, 1);
436 /* Convert the arguments, checking for dependencies. */
437 gfc_copy_loopinfo_to_se (&loopse, &loop);
440 /* For operator assignment, do dependency checking. */
441 if (dependency_check)
442 check_variable = ELEM_CHECK_VARIABLE;
444 check_variable = ELEM_DONT_CHECK_VARIABLE;
446 gfc_init_se (&depse, NULL);
447 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
448 code->ext.actual, check_variable);
450 gfc_add_block_to_block (&loop.pre, &depse.pre);
451 gfc_add_block_to_block (&loop.post, &depse.post);
453 /* Generate the loop body. */
454 gfc_start_scalarized_body (&loop, &body);
455 gfc_init_block (&block);
459 /* Form the mask expression according to the mask. */
461 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* INVERT flips the sense of the mask (e.g. the ELSEWHERE branch).  */
463 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
464 TREE_TYPE (maskexpr), maskexpr);
467 /* Add the subroutine call to the block. */
468 gfc_conv_procedure_call (&loopse, code->resolved_sym,
469 code->ext.actual, code->expr1, NULL);
/* Masked case: only call when the mask element is set, and keep
   COUNT1 in step with the mask index.  */
473 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
474 build_empty_stmt (input_location));
475 gfc_add_expr_to_block (&loopse.pre, tmp);
476 tmp = fold_build2_loc (input_location, PLUS_EXPR,
477 gfc_array_index_type,
478 count1, gfc_index_one_node);
479 gfc_add_modify (&loopse.pre, count1, tmp);
482 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
484 gfc_add_block_to_block (&block, &loopse.pre);
485 gfc_add_block_to_block (&block, &loopse.post);
487 /* Finish up the loop block and the loop. */
488 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
489 gfc_trans_scalarizing_loops (&loop, &body);
490 gfc_add_block_to_block (&se.pre, &loop.pre);
491 gfc_add_block_to_block (&se.pre, &loop.post);
492 gfc_add_block_to_block (&se.pre, &se.post);
493 gfc_cleanup_loop (&loop);
496 return gfc_finish_block (&se.pre);
500 /* Translate the RETURN statement.  With an expression (alternate
501 return), the value is stored into the fake result decl first. */
503 gfc_trans_return (gfc_code * code)
511 /* If code->expr is not NULL, this return statement must appear
512 in a subroutine and current_fake_result_decl has already
515 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: warn and fall back to a plain return.  */
518 gfc_warning ("An alternate return at %L without a * dummy argument",
519 &code->expr1->where);
520 return gfc_generate_return ();
523 /* Start a new block for this statement. */
524 gfc_init_se (&se, NULL);
525 gfc_start_block (&se.pre);
527 gfc_conv_expr (&se, code->expr1);
529 /* Note that the actually returned expression is a simple value and
530 does not depend on any pointers or such; thus we can clean-up with
531 se.post before returning. */
532 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
533 result, fold_convert (TREE_TYPE (result),
535 gfc_add_expr_to_block (&se.pre, tmp);
536 gfc_add_block_to_block (&se.pre, &se.post);
538 tmp = gfc_generate_return ();
539 gfc_add_expr_to_block (&se.pre, tmp);
540 return gfc_finish_block (&se.pre);
/* Expressionless RETURN.  */
543 return gfc_generate_return ();
547 /* Translate the PAUSE statement. We have to translate this statement
548 to a runtime library call.  The stop-code may be absent, an integer,
549 or a character expression; each maps to a different library entry. */
551 gfc_trans_pause (gfc_code * code)
553 tree gfc_int4_type_node = gfc_get_int_type (4);
557 /* Start a new block for this statement. */
558 gfc_init_se (&se, NULL);
559 gfc_start_block (&se.pre);
/* No stop-code: call the string variant with a NULL string.  */
562 if (code->expr1 == NULL)
564 tmp = build_int_cst (gfc_int4_type_node, 0);
565 tmp = build_call_expr_loc (input_location,
566 gfor_fndecl_pause_string, 2,
567 build_int_cst (pchar_type_node, 0), tmp);
/* Integer stop-code.  */
569 else if (code->expr1->ts.type == BT_INTEGER)
571 gfc_conv_expr (&se, code->expr1);
572 tmp = build_call_expr_loc (input_location,
573 gfor_fndecl_pause_numeric, 1,
574 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass pointer and length.  */
578 gfc_conv_expr_reference (&se, code->expr1);
579 tmp = build_call_expr_loc (input_location,
580 gfor_fndecl_pause_string, 2,
581 se.expr, se.string_length);
584 gfc_add_expr_to_block (&se.pre, tmp);
586 gfc_add_block_to_block (&se.pre, &se.post);
588 return gfc_finish_block (&se.pre);
592 /* Translate the STOP statement. We have to translate this statement
593 to a runtime library call.  ERROR_STOP selects the ERROR STOP
594 variants; with -fcoarray=lib the coarray library entry points are
595 used instead of the plain ones. */
596 gfc_trans_stop (gfc_code *code, bool error_stop)
598 tree gfc_int4_type_node = gfc_get_int_type (4);
602 /* Start a new block for this statement. */
603 gfc_init_se (&se, NULL);
604 gfc_start_block (&se.pre);
606 if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop)
608 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
609 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
610 tmp = build_call_expr_loc (input_location, tmp, 0);
611 gfc_add_expr_to_block (&se.pre, tmp);
613 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
614 gfc_add_expr_to_block (&se.pre, tmp);
/* No stop-code: use the string variant with a NULL string.  */
617 if (code->expr1 == NULL)
619 tmp = build_int_cst (gfc_int4_type_node, 0);
620 tmp = build_call_expr_loc (input_location,
622 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
623 ? gfor_fndecl_caf_error_stop_str
624 : gfor_fndecl_error_stop_string)
625 : gfor_fndecl_stop_string,
626 2, build_int_cst (pchar_type_node, 0), tmp);
/* Integer stop-code.  */
628 else if (code->expr1->ts.type == BT_INTEGER)
630 gfc_conv_expr (&se, code->expr1);
631 tmp = build_call_expr_loc (input_location,
633 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
634 ? gfor_fndecl_caf_error_stop
635 : gfor_fndecl_error_stop_numeric)
636 : gfor_fndecl_stop_numeric_f08, 1,
637 fold_convert (gfc_int4_type_node, se.expr));
/* Character stop-code: pass pointer and length.  */
641 gfc_conv_expr_reference (&se, code->expr1);
642 tmp = build_call_expr_loc (input_location,
644 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
645 ? gfor_fndecl_caf_error_stop_str
646 : gfor_fndecl_error_stop_string)
647 : gfor_fndecl_stop_string,
648 2, se.expr, se.string_length);
651 gfc_add_expr_to_block (&se.pre, tmp);
653 gfc_add_block_to_block (&se.pre, &se.post);
655 return gfc_finish_block (&se.pre);
/* Translate LOCK/UNLOCK statements.  For single-image execution only the
   STAT= and LOCK_ACQUIRED= variables need to be set.  */
660 gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
663 tree stat = NULL_TREE, lock_acquired = NULL_TREE;
665 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
666 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
667 if (!code->expr2 && !code->expr4 && gfc_option.coarray != GFC_FCOARRAY_LIB)
670 gfc_init_se (&se, NULL)
671 gfc_start_block (&se.pre);
/* STAT= variable (expr2).  */
675 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
676 gfc_init_se (&argse, NULL);
677 gfc_conv_expr_val (&argse, code->expr2);
/* LOCK_ACQUIRED= variable (expr4).  */
683 gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
684 gfc_init_se (&argse, NULL);
685 gfc_conv_expr_val (&argse, code->expr4);
686 lock_acquired = argse.expr;
/* Single image: the lock always succeeds, so STAT is 0 ...  */
689 if (stat != NULL_TREE)
690 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
/* ... and LOCK_ACQUIRED is set (value line elided in this listing).  */
692 if (lock_acquired != NULL_TREE)
693 gfc_add_modify (&se.pre, lock_acquired,
694 fold_convert (TREE_TYPE (lock_acquired),
697 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL, SYNC IMAGES and SYNC MEMORY statements (TYPE
   distinguishes them).  Handles STAT=/ERRMSG= arguments and, with
   -fcoarray=lib, calls into the coarray runtime.  */
702 gfc_trans_sync (gfc_code *code, gfc_exec_op type)
706 tree images = NULL_TREE, stat = NULL_TREE,
707 errmsg = NULL_TREE, errmsglen = NULL_TREE;
709 /* Short cut: For single images without bound checking or without STAT=,
710 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
711 if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
712 && gfc_option.coarray != GFC_FCOARRAY_LIB)
715 gfc_init_se (&se, NULL);
716 gfc_start_block (&se.pre);
/* Scalar image-set (SYNC IMAGES (n)).  */
718 if (code->expr1 && code->expr1->rank == 0)
720 gfc_init_se (&argse, NULL);
721 gfc_conv_expr_val (&argse, code->expr1);
/* STAT= variable (expr2).  */
727 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
728 gfc_init_se (&argse, NULL);
729 gfc_conv_expr_val (&argse, code->expr2);
733 stat = null_pointer_node;
/* ERRMSG= (expr3) is only passed through to the library.  */
735 if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB
736 && type != EXEC_SYNC_MEMORY)
738 gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
739 gfc_init_se (&argse, NULL);
740 gfc_conv_expr (&argse, code->expr3);
741 gfc_conv_string_parameter (&argse);
742 errmsg = gfc_build_addr_expr (NULL, argse.expr);
743 errmsglen = argse.string_length;
745 else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
747 errmsg = null_pointer_node;
748 errmsglen = build_int_cst (integer_type_node, 0);
751 /* Check SYNC IMAGES(imageset) for valid image index.
752 FIXME: Add a check for image-set arrays. */
753 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
754 && code->expr1->rank == 0)
/* Single image: the only valid image number is 1.  */
757 if (gfc_option.coarray != GFC_FCOARRAY_LIB)
758 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
759 images, build_int_cst (TREE_TYPE (images), 1));
/* Library case: must be within 1 .. num_images.  */
763 cond = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
764 images, gfort_gvar_caf_num_images);
765 cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
767 build_int_cst (TREE_TYPE (images), 1));
768 cond = fold_build2_loc (input_location, TRUTH_AND_EXPR,
769 boolean_type_node, cond, cond2);
771 gfc_trans_runtime_check (true, false, cond, &se.pre,
772 &code->expr1->where, "Invalid image number "
774 fold_convert (integer_type_node, se.expr));
777 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
778 image control statements SYNC IMAGES and SYNC ALL. */
779 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
781 tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
782 tmp = build_call_expr_loc (input_location, tmp, 0);
783 gfc_add_expr_to_block (&se.pre, tmp);
/* Without the coarray library (or for SYNC MEMORY) no runtime call is
   needed; just clear STAT.  */
786 if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
788 /* Set STAT to zero. */
790 gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
792 else if (type == EXEC_SYNC_ALL)
794 /* SYNC ALL => stat == null_pointer_node
795 SYNC ALL(stat=s) => stat has an integer type
797 If "stat" has the wrong integer type, use a temp variable of
798 the right type and later cast the result back into "stat". */
799 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
801 if (TREE_TYPE (stat) == integer_type_node)
802 stat = gfc_build_addr_expr (NULL, stat);
804 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
805 3, stat, errmsg, errmsglen);
806 gfc_add_expr_to_block (&se.pre, tmp);
/* Wrong-kind STAT: call with a temporary and copy back.  */
810 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
812 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
813 3, gfc_build_addr_expr (NULL, tmp_stat),
815 gfc_add_expr_to_block (&se.pre, tmp);
817 gfc_add_modify (&se.pre, stat,
818 fold_convert (TREE_TYPE (stat), tmp_stat));
825 gcc_assert (type == EXEC_SYNC_IMAGES);
/* SYNC IMAGES (*): length -1 and no image array.  */
829 len = build_int_cst (integer_type_node, -1);
830 images = null_pointer_node;
/* Scalar image-set: pass the address of the single value.  */
832 else if (code->expr1->rank == 0)
834 len = build_int_cst (integer_type_node, 1);
835 images = gfc_build_addr_expr (NULL_TREE, images);
/* Array image-set.  */
840 if (code->expr1->ts.kind != gfc_c_int_kind)
841 gfc_fatal_error ("Sorry, only support for integer kind %d "
842 "implemented for image-set at %L",
843 gfc_c_int_kind, &code->expr1->where);
845 gfc_conv_array_parameter (&se, code->expr1,
846 gfc_walk_expr (code->expr1), true, NULL,
/* Convert the byte size into an element count.  */
850 tmp = gfc_typenode_for_spec (&code->expr1->ts);
851 if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
852 tmp = gfc_get_element_type (tmp);
854 len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
855 TREE_TYPE (len), len,
856 fold_convert (TREE_TYPE (len),
857 TYPE_SIZE_UNIT (tmp)));
858 len = fold_convert (integer_type_node, len);
861 /* SYNC IMAGES(imgs) => stat == null_pointer_node
862 SYNC IMAGES(imgs,stat=s) => stat has an integer type
864 If "stat" has the wrong integer type, use a temp variable of
865 the right type and later cast the result back into "stat". */
866 if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
868 if (TREE_TYPE (stat) == integer_type_node)
869 stat = gfc_build_addr_expr (NULL, stat);
871 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
872 5, fold_convert (integer_type_node, len),
873 images, stat, errmsg, errmsglen);
874 gfc_add_expr_to_block (&se.pre, tmp);
/* Wrong-kind STAT: call with a temporary and copy back.  */
878 tree tmp_stat = gfc_create_var (integer_type_node, "stat");
880 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
881 5, fold_convert (integer_type_node, len),
882 images, gfc_build_addr_expr (NULL, tmp_stat),
884 gfc_add_expr_to_block (&se.pre, tmp);
886 gfc_add_modify (&se.pre, stat,
887 fold_convert (TREE_TYPE (stat), tmp_stat));
891 return gfc_finish_block (&se.pre);
895 /* Generate GENERIC for the IF construct. This function also deals with
896 the simple IF statement, because the front end translates the IF
897 statement into an IF construct.
929 where COND_S is the simplified version of the predicate. PRE_COND_S
930 are the pre side-effects produced by the translation of the
932 We need to build the chain recursively otherwise we run into
933 problems with folding incomplete statements. */
936 gfc_trans_if_1 (gfc_code * code)
943 /* Check for an unconditional ELSE clause. */
945 return gfc_trans_code (code->next);
947 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
948 gfc_init_se (&if_se, NULL);
949 gfc_start_block (&if_se.pre);
951 /* Calculate the IF condition expression.  Temporarily point the
952 backend locus at the condition for better diagnostics/debug info. */
952 if (code->expr1->where.lb)
954 gfc_save_backend_locus (&saved_loc);
955 gfc_set_backend_locus (&code->expr1->where);
958 gfc_conv_expr_val (&if_se, code->expr1);
960 if (code->expr1->where.lb)
961 gfc_restore_backend_locus (&saved_loc);
963 /* Translate the THEN clause. */
964 stmt = gfc_trans_code (code->next);
966 /* Translate the ELSE clause (recursing for ELSE IF chains). */
968 elsestmt = gfc_trans_if_1 (code->block);
970 elsestmt = build_empty_stmt (input_location);
972 /* Build the condition expression and add it to the condition block. */
973 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
974 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
977 gfc_add_expr_to_block (&if_se.pre, stmt);
979 /* Finish off this statement. */
980 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation: wraps gfc_trans_if_1 and provides the
   exit label that EXIT statements inside the construct jump to.  */
984 gfc_trans_if (gfc_code * code)
989 /* Create exit label so it is available for trans'ing the body code. */
990 exit_label = gfc_build_label_decl (NULL_TREE);
991 code->exit_label = exit_label;
993 /* Translate the actual code in code->block. */
994 gfc_init_block (&body);
995 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
997 /* Add exit label. */
998 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1000 return gfc_finish_block (&body);
1004 /* Translate an arithmetic IF expression.
1006 IF (cond) label1, label2, label3 translates to
1018 An optimized version can be generated in case of equal labels.
1019 E.g., if label1 is equal to label2, we can translate it to
1028 gfc_trans_arithmetic_if (gfc_code * code)
1036 /* Start a new block. */
1037 gfc_init_se (&se, NULL);
1038 gfc_start_block (&se.pre);
1040 /* Pre-evaluate COND. */
1041 gfc_conv_expr_val (&se, code->expr1);
1042 se.expr = gfc_evaluate_now (se.expr, &se.pre);
1044 /* Build something to compare with. */
1045 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
1047 if (code->label1->value != code->label2->value)
1049 /* If (cond < 0) take branch1 else take branch2.
1050 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1051 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1052 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 collapses the < and > cases into one != test.  */
1054 if (code->label1->value != code->label3->value)
1055 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
1058 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1061 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1062 tmp, branch1, branch2);
/* label1 == label2: the <= side is a single unconditional jump.  */
1065 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
1067 if (code->label1->value != code->label3->value
1068 && code->label2->value != code->label3->value)
1070 /* if (cond <= 0) take branch1 else take branch2. */
1071 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
1072 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1074 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1075 tmp, branch1, branch2);
1078 /* Append the COND_EXPR to the evaluation of COND, and return. */
1079 gfc_add_expr_to_block (&se.pre, branch1);
1080 return gfc_finish_block (&se.pre);
1084 /* Translate a CRITICAL block.  With -fcoarray=lib the body is bracketed
1085 by runtime critical-section enter/leave calls. */
1086 gfc_trans_critical (gfc_code *code)
1091 gfc_start_block (&block);
1093 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1095 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_critical, 0);
1096 gfc_add_expr_to_block (&block, tmp);
/* Translate the statements inside the construct.  */
1099 tmp = gfc_trans_code (code->block->next);
1100 gfc_add_expr_to_block (&block, tmp);
1102 if (gfc_option.coarray == GFC_FCOARRAY_LIB)
1104 tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_end_critical,
1106 gfc_add_expr_to_block (&block, tmp);
1110 return gfc_finish_block (&block);
1114 /* Do proper initialization for ASSOCIATE names.  Four cases are
1115 distinguished below: arrays needing descriptor setup, CLASS arrays,
1116 scalar pointer association, and plain value assignment. */
1117 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
1123 gcc_assert (sym->assoc);
1124 e = sym->assoc->target;
/* Is the target a scalar CLASS expression or a CLASS array ref?  */
1126 class_target = (e->expr_type == EXPR_VARIABLE)
1127 && (gfc_is_class_scalar_expr (e)
1128 || gfc_is_class_array_ref (e, NULL));
1130 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1131 to array temporary) for arrays with either unknown shape or if associating
1133 if (sym->attr.dimension && !class_target
1134 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
1140 desc = sym->backend_decl;
1142 /* If association is to an expression, evaluate it and create temporary.
1143 Otherwise, get descriptor of target for pointer assignment. */
1144 gfc_init_se (&se, NULL);
1145 ss = gfc_walk_expr (e);
1146 if (sym->assoc->variable)
/* Variable target: write the descriptor directly into DESC.  */
1148 se.direct_byref = 1;
1151 gfc_conv_expr_descriptor (&se, e, ss);
1153 /* If we didn't already do the pointer assignment, set associate-name
1154 descriptor to the one generated for the temporary. */
1155 if (!sym->assoc->variable)
1159 gfc_add_modify (&se.pre, desc, se.expr);
1161 /* The generated descriptor has lower bound zero (as array
1162 temporary), shift bounds so we get lower bounds of 1. */
1163 for (dim = 0; dim < e->rank; ++dim)
1164 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
1165 dim, gfc_index_one_node);
1168 /* Done, register stuff as init / cleanup code. */
1169 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
1170 gfc_finish_block (&se.post));
1173 /* CLASS arrays just need the descriptor to be directly assigned. */
1174 else if (class_target && sym->attr.dimension)
1178 gfc_init_se (&se, NULL);
1179 se.descriptor_only = 1;
1180 gfc_conv_expr (&se, e);
1182 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
1183 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));
1185 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
1187 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1188 gfc_finish_block (&se.post));
1191 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1192 else if (gfc_is_associate_pointer (sym))
1196 gcc_assert (!sym->attr.dimension);
1198 gfc_init_se (&se, NULL);
1199 gfc_conv_expr (&se, e);
/* Store the target's address into the associate-name pointer.  */
1201 tmp = TREE_TYPE (sym->backend_decl);
1202 tmp = gfc_build_addr_expr (tmp, se.expr);
1203 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
1205 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
1206 gfc_finish_block (&se.post));
1209 /* Do a simple assignment. This is for scalar expressions, where we
1210 can simply use expression assignment. */
1215 lhs = gfc_lval_expr_from_sym (sym);
1216 tmp = gfc_trans_assignment (lhs, e, false, true);
1217 gfc_add_init_cleanup (block, tmp, NULL_TREE);
1222 /* Translate a BLOCK construct. This is basically what we would do for a
1223 procedure body: set up locals, translate the code, then wrap it with
1224 the deferred-variable init/cleanup and ASSOCIATE setup. */
1226 gfc_trans_block_construct (gfc_code* code)
1230 gfc_wrapped_block block;
1233 gfc_association_list *ass;
1235 ns = code->ext.block.ns;
1237 sym = ns->proc_name;
1240 /* Process local variables. */
1241 gcc_assert (!sym->tlink);
1243 gfc_process_block_locals (ns);
1245 /* Generate code including exit-label. */
1246 gfc_init_block (&body);
1247 exit_label = gfc_build_label_decl (NULL_TREE);
1248 code->exit_label = exit_label;
1249 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
1250 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
1252 /* Finish everything. */
1253 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
1254 gfc_trans_deferred_vars (sym, &block);
/* Initialize each ASSOCIATE name declared by this BLOCK.  */
1255 for (ass = code->ext.block.assoc; ass; ass = ass->next)
1256 trans_associate_var (ass->st->n.sym, &block);
1258 return gfc_finish_wrapped_block (&block);
1262 /* Translate the simple DO construct. This is where the loop variable has
1263 integer type and step +-1. We can't use this in the general case
1264 because integer overflow and floating point errors could give incorrect
1266 We translate a do loop from:
1268 DO dovar = from, to, step
1274 [Evaluate loop bounds and step]
1276 if ((step > 0) ? (dovar <= to) : (dovar >= to))
1282 cond = (dovar == to);
1284 if (cond) goto end_label;
1289 This helps the optimizers by avoiding the extra induction variable
1290 used in the general case. */
/* Translate a "simple" DO loop: the loop variable has integer type and
   STEP is the compile-time constant +1 or -1 (the caller, gfc_trans_do,
   guarantees this).  DOVAR/FROM/TO/STEP are already-translated trees;
   the loop is appended to PBLOCK.  EXIT_COND, if non-NULL, exits the
   loop early (used for I/O result/error conditions; see below).  */
1293 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1294 tree from, tree to, tree step, tree exit_cond)
1300 tree saved_dovar = NULL;
1305 type = TREE_TYPE (dovar);
1307 loc = code->ext.iterator->start->where.lb->location;
1309 /* Initialize the DO variable: dovar = from. */
1310 gfc_add_modify_loc (loc, pblock, dovar,
1311 fold_convert (TREE_TYPE(dovar), from));
1313 /* Save value for do-tinkering checking. */
1314 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1316 saved_dovar = gfc_create_var (type, ".saved_dovar");
1317 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1320 /* Cycle and exit statements are implemented with gotos. */
1321 cycle_label = gfc_build_label_decl (NULL_TREE);
1322 exit_label = gfc_build_label_decl (NULL_TREE);
1324 /* Put the labels where they can be found later. See gfc_trans_do(). */
1325 code->cycle_label = cycle_label;
1326 code->exit_label = exit_label;
1329 gfc_start_block (&body);
1331 /* Main loop body. */
1332 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1333 gfc_add_expr_to_block (&body, tmp);
1335 /* Label for cycle statements (if needed). */
1336 if (TREE_USED (cycle_label))
1338 tmp = build1_v (LABEL_EXPR, cycle_label);
1339 gfc_add_expr_to_block (&body, tmp);
1342 /* Check whether someone has modified the loop variable. */
1343 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1345 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1346 dovar, saved_dovar);
1347 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1348 "Loop variable has been modified");
1351 /* Exit the loop if there is an I/O result condition or error. */
1354 tmp = build1_v (GOTO_EXPR, exit_label);
1355 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1357 build_empty_stmt (loc));
1358 gfc_add_expr_to_block (&body, tmp);
/* The "dovar == to" test is evaluated and SAVED before the increment
   below, so any wrap-around of the final increment cannot affect the
   exit decision (see the scheme in the comment above the function).  */
1361 /* Evaluate the loop condition. */
1362 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1364 cond = gfc_evaluate_now_loc (loc, cond, &body);
1366 /* Increment the loop variable. */
1367 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1368 gfc_add_modify_loc (loc, &body, dovar, tmp);
1370 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1371 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1373 /* The loop exit. */
1374 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1375 TREE_USED (exit_label) = 1;
1376 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1377 cond, tmp, build_empty_stmt (loc));
1378 gfc_add_expr_to_block (&body, tmp);
1380 /* Finish the loop body. */
1381 tmp = gfc_finish_block (&body);
1382 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1384 /* Only execute the loop if the number of iterations is positive. */
1385 if (tree_int_cst_sgn (step) > 0)
1386 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1389 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1391 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1392 build_empty_stmt (loc));
1393 gfc_add_expr_to_block (pblock, tmp);
1395 /* Add the exit label. */
1396 tmp = build1_v (LABEL_EXPR, exit_label);
1397 gfc_add_expr_to_block (pblock, tmp);
1399 return gfc_finish_block (pblock);
1402 /* Translate the DO construct. This obviously is one of the most
1403 important ones to get right with any compiler, but especially
1406 We special case some loop forms as described in gfc_trans_simple_do.
1407 For other cases we implement them with a separate loop count,
1408 as described in the standard.
1410 We translate a do loop from:
1412 DO dovar = from, to, step
1418 [evaluate loop bounds and step]
1419 empty = (step > 0 ? to < from : to > from);
1420 countm1 = (to - from) / step;
1422 if (empty) goto exit_label;
1428 if (countm1 == 0) goto exit_label;
1433 countm1 is an unsigned integer. It is equal to the loop count minus one,
1434 because the loop count itself can overflow. */
/* General DO-loop translation (see the block comment above).  Loops that
   cannot use the simple scheme are counted with COUNTM1, an unsigned
   variable equal to the trip count minus one, so the count itself
   cannot overflow.  EXIT_COND, if non-NULL, exits the loop early.  */
1437 gfc_trans_do (gfc_code * code, tree exit_cond)
1441 tree saved_dovar = NULL;
1457 gfc_start_block (&block);
1459 loc = code->ext.iterator->start->where.lb->location;
1461 /* Evaluate all the expressions in the iterator. */
1462 gfc_init_se (&se, NULL);
1463 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1464 gfc_add_block_to_block (&block, &se.pre);
1466 type = TREE_TYPE (dovar);
1468 gfc_init_se (&se, NULL);
1469 gfc_conv_expr_val (&se, code->ext.iterator->start);
1470 gfc_add_block_to_block (&block, &se.pre);
1471 from = gfc_evaluate_now (se.expr, &block);
1473 gfc_init_se (&se, NULL);
1474 gfc_conv_expr_val (&se, code->ext.iterator->end);
1475 gfc_add_block_to_block (&block, &se.pre);
1476 to = gfc_evaluate_now (se.expr, &block);
1478 gfc_init_se (&se, NULL);
1479 gfc_conv_expr_val (&se, code->ext.iterator->step);
1480 gfc_add_block_to_block (&block, &se.pre);
1481 step = gfc_evaluate_now (se.expr, &block);
1483 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1485 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1486 build_zero_cst (type));
1487 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1488 "DO step value is zero");
1491 /* Special case simple loops. */
1492 if (TREE_CODE (type) == INTEGER_TYPE
1493 && (integer_onep (step)
1494 || tree_int_cst_equal (step, integer_minus_one_node)))
1495 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1497 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1498 build_zero_cst (type));
/* Choose an unsigned counter type: for integer loops the unsigned
   variant of the index type; otherwise count in an unsigned version
   of the array index type.  */
1500 if (TREE_CODE (type) == INTEGER_TYPE)
1501 utype = unsigned_type_for (type);
1503 utype = unsigned_type_for (gfc_array_index_type);
1504 countm1 = gfc_create_var (utype, "countm1");
1506 /* Cycle and exit statements are implemented with gotos. */
1507 cycle_label = gfc_build_label_decl (NULL_TREE);
1508 exit_label = gfc_build_label_decl (NULL_TREE);
1509 TREE_USED (exit_label) = 1;
1511 /* Put these labels where they can be found later. */
1512 code->cycle_label = cycle_label;
1513 code->exit_label = exit_label;
1515 /* Initialize the DO variable: dovar = from. */
1516 gfc_add_modify (&block, dovar, from);
1518 /* Save value for do-tinkering checking. */
1519 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1521 saved_dovar = gfc_create_var (type, ".saved_dovar");
1522 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
1525 /* Initialize loop count and jump to exit label if the loop is empty.
1526 This code is executed before we enter the loop body. We generate:
1527 step_sign = sign(1,step);
1538 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
   (an empty-loop test that jumps straight to exit_label follows).  */
1542 if (TREE_CODE (type) == INTEGER_TYPE)
1544 tree pos, neg, step_sign, to2, from2, step2;
1546 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1548 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1549 build_int_cst (TREE_TYPE (step), 0));
1550 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1551 build_int_cst (type, -1),
1552 build_int_cst (type, 1));
1554 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1555 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1556 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1558 build_empty_stmt (loc));
1560 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1562 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1563 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1565 build_empty_stmt (loc));
1566 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1567 pos_step, pos, neg);
1569 gfc_add_expr_to_block (&block, tmp);
1571 /* Calculate the loop count. to-from can overflow, so
1572 we cast to unsigned. */
1574 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1575 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1576 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1577 step2 = fold_convert (utype, step2);
1578 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1579 tmp = fold_convert (utype, tmp);
1580 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1581 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1582 gfc_add_expr_to_block (&block, tmp);
1586 /* TODO: We could use the same width as the real type.
1587 This would probably cause more problems that it solves
1588 when we implement "long double" types. */
1590 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1591 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1592 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1593 gfc_add_modify (&block, countm1, tmp);
1595 /* We need a special check for empty loops:
1596 empty = (step > 0 ? to < from : to > from); */
1597 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1598 fold_build2_loc (loc, LT_EXPR,
1599 boolean_type_node, to, from),
1600 fold_build2_loc (loc, GT_EXPR,
1601 boolean_type_node, to, from));
1602 /* If the loop is empty, go directly to the exit label. */
1603 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1604 build1_v (GOTO_EXPR, exit_label),
1605 build_empty_stmt (input_location));
1606 gfc_add_expr_to_block (&block, tmp);
1610 gfc_start_block (&body);
1612 /* Main loop body. */
1613 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1614 gfc_add_expr_to_block (&body, tmp);
1616 /* Label for cycle statements (if needed). */
1617 if (TREE_USED (cycle_label))
1619 tmp = build1_v (LABEL_EXPR, cycle_label);
1620 gfc_add_expr_to_block (&body, tmp);
1623 /* Check whether someone has modified the loop variable. */
1624 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1626 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1628 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1629 "Loop variable has been modified");
1632 /* Exit the loop if there is an I/O result condition or error. */
1635 tmp = build1_v (GOTO_EXPR, exit_label);
1636 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1638 build_empty_stmt (input_location));
1639 gfc_add_expr_to_block (&body, tmp);
1642 /* Increment the loop variable. */
1643 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1644 gfc_add_modify_loc (loc, &body, dovar, tmp);
1646 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1647 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1649 /* End with the loop condition. Loop until countm1 == 0. */
1650 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1651 build_int_cst (utype, 0));
1652 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1653 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1654 cond, tmp, build_empty_stmt (loc));
1655 gfc_add_expr_to_block (&body, tmp);
1657 /* Decrement the loop count. */
1658 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1659 build_int_cst (utype, 1));
1660 gfc_add_modify_loc (loc, &body, countm1, tmp);
1662 /* End of loop body. */
1663 tmp = gfc_finish_block (&body);
1665 /* The for loop itself. */
1666 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1667 gfc_add_expr_to_block (&block, tmp);
1669 /* Add the exit label. */
1670 tmp = build1_v (LABEL_EXPR, exit_label);
1671 gfc_add_expr_to_block (&block, tmp);
1673 return gfc_finish_block (&block);
1677 /* Translate the DO WHILE construct.
1690 if (! cond) goto exit_label;
1696 Because the evaluation of the exit condition `cond' may have side
1697 effects, we can't do much for empty loop bodies. The backend optimizers
1698 should be smart enough to eliminate any dead loops. */
/* Translate DO WHILE (see the scheme above): the negated condition is
   tested at the top of the body; CYCLE jumps to a label at the end of
   the body so the condition is re-evaluated, EXIT leaves the loop.  */
1701 gfc_trans_do_while (gfc_code * code)
1709 /* Everything we build here is part of the loop body. */
1710 gfc_start_block (&block);
1712 /* Cycle and exit statements are implemented with gotos. */
1713 cycle_label = gfc_build_label_decl (NULL_TREE);
1714 exit_label = gfc_build_label_decl (NULL_TREE);
1716 /* Put the labels where they can be found later. See gfc_trans_do(). */
1717 code->cycle_label = cycle_label;
1718 code->exit_label = exit_label;
1720 /* Create a GIMPLE version of the exit condition. */
1721 gfc_init_se (&cond, NULL);
1722 gfc_conv_expr_val (&cond, code->expr1);
1723 gfc_add_block_to_block (&block, &cond.pre);
1724 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1725 TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1727 /* Build "IF (! cond) GOTO exit_label". */
1728 tmp = build1_v (GOTO_EXPR, exit_label);
1729 TREE_USED (exit_label) = 1;
1730 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1731 void_type_node, cond.expr, tmp,
1732 build_empty_stmt (code->expr1->where.lb->location));
1733 gfc_add_expr_to_block (&block, tmp);
1735 /* The main body of the loop. */
1736 tmp = gfc_trans_code (code->block->next);
1737 gfc_add_expr_to_block (&block, tmp);
1739 /* Label for cycle statements (if needed). */
1740 if (TREE_USED (cycle_label))
1742 tmp = build1_v (LABEL_EXPR, cycle_label);
1743 gfc_add_expr_to_block (&block, tmp);
1746 /* End of loop body. */
1747 tmp = gfc_finish_block (&block);
/* Reuse BLOCK for the outer statement list: LOOP_EXPR followed by the
   exit label.  */
1749 gfc_init_block (&block);
1750 /* Build the loop. */
1751 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1752 void_type_node, tmp);
1753 gfc_add_expr_to_block (&block, tmp);
1755 /* Add the exit label. */
1756 tmp = build1_v (LABEL_EXPR, exit_label);
1757 gfc_add_expr_to_block (&block, tmp);
1759 return gfc_finish_block (&block);
1763 /* Translate the SELECT CASE construct for INTEGER case expressions,
1764 without killing all potential optimizations. The problem is that
1765 Fortran allows unbounded cases, but the back-end does not, so we
1766 need to intercept those before we enter the equivalent SWITCH_EXPR
1769 For example, we translate this,
1772 CASE (:100,101,105:115)
1782 to the GENERIC equivalent,
1786 case (minimum value for typeof(expr)) ... 100:
1792 case 200 ... (maximum value for typeof(expr)):
   ...  */
/* Translate SELECT CASE with an INTEGER selector into a GENERIC
   SWITCH_EXPR.  Unbounded CASE ranges are clamped to TYPE_MIN_VALUE /
   TYPE_MAX_VALUE of the selector's type (see comment above).  */
1809 gfc_trans_integer_select (gfc_code * code)
1819 gfc_start_block (&block);
1821 /* Calculate the switch expression. */
1822 gfc_init_se (&se, NULL);
1823 gfc_conv_expr_val (&se, code->expr1);
1824 gfc_add_block_to_block (&block, &se.pre);
1826 end_label = gfc_build_label_decl (NULL_TREE);
1828 gfc_init_block (&body);
1830 for (c = code->block; c; c = c->block)
1832 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1837 /* Assume it's the default case. */
1838 low = high = NULL_TREE;
1842 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1845 /* If there's only a lower bound, set the high bound to the
1846 maximum value of the case expression. */
1848 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1853 /* Three cases are possible here:
1855 1) There is no lower bound, e.g. CASE (:N).
1856 2) There is a lower bound .NE. high bound, that is
1857 a case range, e.g. CASE (N:M) where M>N (we make
1858 sure that M>N during type resolution).
1859 3) There is a lower bound, and it has the same value
1860 as the high bound, e.g. CASE (N:N). This is our
1861 internal representation of CASE(N).
1863 In the first and second case, we need to set a value for
1864 high. In the third case, we don't because the GCC middle
1865 end represents a single case value by just letting high be
1866 a NULL_TREE. We can't do that because we need to be able
1867 to represent unbounded cases. */
1871 && mpz_cmp (cp->low->value.integer,
1872 cp->high->value.integer) != 0))
1873 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1876 /* Unbounded case. */
1878 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1881 /* Build a label. */
1882 label = gfc_build_label_decl (NULL_TREE);
1884 /* Add this case label.
1885 Add parameter 'label', make it match GCC backend. */
1886 tmp = build_case_label (low, high, label);
1887 gfc_add_expr_to_block (&body, tmp);
1890 /* Add the statements for this case. */
1891 tmp = gfc_trans_code (c->next);
1892 gfc_add_expr_to_block (&body, tmp);
1894 /* Break to the end of the construct. */
1895 tmp = build1_v (GOTO_EXPR, end_label);
1896 gfc_add_expr_to_block (&body, tmp);
1899 tmp = gfc_finish_block (&body);
1900 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1901 gfc_add_expr_to_block (&block, tmp);
1903 tmp = build1_v (LABEL_EXPR, end_label);
1904 gfc_add_expr_to_block (&block, tmp);
1906 return gfc_finish_block (&block);
1910 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1912 There are only two cases possible here, even though the standard
1913 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1914 .FALSE., and DEFAULT.
1916 We never generate more than two blocks here. Instead, we always
1917 try to eliminate the DEFAULT case. This way, we can translate this
1918 kind of SELECT construct to a simple
1922 expression in GENERIC. */
/* Translate SELECT CASE with a LOGICAL selector; at most a .TRUE., a
   .FALSE. and a DEFAULT block exist, so a single COND_EXPR suffices
   (see comment above).  */
1925 gfc_trans_logical_select (gfc_code * code)
1928 gfc_code *t, *f, *d;
1933 /* Assume we don't have any cases at all. */
1936 /* Now see which ones we actually do have. We can have at most two
1937 cases in a single case list: one for .TRUE. and one for .FALSE.
1938 The default case is always separate. If the cases for .TRUE. and
1939 .FALSE. are in the same case list, the block for that case list
1940 is always executed, and we don't generate a COND_EXPR.  */
1941 for (c = code->block; c; c = c->block)
1943 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1947 if (cp->low->value.logical == 0) /* .FALSE. */
1949 else /* if (cp->value.logical != 0), thus .TRUE. */
1957 /* Start a new block. */
1958 gfc_start_block (&block);
1960 /* Calculate the switch expression. We always need to do this
1961 because it may have side effects. */
1962 gfc_init_se (&se, NULL);
1963 gfc_conv_expr_val (&se, code->expr1);
1964 gfc_add_block_to_block (&block, &se.pre);
1966 if (t == f && t != NULL)
1968 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1969 translate the code for these cases, append it to the current
   block.  */
1971 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1975 tree true_tree, false_tree, stmt;
1977 true_tree = build_empty_stmt (input_location);
1978 false_tree = build_empty_stmt (input_location);
1980 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1981 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1982 make the missing case the default case. */
1983 if (t != NULL && f != NULL)
1993 /* Translate the code for each of these blocks, and append it to
1994 the current block. */
1996 true_tree = gfc_trans_code (t->next);
1999 false_tree = gfc_trans_code (f->next);
2001 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2002 se.expr, true_tree, false_tree);
2003 gfc_add_expr_to_block (&block, stmt);
2006 return gfc_finish_block (&block);
2010 /* The jump table types are stored in static variables to avoid
2011 constructing them from scratch every single time. */
/* NOTE(review): the two slots appear to cache the RECORD_TYPE per
   character kind (kind=1 vs kind=4, see gfc_trans_character_select) --
   confirm against the elided index computation.  */
2012 static GTY(()) tree select_struct[2];
2014 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2015 Instead of generating compares and jumps, it is far simpler to
2016 generate a data structure describing the cases in order and call a
2017 library subroutine that locates the right case.
2018 This is particularly true because this is the only case where we
2019 might have to dispose of a temporary.
2020 The library subroutine returns a pointer to jump to or NULL if no
2021 branches are to be taken. */
/* Translate SELECT CASE with a CHARACTER selector.  Length-1 selectors
   are lowered to an integer SWITCH_EXPR on the single character; the
   general case builds a static, sorted jump table and calls the runtime
   search routine (see comment above).  */
2024 gfc_trans_character_select (gfc_code *code)
2026 tree init, end_label, tmp, type, case_num, label, fndecl;
2027 stmtblock_t block, body;
2032 VEC(constructor_elt,gc) *inits = NULL;
2034 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
2036 /* The jump table types are stored in static variables to avoid
2037 constructing them from scratch every single time. */
2038 static tree ss_string1[2], ss_string1_len[2];
2039 static tree ss_string2[2], ss_string2_len[2];
2040 static tree ss_target[2];
/* Rewind the (tree-ordered) case list to its leftmost node.  */
2042 cp = code->block->ext.block.case_list;
2043 while (cp->left != NULL)
2046 /* Generate the body */
2047 gfc_start_block (&block);
2048 gfc_init_se (&expr1se, NULL);
2049 gfc_conv_expr_reference (&expr1se, code->expr1);
2051 gfc_add_block_to_block (&block, &expr1se.pre);
2053 end_label = gfc_build_label_decl (NULL_TREE);
2055 gfc_init_block (&body);
2057 /* Attempt to optimize length 1 selects. */
2058 if (integer_onep (expr1se.string_length))
2060 for (d = cp; d; d = d->right)
2065 gcc_assert (d->low->expr_type == EXPR_CONSTANT
2066 && d->low->ts.type == BT_CHARACTER);
2067 if (d->low->value.character.length > 1)
2069 for (i = 1; i < d->low->value.character.length; i++)
2070 if (d->low->value.character.string[i] != ' ')
2072 if (i != d->low->value.character.length)
2074 if (optimize && d->high && i == 1)
2076 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2077 && d->high->ts.type == BT_CHARACTER);
2078 if (d->high->value.character.length > 1
2079 && (d->low->value.character.string[0]
2080 == d->high->value.character.string[0])
2081 && d->high->value.character.string[1] != ' '
2082 && ((d->low->value.character.string[1] < ' ')
2083 == (d->high->value.character.string[1]
2093 gcc_assert (d->high->expr_type == EXPR_CONSTANT
2094 && d->high->ts.type == BT_CHARACTER);
2095 if (d->high->value.character.length > 1)
2097 for (i = 1; i < d->high->value.character.length; i++)
2098 if (d->high->value.character.string[i] != ' ')
2100 if (i != d->high->value.character.length)
/* Length-1 fast path: switch on the first (only) character.  */
2107 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
2109 for (c = code->block; c; c = c->block)
2111 for (cp = c->ext.block.case_list; cp; cp = cp->next)
2117 /* Assume it's the default case. */
2118 low = high = NULL_TREE;
2122 /* CASE ('ab') or CASE ('ab':'az') will never match
2123 any length 1 character. */
2124 if (cp->low->value.character.length > 1
2125 && cp->low->value.character.string[1] != ' ')
2128 if (cp->low->value.character.length > 0)
2129 r = cp->low->value.character.string[0];
2132 low = build_int_cst (ctype, r);
2134 /* If there's only a lower bound, set the high bound
2135 to the maximum value of the case expression. */
2137 high = TYPE_MAX_VALUE (ctype);
2143 || (cp->low->value.character.string[0]
2144 != cp->high->value.character.string[0]))
2146 if (cp->high->value.character.length > 0)
2147 r = cp->high->value.character.string[0];
2150 high = build_int_cst (ctype, r);
2153 /* Unbounded case. */
2155 low = TYPE_MIN_VALUE (ctype);
2158 /* Build a label. */
2159 label = gfc_build_label_decl (NULL_TREE);
2161 /* Add this case label.
2162 Add parameter 'label', make it match GCC backend. */
2163 tmp = build_case_label (low, high, label);
2164 gfc_add_expr_to_block (&body, tmp);
2167 /* Add the statements for this case. */
2168 tmp = gfc_trans_code (c->next);
2169 gfc_add_expr_to_block (&body, tmp);
2171 /* Break to the end of the construct. */
2172 tmp = build1_v (GOTO_EXPR, end_label);
2173 gfc_add_expr_to_block (&body, tmp);
2176 tmp = gfc_string_to_single_character (expr1se.string_length,
2178 code->expr1->ts.kind);
2179 case_num = gfc_create_var (ctype, "case_num");
2180 gfc_add_modify (&block, case_num, tmp);
2182 gfc_add_block_to_block (&block, &expr1se.post);
2184 tmp = gfc_finish_block (&body);
2185 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2186 gfc_add_expr_to_block (&block, tmp);
2188 tmp = build1_v (LABEL_EXPR, end_label);
2189 gfc_add_expr_to_block (&block, tmp);
2191 return gfc_finish_block (&block);
/* General case: build the per-kind jump-table record type lazily
   (k selects the cached slot for kind=1 vs kind=4 -- NOTE(review):
   presumed, the index assignment lines are elided here).  */
2195 if (code->expr1->ts.kind == 1)
2197 else if (code->expr1->ts.kind == 4)
2202 if (select_struct[k] == NULL)
2205 select_struct[k] = make_node (RECORD_TYPE);
2207 if (code->expr1->ts.kind == 1)
2208 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
2209 else if (code->expr1->ts.kind == 4)
2210 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
2215 #define ADD_FIELD(NAME, TYPE) \
2216 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2217 get_identifier (stringize(NAME)), \
2221 ADD_FIELD (string1, pchartype);
2222 ADD_FIELD (string1_len, gfc_charlen_type_node);
2224 ADD_FIELD (string2, pchartype);
2225 ADD_FIELD (string2_len, gfc_charlen_type_node);
2227 ADD_FIELD (target, integer_type_node);
2230 gfc_finish_type (select_struct[k]);
2234 for (d = cp; d; d = d->right)
2237 for (c = code->block; c; c = c->block)
2239 for (d = c->ext.block.case_list; d; d = d->next)
2241 label = gfc_build_label_decl (NULL_TREE);
2242 tmp = build_case_label ((d->low == NULL && d->high == NULL)
2244 : build_int_cst (integer_type_node, d->n),
2246 gfc_add_expr_to_block (&body, tmp);
2249 tmp = gfc_trans_code (c->next);
2250 gfc_add_expr_to_block (&body, tmp);
2252 tmp = build1_v (GOTO_EXPR, end_label);
2253 gfc_add_expr_to_block (&body, tmp);
2256 /* Generate the structure describing the branches */
2257 for (d = cp; d; d = d->right)
2259 VEC(constructor_elt,gc) *node = NULL;
2261 gfc_init_se (&se, NULL);
2265 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2266 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2270 gfc_conv_expr_reference (&se, d->low);
2272 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2273 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2276 if (d->high == NULL)
2278 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2279 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2283 gfc_init_se (&se, NULL);
2284 gfc_conv_expr_reference (&se, d->high);
2286 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2287 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2290 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2291 build_int_cst (integer_type_node, d->n));
2293 tmp = build_constructor (select_struct[k], node);
2294 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2297 type = build_array_type (select_struct[k],
2298 build_index_type (size_int (n-1)));
2300 init = build_constructor (type, inits);
2301 TREE_CONSTANT (init) = 1;
2302 TREE_STATIC (init) = 1;
2303 /* Create a static variable to hold the jump table. */
2304 tmp = gfc_create_var (type, "jumptable");
2305 TREE_CONSTANT (tmp) = 1;
2306 TREE_STATIC (tmp) = 1;
2307 TREE_READONLY (tmp) = 1;
2308 DECL_INITIAL (tmp) = init;
2311 /* Build the library call */
2312 init = gfc_build_addr_expr (pvoid_type_node, init);
2314 if (code->expr1->ts.kind == 1)
2315 fndecl = gfor_fndecl_select_string;
2316 else if (code->expr1->ts.kind == 4)
2317 fndecl = gfor_fndecl_select_string_char4;
2321 tmp = build_call_expr_loc (input_location,
2323 build_int_cst (gfc_charlen_type_node, n),
2324 expr1se.expr, expr1se.string_length);
2325 case_num = gfc_create_var (integer_type_node, "case_num");
2326 gfc_add_modify (&block, case_num, tmp);
2328 gfc_add_block_to_block (&block, &expr1se.post);
2330 tmp = gfc_finish_block (&body);
2331 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2332 gfc_add_expr_to_block (&block, tmp);
2334 tmp = build1_v (LABEL_EXPR, end_label);
2335 gfc_add_expr_to_block (&block, tmp);
2337 return gfc_finish_block (&block);
2341 /* Translate the three variants of the SELECT CASE construct.
2343 SELECT CASEs with INTEGER case expressions can be translated to an
2344 equivalent GENERIC switch statement, and for LOGICAL case
2345 expressions we build one or two if-else compares.
2347 SELECT CASEs with CHARACTER case expressions are a whole different
2348 story, because they don't exist in GENERIC. So we sort them and
2349 do a binary search at runtime.
2351 Fortran has no BREAK statement, and it does not allow jumps from
2352 one case block to another. That makes things a lot easier for
   the optimizers.  */
/* SELECT CASE driver: dispatch on the selector's type to one of the
   three translators above, then append the shared exit label.  */
2356 gfc_trans_select (gfc_code * code)
2362 gcc_assert (code && code->expr1);
2363 gfc_init_block (&block);
2365 /* Build the exit label and hang it in. */
2366 exit_label = gfc_build_label_decl (NULL_TREE);
2367 code->exit_label = exit_label;
2369 /* Empty SELECT constructs are legal. */
2370 if (code->block == NULL)
2371 body = build_empty_stmt (input_location);
2373 /* Select the correct translation function. */
2375 switch (code->expr1->ts.type)
2378 body = gfc_trans_logical_select (code);
2382 body = gfc_trans_integer_select (code);
2386 body = gfc_trans_character_select (code);
2390 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2394 /* Build everything together. */
2395 gfc_add_expr_to_block (&block, body);
2396 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2398 return gfc_finish_block (&block);
2402 /* Traversal function to substitute a replacement symtree if the symbol
2403 in the expression is the same as that passed. f == 2 signals that
2404 that variable itself is not to be checked - only the references.
2405 This group of functions is used when the variable expression in a
2406 FORALL assignment has internal references. For example:
2407 FORALL (i = 1:4) p(p(i)) = i
2408 The only recourse here is to store a copy of 'p' for the index
   expression.  */
/* Saved symtrees for the FORALL lvalue-copy machinery (see comment
   above): new_symtree points at the temporary's fake symtree,
   old_symtree at the original variable's.  */
2411 static gfc_symtree *new_symtree;
2412 static gfc_symtree *old_symtree;
/* Callback for gfc_traverse_expr: redirect references to SYM to the
   replacement symtree.  *F == 2 means the root variable itself is not
   replaced, only references inside it (see comment above).  */
2415 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2417 if (expr->expr_type != EXPR_VARIABLE)
2422 else if (expr->symtree->n.sym == sym)
2423 expr->symtree = new_symtree;
/* Walk E, replacing every reference to SYM with new_symtree; F is the
   flag forwarded to forall_replace (2 = skip the root variable).  */
2429 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2431 gfc_traverse_expr (e, sym, forall_replace, f);
/* Callback undoing forall_replace: wherever new_symtree was installed,
   put the original old_symtree back.  */
2435 forall_restore (gfc_expr *expr,
2436 gfc_symbol *sym ATTRIBUTE_UNUSED,
2437 int *f ATTRIBUTE_UNUSED)
2439 if (expr->expr_type != EXPR_VARIABLE)
2442 if (expr->symtree == new_symtree)
2443 expr->symtree = old_symtree;
/* Undo forall_replace_symtree on E.  */
2449 forall_restore_symtree (gfc_expr *e)
2451 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a complete copy of the FORALL assignment's lvalue variable in a
   temporary (copy-in appended to PRE, cleanup to POST), then redirect
   the references in C->expr1/expr2 to a fake symtree for the copy.  */
2455 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2460 gfc_symbol *new_sym;
2461 gfc_symbol *old_sym;
2465 /* Build a copy of the lvalue. */
2466 old_symtree = c->expr1->symtree;
2467 old_sym = old_symtree->n.sym;
2468 e = gfc_lval_expr_from_sym (old_sym);
2469 if (old_sym->attr.dimension)
2471 gfc_init_se (&tse, NULL);
2472 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2473 gfc_add_block_to_block (pre, &tse.pre);
2474 gfc_add_block_to_block (post, &tse.post);
2475 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2477 if (e->ts.type != BT_CHARACTER)
2479 /* Use the variable offset for the temporary. */
2480 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2481 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: build a scalar temporary and assign the value to it.  */
2486 gfc_init_se (&tse, NULL);
2487 gfc_init_se (&rse, NULL);
2488 gfc_conv_expr (&rse, e);
2489 if (e->ts.type == BT_CHARACTER)
2491 tse.string_length = rse.string_length;
2492 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2494 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2496 gfc_add_block_to_block (pre, &tse.pre);
2497 gfc_add_block_to_block (post, &tse.post);
2501 tmp = gfc_typenode_for_spec (&e->ts);
2502 tse.expr = gfc_create_var (tmp, "temp");
2505 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2506 e->expr_type == EXPR_VARIABLE, true);
2507 gfc_add_expr_to_block (pre, tmp);
2511 /* Create a new symbol to represent the lvalue. */
2512 new_sym = gfc_new_symbol (old_sym->name, NULL);
2513 new_sym->ts = old_sym->ts;
2514 new_sym->attr.referenced = 1;
2515 new_sym->attr.temporary = 1;
2516 new_sym->attr.dimension = old_sym->attr.dimension;
2517 new_sym->attr.flavor = old_sym->attr.flavor;
2519 /* Use the temporary as the backend_decl. */
2520 new_sym->backend_decl = tse.expr;
2522 /* Create a fake symtree for it. */
2524 new_symtree = gfc_new_symtree (&root, old_sym->name);
2525 new_symtree->n.sym = new_sym;
2526 gcc_assert (new_symtree == root);
2528 /* Go through the expression reference replacing the old_symtree
   with the new one.  */
2530 forall_replace_symtree (c->expr1, old_sym, 2);
2532 /* Now we have made this temporary, we might as well use it for
2533 the right hand side. */
2534 forall_replace_symtree (c->expr2, old_sym, 1);
2538 /* Handles dependencies in forall assignments. */
/* Examine the assignment C (inside a FORALL body) for dependencies
   between lhs and rhs.  Returns the gfc_check_dependency verdict for
   the whole assignment; in addition, when the lhs variable itself is
   indexed by a FORALL index (or overlapping substrings of the same
   character variable appear on both sides) the lhs is replaced by a
   temporary via forall_make_variable_temp, with setup/teardown code
   added to PRE and POST.  */
2540 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2547   lsym = c->expr1->symtree->n.sym;
2548   need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2550   /* Now check for dependencies within the 'variable'
2551      expression itself.  These are treated by making a complete
2552      copy of variable and changing all the references to it
2553      point to the copy instead.  Note that the shallow copy of
2554      the variable will not suffice for derived types with
2555      pointer components.  We therefore leave these to their
/* Derived types with pointer components cannot be handled by the
   shallow-copy scheme, so bail out early for them.  */
2557   if (lsym->ts.type == BT_DERIVED
2558 	&& lsym->ts.u.derived->attr.pointer_comp)
/* The lhs references a FORALL index variable: make a temporary copy.  */
2562   if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2564       forall_make_variable_temp (c, pre, post);
2568   /* Substrings with dependencies are treated in the same
/* Same variable appears on both sides as character substrings;
   compare the substring start expressions to detect overlap.  */
2570   if (c->expr1->ts.type == BT_CHARACTER
2572       && c->expr2->expr_type == EXPR_VARIABLE
2573       && lsym == c->expr2->symtree->n.sym)
2575       for (lref = c->expr1->ref; lref; lref = lref->next)
2576 	if (lref->type == REF_SUBSTRING)
2578       for (rref = c->expr2->ref; rref; rref = rref->next)
2579 	if (rref->type == REF_SUBSTRING)
/* rhs substring starts before lhs substring: the forward copy would
   read already-overwritten characters, so use a temporary.  */
2583 	  && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2585 	  forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution performed by forall_make_variable_temp:
   restore the original symtrees in both sides of the assignment C and
   release the temporary symbol.  NOTE(review): new_symtree appears to be
   file-scope state set when the fake symtree was created — confirm.  */
2594 cleanup_forall_symtrees (gfc_code *c)
2596   forall_restore_symtree (c->expr1);
2597   forall_restore_symtree (c->expr2);
2598   free (new_symtree->n.sym);
2603 /* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
2604 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2605 indicates whether we should generate code to test the FORALLs mask
2606 array. OUTER is the loop header to be used for initializing mask
2609 The generated loop format is:
2610 count = (end - start + step) / step
/* Returns the fully wrapped loop tree; one loop level is emitted per
   iterator recorded in FORALL_TMP->this_loop (innermost first, since
   each pass wraps the previous BODY).  */
2623 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2624 int mask_flag, stmtblock_t *outer)
2632 tree var, start, end, step;
2635 /* Initialize the mask index outside the FORALL nest. */
2636 if (mask_flag && forall_tmp->mask)
2637 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2639 iter = forall_tmp->this_loop;
2640 nvar = forall_tmp->nvar;
2641 for (n = 0; n < nvar; n++)
2644 start = iter->start;
2648 exit_label = gfc_build_label_decl (NULL_TREE);
2649 TREE_USED (exit_label) = 1;
2651 /* The loop counter. */
/* COUNT holds the number of iterations still to run; the loop exits
   when it reaches zero.  This avoids overflow issues with comparing
   VAR against END directly.  */
2652 count = gfc_create_var (TREE_TYPE (var), "count");
2654 /* The body of the loop. */
2655 gfc_init_block (&block);
2657 /* The exit condition. */
2658 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2659 count, build_int_cst (TREE_TYPE (count), 0));
2660 tmp = build1_v (GOTO_EXPR, exit_label);
2661 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2662 cond, tmp, build_empty_stmt (input_location));
2663 gfc_add_expr_to_block (&block, tmp);
2665 /* The main loop body. */
2666 gfc_add_expr_to_block (&block, body);
2668 /* Increment the loop variable. */
2669 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2671 gfc_add_modify (&block, var, tmp);
2673 /* Advance to the next mask element.  Only do this for the
/* Only the innermost (n == 0) loop advances the linear mask index.  */
2675 if (n == 0 && mask_flag && forall_tmp->mask)
2677 tree maskindex = forall_tmp->maskindex;
2678 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2679 maskindex, gfc_index_one_node);
2680 gfc_add_modify (&block, maskindex, tmp);
2683 /* Decrement the loop counter. */
2684 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2685 build_int_cst (TREE_TYPE (var), 1));
2686 gfc_add_modify (&block, count, tmp);
2688 body = gfc_finish_block (&block);
2690 /* Loop var initialization. */
2691 gfc_init_block (&block);
2692 gfc_add_modify (&block, var, start);
2695 /* Initialize the loop counter. */
/* count = (end - start + step) / step, built bottom-up.  */
2696 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2698 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2700 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2702 gfc_add_modify (&block, count, tmp);
2704 /* The loop expression. */
2705 tmp = build1_v (LOOP_EXPR, body);
2706 gfc_add_expr_to_block (&block, tmp);
2708 /* The exit label. */
2709 tmp = build1_v (LABEL_EXPR, exit_label);
2710 gfc_add_expr_to_block (&block, tmp);
2712 body = gfc_finish_block (&block);
2719 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2720 is nonzero, the body is controlled by all masks in the forall nest.
2721 Otherwise, the innermost loop is not controlled by its mask.  This
2722 is used for initializing that mask. */
/* Walks outward from NESTED_FORALL_INFO via prev_nest, wrapping BODY
   in each level's mask test (when present) and loop nest, and returns
   the resulting statement tree.  */
2725 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2730 forall_info *forall_tmp;
2731 tree mask, maskindex;
2733 gfc_start_block (&header);
2735 forall_tmp = nested_forall_info;
2736 while (forall_tmp != NULL)
2738 /* Generate body with masks' control. */
2741 mask = forall_tmp->mask;
2742 maskindex = forall_tmp->maskindex;
2744 /* If a mask was specified make the assignment conditional. */
2747 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2748 body = build3_v (COND_EXPR, tmp, body,
2749 build_empty_stmt (input_location));
/* Wrap this level's loops around the (possibly masked) body, then
   move outward to the enclosing FORALL level.  */
2752 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2753 forall_tmp = forall_tmp->prev_nest;
2757 gfc_add_expr_to_block (&header, body);
2758 return gfc_finish_block (&header);
2762 /* Allocate data for holding a temporary array. Returns either a local
2763 temporary array or a pointer variable. */
/* BYTESIZE is the size in bytes, SIZE the element count; *PDATA
   receives the pointer to free later (or stays a stack variable's
   address — the non-visible branch).  Allocation code is appended to
   PBLOCK.  */
2766 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2773 if (INTEGER_CST_P (size))
2774 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2775 size, gfc_index_one_node);
/* Build the array type [0 .. size-1] of the element type.  */
2779 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2780 type = build_array_type (elem_type, type);
2781 if (gfc_can_put_var_on_stack (bytesize))
/* Small constant-sized temporaries live on the stack.  */
2783 gcc_assert (INTEGER_CST_P (size));
2784 tmpvar = gfc_create_var (type, "temp");
/* Otherwise heap-allocate and record the raw pointer for freeing.  */
2789 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2790 *pdata = convert (pvoid_type_node, tmpvar);
2792 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2793 gfc_add_modify (pblock, tmpvar, tmp);
2799 /* Generate codes to copy the temporary to the actual lhs. */
/* EXPR is the lhs, TMP1 the linear temporary array, COUNT1 its running
   index, COUNT3 the index into WHEREMASK (whose sense INVERT may flip).
   Returns the generated copy code; scalar and array lhs take separate
   paths.  */
2802 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2803 tree count1, tree wheremask, bool invert)
2807 stmtblock_t block, body;
2813 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single element copy, no scalarizer needed.  */
2815 if (lss == gfc_ss_terminator)
2817 gfc_start_block (&block);
2819 gfc_init_se (&lse, NULL);
2821 /* Translate the expression. */
2822 gfc_conv_expr (&lse, expr);
2824 /* Form the expression for the temporary. */
2825 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2827 /* Use the scalar assignment as is. */
2828 gfc_add_block_to_block (&block, &lse.pre);
2829 gfc_add_modify (&block, lse.expr, tmp);
2830 gfc_add_block_to_block (&block, &lse.post);
2832 /* Increment the count1. */
2833 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2834 count1, gfc_index_one_node);
2835 gfc_add_modify (&block, count1, tmp);
2837 tmp = gfc_finish_block (&block);
/* Array lhs: scalarize the lhs and read elements out of the linear
   temporary in the same order they were written.  */
2841 gfc_start_block (&block);
2843 gfc_init_loopinfo (&loop1);
2844 gfc_init_se (&rse, NULL);
2845 gfc_init_se (&lse, NULL);
2847 /* Associate the lss with the loop. */
2848 gfc_add_ss_to_loop (&loop1, lss);
2850 /* Calculate the bounds of the scalarization. */
2851 gfc_conv_ss_startstride (&loop1);
2852 /* Setup the scalarizing loops. */
2853 gfc_conv_loop_setup (&loop1, &expr->where);
2855 gfc_mark_ss_chain_used (lss, 1);
2857 /* Start the scalarized loop body. */
2858 gfc_start_scalarized_body (&loop1, &body);
2860 /* Setup the gfc_se structures. */
2861 gfc_copy_loopinfo_to_se (&lse, &loop1);
2864 /* Form the expression of the temporary. */
2865 if (lss != gfc_ss_terminator)
2866 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2867 /* Translate expr. */
2868 gfc_conv_expr (&lse, expr);
2870 /* Use the scalar assignment. */
2871 rse.string_length = lse.string_length;
2872 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2874 /* Form the mask expression according to the mask tree list. */
/* If a WHERE mask controls this assignment, guard the element copy;
   INVERT selects the ELSEWHERE branch by negating the mask value.  */
2877 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2879 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2880 TREE_TYPE (wheremaskexpr),
2882 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2884 build_empty_stmt (input_location));
2887 gfc_add_expr_to_block (&body, tmp);
2889 /* Increment count1. */
2890 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2891 count1, gfc_index_one_node);
2892 gfc_add_modify (&body, count1, tmp);
2894 /* Increment count3. */
2897 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2898 gfc_array_index_type, count3,
2899 gfc_index_one_node);
2900 gfc_add_modify (&body, count3, tmp);
2903 /* Generate the copying loops. */
2904 gfc_trans_scalarizing_loops (&loop1, &body);
2905 gfc_add_block_to_block (&block, &loop1.pre);
2906 gfc_add_block_to_block (&block, &loop1.post);
2907 gfc_cleanup_loop (&loop1);
2909 tmp = gfc_finish_block (&block);
2915 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2916 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2917 and should not be freed. WHEREMASK is the conditional execution mask
2918 whose sense may be inverted by INVERT. */
/* COUNT1 indexes the linear temporary, COUNT3 indexes WHEREMASK; the
   element order written here must match the read order used later by
   generate_loop_for_temp_to_lhs.  */
2921 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2922 tree count1, gfc_ss *lss, gfc_ss *rss,
2923 tree wheremask, bool invert)
2925 stmtblock_t block, body1;
2932 gfc_start_block (&block);
2934 gfc_init_se (&rse, NULL);
2935 gfc_init_se (&lse, NULL);
/* Scalar case: evaluate the rhs once into the temporary element.  */
2937 if (lss == gfc_ss_terminator)
2939 gfc_init_block (&body1);
2940 gfc_conv_expr (&rse, expr2);
2941 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
/* Array case: scalarize the rhs over the loop shape computed by
   compute_inner_temp_size.  */
2945 /* Initialize the loop. */
2946 gfc_init_loopinfo (&loop);
2948 /* We may need LSS to determine the shape of the expression. */
2949 gfc_add_ss_to_loop (&loop, lss);
2950 gfc_add_ss_to_loop (&loop, rss);
2952 gfc_conv_ss_startstride (&loop);
2953 gfc_conv_loop_setup (&loop, &expr2->where);
2955 gfc_mark_ss_chain_used (rss, 1);
2956 /* Start the loop body. */
2957 gfc_start_scalarized_body (&loop, &body1);
2959 /* Translate the expression. */
2960 gfc_copy_loopinfo_to_se (&rse, &loop);
2962 gfc_conv_expr (&rse, expr2);
2964 /* Form the expression of the temporary. */
2965 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2968 /* Use the scalar assignment. */
2969 lse.string_length = rse.string_length;
2970 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2971 expr2->expr_type == EXPR_VARIABLE, true);
2973 /* Form the mask expression according to the mask tree list. */
/* Guard the element store with the WHERE mask; INVERT negates it for
   the ELSEWHERE branch.  */
2976 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2978 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2979 TREE_TYPE (wheremaskexpr),
2981 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2983 build_empty_stmt (input_location));
2986 gfc_add_expr_to_block (&body1, tmp);
2988 if (lss == gfc_ss_terminator)
2990 gfc_add_block_to_block (&block, &body1);
2992 /* Increment count1. */
2993 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2994 count1, gfc_index_one_node);
2995 gfc_add_modify (&block, count1, tmp);
2999 /* Increment count1. */
3000 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3001 count1, gfc_index_one_node);
3002 gfc_add_modify (&body1, count1, tmp);
3004 /* Increment count3. */
3007 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3008 gfc_array_index_type,
3009 count3, gfc_index_one_node);
3010 gfc_add_modify (&body1, count3, tmp);
3013 /* Generate the copying loops. */
3014 gfc_trans_scalarizing_loops (&loop, &body1);
3016 gfc_add_block_to_block (&block, &loop.pre);
3017 gfc_add_block_to_block (&block, &loop.post);
3019 gfc_cleanup_loop (&loop);
3020 /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
3021 as tree nodes in SS may not be valid in different scope. */
3024 tmp = gfc_finish_block (&block);
3029 /* Calculate the size of temporary needed in the assignment inside forall.
3030 LSS and RSS are filled in this function. */
/* Returns the element count of one iteration's assignment (1 for a
   scalar lhs); the evaluated size is appended to PBLOCK.  *LSS/*RSS
   are left alive for reuse by generate_loop_for_rhs_to_temp.  */
3033 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
3034 stmtblock_t * pblock,
3035 gfc_ss **lss, gfc_ss **rss)
3043 *lss = gfc_walk_expr (expr1);
3046 size = gfc_index_one_node;
3047 if (*lss != gfc_ss_terminator)
3049 gfc_init_loopinfo (&loop);
3051 /* Walk the RHS of the expression. */
3052 *rss = gfc_walk_expr (expr2);
3053 if (*rss == gfc_ss_terminator)
3054 /* The rhs is scalar. Add a ss for the expression. */
3055 *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
3057 /* Associate the SS with the loop. */
3058 gfc_add_ss_to_loop (&loop, *lss);
3059 /* We don't actually need to add the rhs at this point, but it might
3060 make guessing the loop bounds a bit easier. */
3061 gfc_add_ss_to_loop (&loop, *rss);
3063 /* We only want the shape of the expression, not rest of the junk
3064 generated by the scalarizer. */
3065 loop.array_parameter = 1;
3067 /* Calculate the bounds of the scalarization. */
/* Temporarily disable bounds checking: we only need the shape here,
   and the check would be emitted again in the real assignment loop.  */
3068 save_flag = gfc_option.rtcheck;
3069 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
3070 gfc_conv_ss_startstride (&loop);
3071 gfc_option.rtcheck = save_flag;
3072 gfc_conv_loop_setup (&loop, &expr2->where);
3074 /* Figure out how many elements we need. */
/* size = product over dims of (to - from + 1).  */
3075 for (i = 0; i < loop.dimen; i++)
3077 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3078 gfc_array_index_type,
3079 gfc_index_one_node, loop.from[i]);
3080 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3081 gfc_array_index_type, tmp, loop.to[i]);
3082 size = fold_build2_loc (input_location, MULT_EXPR,
3083 gfc_array_index_type, size, tmp);
3085 gfc_add_block_to_block (pblock, &loop.pre);
3086 size = gfc_evaluate_now (size, pblock);
3087 gfc_add_block_to_block (pblock, &loop.post);
3089 /* TODO: write a function that cleans up a loopinfo without freeing
3090 the SS chains. Currently a NOP. */
3097 /* Calculate the overall iterator number of the nested forall construct.
3098 This routine actually calculates the number of times the body of the
3099 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3100 that by the expression INNER_SIZE. The BLOCK argument specifies the
3101 block in which to calculate the result, and the optional INNER_SIZE_BODY
3102 argument contains any statements that need to executed (inside the loop)
3103 to initialize or calculate INNER_SIZE. */
3106 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
3107 stmtblock_t *inner_size_body, stmtblock_t *block)
3109 forall_info *forall_tmp = nested_forall_info;
3113 /* We can eliminate the innermost unconditional loops with constant
/* Fold constant, unmasked loop levels directly into INNER_SIZE so no
   runtime accumulation is needed for them.  */
3115 if (INTEGER_CST_P (inner_size))
3118 && !forall_tmp->mask
3119 && INTEGER_CST_P (forall_tmp->size))
3121 inner_size = fold_build2_loc (input_location, MULT_EXPR,
3122 gfc_array_index_type,
3123 inner_size, forall_tmp->size);
3124 forall_tmp = forall_tmp->prev_nest;
3127 /* If there are no loops left, we have our constant result. */
3132 /* Otherwise, create a temporary variable to compute the result. */
/* Accumulate INNER_SIZE once per executed (possibly masked) iteration
   of the remaining loop levels.  */
3133 number = gfc_create_var (gfc_array_index_type, "num");
3134 gfc_add_modify (block, number, gfc_index_zero_node);
3136 gfc_start_block (&body);
3137 if (inner_size_body)
3138 gfc_add_block_to_block (&body, inner_size_body);
3140 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3141 gfc_array_index_type, number, inner_size);
3144 gfc_add_modify (&body, number, tmp);
3145 tmp = gfc_finish_block (&body);
3147 /* Generate loops. */
3148 if (forall_tmp != NULL)
3149 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
3151 gfc_add_expr_to_block (block, tmp);
3157 /* Allocate temporary for forall construct. SIZE is the size of temporary
3158 needed. PTEMP1 is returned for space free. */
/* SIZE is an element count; the byte size is SIZE * sizeof (TYPE).
   Returns the (dereferenced, if heap-allocated) temporary array.  */
3161 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
3168 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
/* Skip the multiply when the element size is one byte.  */
3169 if (!integer_onep (unit))
3170 bytesize = fold_build2_loc (input_location, MULT_EXPR,
3171 gfc_array_index_type, size, unit);
3176 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
3179 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3184 /* Allocate temporary for forall construct according to the information in
3185 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3186 assignment inside forall. PTEMP1 is returned for space free. */
/* Thin wrapper: total size = (overall iteration count) * INNER_SIZE,
   then delegate the actual allocation.  */
3189 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
3190 tree inner_size, stmtblock_t * inner_size_body,
3191 stmtblock_t * block, tree * ptemp1)
3195 /* Calculate the total size of temporary needed in forall construct. */
3196 size = compute_overall_iter_number (nested_forall_info, inner_size,
3197 inner_size_body, block);
3199 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
3203 /* Handle assignments inside forall which need temporary.
3205 forall (i=start:end:stride; maskexpr)
3208 (where e,f<i> are arbitrary expressions possibly involving i
3209 and there is a dependency between e<i> and f<i>)
3211 masktmp(:) = maskexpr(:)
3216 for (i = start; i <= end; i += stride)
3220 for (i = start; i <= end; i += stride)
3222 if (masktmp[maskindex++])
3223 tmp[count1++] = f<i>
3227 for (i = start; i <= end; i += stride)
3229 if (masktmp[maskindex++])
3230 e<i> = tmp[count1++]
/* Two full passes over the FORALL nest: first evaluate every rhs into
   a linear temporary, then copy the temporary into the lhs.  WHEREMASK
   (sense flipped by INVERT) additionally guards each element when the
   assignment sits inside a WHERE.  Generated code goes into BLOCK.  */
3235 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3236 tree wheremask, bool invert,
3237 forall_info * nested_forall_info,
3238 stmtblock_t * block)
3246 stmtblock_t inner_size_body;
3248 /* Create vars. count1 is the current iterator number of the nested
3250 count1 = gfc_create_var (gfc_array_index_type, "count1");
3252 /* Count is the wheremask index. */
3255 count = gfc_create_var (gfc_array_index_type, "count");
3256 gfc_add_modify (block, count, gfc_index_zero_node);
3261 /* Initialize count1. */
3262 gfc_add_modify (block, count1, gfc_index_zero_node);
3264 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3265 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3266 gfc_init_block (&inner_size_body);
3267 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3270 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3271 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
/* Character lhs: make sure the length's backend decl exists so the
   temporary's element type carries the right string length.  */
3273 if (!expr1->ts.u.cl->backend_decl)
3276 gfc_init_se (&tse, NULL);
3277 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3278 expr1->ts.u.cl->backend_decl = tse.expr;
3280 type = gfc_get_character_type_len (gfc_default_character_kind,
3281 expr1->ts.u.cl->backend_decl);
3284 type = gfc_typenode_for_spec (&expr1->ts);
3286 /* Allocate temporary for nested forall construct according to the
3287 information in nested_forall_info and inner_size. */
3288 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3289 &inner_size_body, block, &ptemp1);
3291 /* Generate codes to copy rhs to the temporary . */
3292 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3295 /* Generate body and loops according to the information in
3296 nested_forall_info. */
3297 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3298 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second (temp -> lhs) pass so both
   passes walk the temporary in the same order.  */
3301 gfc_add_modify (block, count1, gfc_index_zero_node);
3305 gfc_add_modify (block, count, gfc_index_zero_node);
3307 /* Generate codes to copy the temporary to lhs. */
3308 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3311 /* Generate body and loops according to the information in
3312 nested_forall_info. */
3313 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3314 gfc_add_expr_to_block (block, tmp);
3318 /* Free the temporary. */
3319 tmp = gfc_call_free (ptemp1);
3320 gfc_add_expr_to_block (block, tmp);
3325 /* Translate pointer assignment inside FORALL which need temporary. */
/* Same two-pass scheme as gfc_trans_assign_need_temp, but for pointer
   assignment: first pass stores every rhs pointer (scalar case) or a
   complete rhs descriptor (array case) into a linear temporary, second
   pass assigns them to the lhs pointers.  Generated code goes into
   BLOCK.  */
3328 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3329 forall_info * nested_forall_info,
3330 stmtblock_t * block)
3337 gfc_array_info *info;
3344 tree tmp, tmp1, ptemp1;
3346 count = gfc_create_var (gfc_array_index_type, "count");
3347 gfc_add_modify (block, count, gfc_index_zero_node);
3349 inner_size = gfc_index_one_node;
3350 lss = gfc_walk_expr (expr1);
3351 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: the temporary holds plain pointers.  */
3352 if (lss == gfc_ss_terminator)
3354 type = gfc_typenode_for_spec (&expr1->ts);
3355 type = build_pointer_type (type);
3357 /* Allocate temporary for nested forall construct according to the
3358 information in nested_forall_info and inner_size. */
3359 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3360 inner_size, NULL, block, &ptemp1);
/* Pass 1: store the rhs address into the temporary for each
   iteration.  */
3361 gfc_start_block (&body);
3362 gfc_init_se (&lse, NULL);
3363 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3364 gfc_init_se (&rse, NULL);
3365 rse.want_pointer = 1;
3366 gfc_conv_expr (&rse, expr2);
3367 gfc_add_block_to_block (&body, &rse.pre);
3368 gfc_add_modify (&body, lse.expr,
3369 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3370 gfc_add_block_to_block (&body, &rse.post);
3372 /* Increment count. */
3373 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3374 count, gfc_index_one_node);
3375 gfc_add_modify (&body, count, tmp);
3377 tmp = gfc_finish_block (&body);
3379 /* Generate body and loops according to the information in
3380 nested_forall_info. */
3381 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3382 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and assign the saved pointers to the lhs.  */
3385 gfc_add_modify (block, count, gfc_index_zero_node);
3387 gfc_start_block (&body);
3388 gfc_init_se (&lse, NULL);
3389 gfc_init_se (&rse, NULL);
3390 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3391 lse.want_pointer = 1;
3392 gfc_conv_expr (&lse, expr1);
3393 gfc_add_block_to_block (&body, &lse.pre);
3394 gfc_add_modify (&body, lse.expr, rse.expr);
3395 gfc_add_block_to_block (&body, &lse.post);
3396 /* Increment count. */
3397 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3398 count, gfc_index_one_node);
3399 gfc_add_modify (&body, count, tmp);
3400 tmp = gfc_finish_block (&body);
3402 /* Generate body and loops according to the information in
3403 nested_forall_info. */
3404 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3405 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: the temporary holds full array
   descriptors built from the rhs section's shape.  */
3409 gfc_init_loopinfo (&loop);
3411 /* Associate the SS with the loop. */
3412 gfc_add_ss_to_loop (&loop, rss);
3414 /* Setup the scalarizing loops and bounds. */
3415 gfc_conv_ss_startstride (&loop);
3417 gfc_conv_loop_setup (&loop, &expr2->where);
3419 info = &rss->info->data.array;
3420 desc = info->descriptor;
3422 /* Make a new descriptor. */
3423 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3424 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3425 loop.from, loop.to, 1,
3426 GFC_ARRAY_UNKNOWN, true);
3428 /* Allocate temporary for nested forall construct. */
3429 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3430 inner_size, NULL, block, &ptemp1);
/* Pass 1: build each iteration's rhs descriptor directly into the
   temporary slot (direct_byref).  */
3431 gfc_start_block (&body);
3432 gfc_init_se (&lse, NULL);
3433 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3434 lse.direct_byref = 1;
3435 rss = gfc_walk_expr (expr2);
3436 gfc_conv_expr_descriptor (&lse, expr2, rss);
3438 gfc_add_block_to_block (&body, &lse.pre);
3439 gfc_add_block_to_block (&body, &lse.post);
3441 /* Increment count. */
3442 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3443 count, gfc_index_one_node);
3444 gfc_add_modify (&body, count, tmp);
3446 tmp = gfc_finish_block (&body);
3448 /* Generate body and loops according to the information in
3449 nested_forall_info. */
3450 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3451 gfc_add_expr_to_block (block, tmp);
/* Pass 2: rewind COUNT and copy each saved descriptor into the lhs
   pointer's descriptor.  */
3454 gfc_add_modify (block, count, gfc_index_zero_node);
3456 parm = gfc_build_array_ref (tmp1, count, NULL);
3457 lss = gfc_walk_expr (expr1);
3458 gfc_init_se (&lse, NULL);
3459 gfc_conv_expr_descriptor (&lse, expr1, lss);
3460 gfc_add_modify (&lse.pre, lse.expr, parm);
3461 gfc_start_block (&body);
3462 gfc_add_block_to_block (&body, &lse.pre);
3463 gfc_add_block_to_block (&body, &lse.post);
3465 /* Increment count. */
3466 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3467 count, gfc_index_one_node);
3468 gfc_add_modify (&body, count, tmp);
3470 tmp = gfc_finish_block (&body);
3472 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3473 gfc_add_expr_to_block (block, tmp);
3475 /* Free the temporary. */
3478 tmp = gfc_call_free (ptemp1);
3479 gfc_add_expr_to_block (block, tmp);
3484 /* FORALL and WHERE statements are really nasty, especially when you nest
3485 them. All the rhs of a forall assignment must be evaluated before the
3486 actual assignments are performed. Presumably this also applies to all the
3487 assignments in an inner where statement. */
3489 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3490 linear array, relying on the fact that we process in the same order in all
3493 forall (i=start:end:stride; maskexpr)
3497 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3499 count = ((end + 1 - start) / stride)
3500 masktmp(:) = maskexpr(:)
3503 for (i = start; i <= end; i += stride)
3505 if (masktmp[maskindex++])
3509 for (i = start; i <= end; i += stride)
3511 if (masktmp[maskindex++])
3515 Note that this code only works when there are no dependencies.
3516 Forall loop with array assignments and data dependencies are a real pain,
3517 because the size of the temporary cannot always be determined before the
3518 loop is executed. This problem is compounded by the presence of nested
3523 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3540 tree cycle_label = NULL_TREE;
3544 gfc_forall_iterator *fa;
3547 gfc_saved_var *saved_vars;
3548 iter_info *this_forall;
3552 /* Do nothing if the mask is false. */
3554 && code->expr1->expr_type == EXPR_CONSTANT
3555 && !code->expr1->value.logical)
3556 return build_empty_stmt (input_location);
3559 /* Count the FORALL index number. */
3560 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3564 /* Allocate the space for var, start, end, step, varexpr. */
3565 var = XCNEWVEC (tree, nvar);
3566 start = XCNEWVEC (tree, nvar);
3567 end = XCNEWVEC (tree, nvar);
3568 step = XCNEWVEC (tree, nvar);
3569 varexpr = XCNEWVEC (gfc_expr *, nvar);
3570 saved_vars = XCNEWVEC (gfc_saved_var, nvar);
3572 /* Allocate the space for info. */
3573 info = XCNEW (forall_info);
3575 gfc_start_block (&pre);
3576 gfc_init_block (&post);
3577 gfc_init_block (&block);
3580 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3582 gfc_symbol *sym = fa->var->symtree->n.sym;
3584 /* Allocate space for this_forall. */
3585 this_forall = XCNEW (iter_info);
3587 /* Create a temporary variable for the FORALL index. */
3588 tmp = gfc_typenode_for_spec (&sym->ts);
3589 var[n] = gfc_create_var (tmp, sym->name);
3590 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3592 /* Record it in this_forall. */
3593 this_forall->var = var[n];
3595 /* Replace the index symbol's backend_decl with the temporary decl. */
3596 sym->backend_decl = var[n];
3598 /* Work out the start, end and stride for the loop. */
3599 gfc_init_se (&se, NULL);
3600 gfc_conv_expr_val (&se, fa->start);
3601 /* Record it in this_forall. */
3602 this_forall->start = se.expr;
3603 gfc_add_block_to_block (&block, &se.pre);
3606 gfc_init_se (&se, NULL);
3607 gfc_conv_expr_val (&se, fa->end);
3608 /* Record it in this_forall. */
3609 this_forall->end = se.expr;
3610 gfc_make_safe_expr (&se);
3611 gfc_add_block_to_block (&block, &se.pre);
3614 gfc_init_se (&se, NULL);
3615 gfc_conv_expr_val (&se, fa->stride);
3616 /* Record it in this_forall. */
3617 this_forall->step = se.expr;
3618 gfc_make_safe_expr (&se);
3619 gfc_add_block_to_block (&block, &se.pre);
3622 /* Set the NEXT field of this_forall to NULL. */
3623 this_forall->next = NULL;
3624 /* Link this_forall to the info construct. */
3625 if (info->this_loop)
3627 iter_info *iter_tmp = info->this_loop;
3628 while (iter_tmp->next != NULL)
3629 iter_tmp = iter_tmp->next;
3630 iter_tmp->next = this_forall;
3633 info->this_loop = this_forall;
3639 /* Calculate the size needed for the current forall level. */
3640 size = gfc_index_one_node;
3641 for (n = 0; n < nvar; n++)
3643 /* size = (end + step - start) / step. */
3644 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3646 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3648 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3650 tmp = convert (gfc_array_index_type, tmp);
3652 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3656 /* Record the nvar and size of current forall level. */
3662 /* If the mask is .true., consider the FORALL unconditional. */
3663 if (code->expr1->expr_type == EXPR_CONSTANT
3664 && code->expr1->value.logical)
3672 /* First we need to allocate the mask. */
3675 /* As the mask array can be very big, prefer compact boolean types. */
3676 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3677 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3678 size, NULL, &block, &pmask);
3679 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3681 /* Record them in the info structure. */
3682 info->maskindex = maskindex;
3687 /* No mask was specified. */
3688 maskindex = NULL_TREE;
3689 mask = pmask = NULL_TREE;
3692 /* Link the current forall level to nested_forall_info. */
3693 info->prev_nest = nested_forall_info;
3694 nested_forall_info = info;
3696 /* Copy the mask into a temporary variable if required.
3697 For now we assume a mask temporary is needed. */
3700 /* As the mask array can be very big, prefer compact boolean types. */
3701 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3703 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3705 /* Start of mask assignment loop body. */
3706 gfc_start_block (&body);
3708 /* Evaluate the mask expression. */
3709 gfc_init_se (&se, NULL);
3710 gfc_conv_expr_val (&se, code->expr1);
3711 gfc_add_block_to_block (&body, &se.pre);
3713 /* Store the mask. */
3714 se.expr = convert (mask_type, se.expr);
3716 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3717 gfc_add_modify (&body, tmp, se.expr);
3719 /* Advance to the next mask element. */
3720 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3721 maskindex, gfc_index_one_node);
3722 gfc_add_modify (&body, maskindex, tmp);
3724 /* Generate the loops. */
3725 tmp = gfc_finish_block (&body);
3726 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3727 gfc_add_expr_to_block (&block, tmp);
3730 if (code->op == EXEC_DO_CONCURRENT)
3732 gfc_init_block (&body);
3733 cycle_label = gfc_build_label_decl (NULL_TREE);
3734 code->cycle_label = cycle_label;
3735 tmp = gfc_trans_code (code->block->next);
3736 gfc_add_expr_to_block (&body, tmp);
3738 if (TREE_USED (cycle_label))
3740 tmp = build1_v (LABEL_EXPR, cycle_label);
3741 gfc_add_expr_to_block (&body, tmp);
3744 tmp = gfc_finish_block (&body);
3745 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3746 gfc_add_expr_to_block (&block, tmp);
3750 c = code->block->next;
3752 /* TODO: loop merging in FORALL statements. */
3753 /* Now that we've got a copy of the mask, generate the assignment loops. */
3759 /* A scalar or array assignment. DO the simple check for
3760 lhs to rhs dependencies. These make a temporary for the
3761 rhs and form a second forall block to copy to variable. */
3762 need_temp = check_forall_dependencies(c, &pre, &post);
3764 /* Temporaries due to array assignment data dependencies introduce
3765 no end of problems. */
3767 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3768 nested_forall_info, &block);
3771 /* Use the normal assignment copying routines. */
3772 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3774 /* Generate body and loops. */
3775 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3777 gfc_add_expr_to_block (&block, tmp);
3780 /* Cleanup any temporary symtrees that have been made to deal
3781 with dependencies. */
3783 cleanup_forall_symtrees (c);
3788 /* Translate WHERE or WHERE construct nested in FORALL. */
3789 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3792 /* Pointer assignment inside FORALL. */
3793 case EXEC_POINTER_ASSIGN:
3794 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3796 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3797 nested_forall_info, &block);
3800 /* Use the normal assignment copying routines. */
3801 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3803 /* Generate body and loops. */
3804 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3806 gfc_add_expr_to_block (&block, tmp);
3811 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3812 gfc_add_expr_to_block (&block, tmp);
3815 /* Explicit subroutine calls are prevented by the frontend but interface
3816 assignments can legitimately produce them. */
3817 case EXEC_ASSIGN_CALL:
3818 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3819 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3820 gfc_add_expr_to_block (&block, tmp);
3831 /* Restore the original index variables. */
3832 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3833 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3835 /* Free the space for var, start, end, step, varexpr. */
3843 for (this_forall = info->this_loop; this_forall;)
3845 iter_info *next = this_forall->next;
3850 /* Free the space for this forall_info. */
3855 /* Free the temporary for the mask. */
3856 tmp = gfc_call_free (pmask);
3857 gfc_add_expr_to_block (&block, tmp);
3860 pushdecl (maskindex);
3862 gfc_add_block_to_block (&pre, &block);
3863 gfc_add_block_to_block (&pre, &post);
3865 return gfc_finish_block (&pre);
3869 /* Translate the FORALL statement or construct. */
3871 tree gfc_trans_forall (gfc_code * code)
3873 return gfc_trans_forall_1 (code, NULL);
3877 /* Translate the DO CONCURRENT construct. */
3879 tree gfc_trans_do_concurrent (gfc_code * code)
3881 return gfc_trans_forall_1 (code, NULL);
3885 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3886 If the WHERE construct is nested in FORALL, compute the overall temporary
3887 needed by the WHERE mask expression multiplied by the iterator number of the nested FORALL.
3889 ME is the WHERE mask expression.
3890 MASK is the current execution mask upon input, whose sense may or may
3891 not be inverted as specified by the INVERT argument.
3892 CMASK is the updated execution mask on output, or NULL if not required.
3893 PMASK is the pending execution mask on output, or NULL if not required.
3894 BLOCK is the block in which to place the condition evaluation loops. */
/* NOTE(review): the fused numeric prefixes on these lines are
   non-contiguous, so braces, declarations and guard lines appear to
   have been elided by the extraction; the surviving code is kept
   byte-identical below and only comments are added.  */
3897 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3898 tree mask, bool invert, tree cmask, tree pmask,
3899 tree mask_type, stmtblock_t * block)
3904 stmtblock_t body, body1;
3905 tree count, cond, mtmp;
3908 gfc_init_loopinfo (&loop);
/* Walk the mask expression twice: one chain (lss) drives the
   scalarizer, the other (rse/rss) yields the mask value itself.  */
3910 lss = gfc_walk_expr (me);
3911 rss = gfc_walk_expr (me);
3913 /* Variable to index the temporary. */
3914 count = gfc_create_var (gfc_array_index_type, "count");
3915 /* Initialize count. */
3916 gfc_add_modify (block, count, gfc_index_zero_node);
3918 gfc_start_block (&body);
3920 gfc_init_se (&rse, NULL);
3921 gfc_init_se (&lse, NULL);
/* lss == gfc_ss_terminator means the mask expression is scalar; the
   array case below sets up the scalarization loop instead.  */
3923 if (lss == gfc_ss_terminator)
3925 gfc_init_block (&body1);
3929 /* Initialize the loop. */
3930 gfc_init_loopinfo (&loop);
3932 /* We may need LSS to determine the shape of the expression. */
3933 gfc_add_ss_to_loop (&loop, lss);
3934 gfc_add_ss_to_loop (&loop, rss);
3936 gfc_conv_ss_startstride (&loop);
3937 gfc_conv_loop_setup (&loop, &me->where);
3939 gfc_mark_ss_chain_used (rss, 1);
3940 /* Start the loop body. */
3941 gfc_start_scalarized_body (&loop, &body1);
3943 /* Translate the expression. */
3944 gfc_copy_loopinfo_to_se (&rse, &loop);
3946 gfc_conv_expr (&rse, me);
3949 /* Variable to evaluate mask condition. */
3950 cond = gfc_create_var (mask_type, "cond");
/* mtmp caches the current execution-mask element; it is only needed
   when an output mask (cmask or pmask) must be combined with it.  */
3951 if (mask && (cmask || pmask))
3952 mtmp = gfc_create_var (mask_type, "mask");
3953 else mtmp = NULL_TREE;
3955 gfc_add_block_to_block (&body1, &lse.pre);
3956 gfc_add_block_to_block (&body1, &rse.pre);
3958 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3960 if (mask && (cmask || pmask))
3962 tmp = gfc_build_array_ref (mask, count, NULL);
/* The TRUTH_NOT_EXPR here flips the incoming mask element; presumably
   applied only when INVERT is set (guard line elided) — verify.  */
3964 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3965 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[count] = mask-element AND cond.  */
3970 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3973 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3975 gfc_add_modify (&body1, tmp1, tmp);
/* pmask[count] = mask-element AND NOT cond.  */
3980 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3981 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3983 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3985 gfc_add_modify (&body1, tmp1, tmp);
3988 gfc_add_block_to_block (&body1, &lse.post);
3989 gfc_add_block_to_block (&body1, &rse.post);
3991 if (lss == gfc_ss_terminator)
3993 gfc_add_block_to_block (&body, &body1)
3997 /* Increment count. */
3998 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3999 count, gfc_index_one_node);
4000 gfc_add_modify (&body1, count, tmp1);
4002 /* Generate the copying loops. */
4003 gfc_trans_scalarizing_loops (&loop, &body1);
4005 gfc_add_block_to_block (&body, &loop.pre);
4006 gfc_add_block_to_block (&body, &loop.post);
4008 gfc_cleanup_loop (&loop);
4009 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4010 as tree nodes in SS may not be valid in different scope. */
4013 tmp1 = gfc_finish_block (&body);
4014 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4015 if (nested_forall_info != NULL)
4016 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
4018 gfc_add_expr_to_block (block, tmp1);
4022 /* Translate an assignment statement in a WHERE statement or construct
4023 statement. The MASK expression is used to control which elements
4024 of EXPR1 shall be assigned. The sense of MASK is specified by
/* NOTE(review): non-contiguous fused line numbers indicate elided
   lines (guards, braces, declarations); code is kept byte-identical
   and only comments are added.  COUNT1/COUNT2 index the control mask
   for the main body and the temp-copy-back body respectively.  */
4028 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
4029 tree mask, bool invert,
4030 tree count1, tree count2,
4036 gfc_ss *lss_section;
4043 tree index, maskexpr;
4045 /* A defined assignment. */
4046 if (cnext && cnext->resolved_sym)
4047 return gfc_trans_call (cnext, true, mask, count1, invert);
4050 /* TODO: handle this special case.
4051 Special case a single function returning an array. */
4052 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
4054 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
4060 /* Assignment of the form lhs = rhs. */
4061 gfc_start_block (&block);
4063 gfc_init_se (&lse, NULL);
4064 gfc_init_se (&rse, NULL);
4067 lss = gfc_walk_expr (expr1);
4070 /* In each where-assign-stmt, the mask-expr and the variable being
4071 defined shall be arrays of the same shape. */
4072 gcc_assert (lss != gfc_ss_terminator);
4074 /* The assignment needs scalarization. */
4077 /* Find a non-scalar SS from the lhs. */
4078 while (lss_section != gfc_ss_terminator
4079 && lss_section->info->type != GFC_SS_SECTION)
4080 lss_section = lss_section->next;
4082 gcc_assert (lss_section != gfc_ss_terminator);
4084 /* Initialize the scalarizer. */
4085 gfc_init_loopinfo (&loop);
4088 rss = gfc_walk_expr (expr2);
4089 if (rss == gfc_ss_terminator)
4091 /* The rhs is scalar. Add a ss for the expression. */
4092 rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
4093 rss->info->where = 1;
4096 /* Associate the SS with the loop. */
4097 gfc_add_ss_to_loop (&loop, lss);
4098 gfc_add_ss_to_loop (&loop, rss);
4100 /* Calculate the bounds of the scalarization. */
4101 gfc_conv_ss_startstride (&loop);
4103 /* Resolve any data dependencies in the statement. */
4104 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
4106 /* Setup the scalarizing loops. */
4107 gfc_conv_loop_setup (&loop, &expr2->where);
4109 /* Setup the gfc_se structures. */
4110 gfc_copy_loopinfo_to_se (&lse, &loop);
4111 gfc_copy_loopinfo_to_se (&rse, &loop);
4114 gfc_mark_ss_chain_used (rss, 1);
/* No temporary needed: write through the lhs chain directly.
   Otherwise the scalarizer created loop.temp_ss for the rhs value
   and the copy-back pass below moves it into the real lhs.  */
4115 if (loop.temp_ss == NULL)
4118 gfc_mark_ss_chain_used (lss, 1);
4122 lse.ss = loop.temp_ss;
4123 gfc_mark_ss_chain_used (lss, 3);
4124 gfc_mark_ss_chain_used (loop.temp_ss, 3);
4127 /* Start the scalarized loop body. */
4128 gfc_start_scalarized_body (&loop, &body);
4130 /* Translate the expression. */
4131 gfc_conv_expr (&rse, expr2);
4132 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
4133 gfc_conv_tmp_array_ref (&lse);
4135 gfc_conv_expr (&lse, expr1);
4137 /* Form the mask expression according to the mask. */
4139 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* TRUTH_NOT_EXPR flips the mask sense; presumably guarded by INVERT
   (guard line elided) — verify against full source.  */
4141 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4142 TREE_TYPE (maskexpr), maskexpr);
4144 /* Use the scalar assignment as is. */
4145 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
4146 loop.temp_ss != NULL, false, true);
4148 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
4150 gfc_add_expr_to_block (&body, tmp);
4152 if (lss == gfc_ss_terminator)
4154 /* Increment count1. */
4155 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
4156 count1, gfc_index_one_node);
4157 gfc_add_modify (&body, count1, tmp);
4159 /* Use the scalar assignment as is. */
4160 gfc_add_block_to_block (&block, &body);
4164 gcc_assert (lse.ss == gfc_ss_terminator
4165 && rse.ss == gfc_ss_terminator);
4167 if (loop.temp_ss != NULL)
4169 /* Increment count1 before finish the main body of a scalarized
4171 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4172 gfc_array_index_type, count1, gfc_index_one_node);
4173 gfc_add_modify (&body, count1, tmp);
4174 gfc_trans_scalarized_loop_boundary (&loop, &body);
4176 /* We need to copy the temporary to the actual lhs. */
4177 gfc_init_se (&lse, NULL);
4178 gfc_init_se (&rse, NULL);
4179 gfc_copy_loopinfo_to_se (&lse, &loop);
4180 gfc_copy_loopinfo_to_se (&rse, &loop);
4182 rse.ss = loop.temp_ss;
4185 gfc_conv_tmp_array_ref (&rse);
4186 gfc_conv_expr (&lse, expr1);
4188 gcc_assert (lse.ss == gfc_ss_terminator
4189 && rse.ss == gfc_ss_terminator);
4191 /* Form the mask expression according to the mask tree list. */
4193 maskexpr = gfc_build_array_ref (mask, index, NULL);
4195 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
4196 TREE_TYPE (maskexpr), maskexpr);
4198 /* Use the scalar assignment as is. */
4199 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
4201 tmp = build3_v (COND_EXPR, maskexpr, tmp,
4202 build_empty_stmt (input_location));
4203 gfc_add_expr_to_block (&body, tmp);
4205 /* Increment count2. */
4206 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4207 gfc_array_index_type, count2,
4208 gfc_index_one_node);
4209 gfc_add_modify (&body, count2, tmp);
4213 /* Increment count1. */
4214 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4215 gfc_array_index_type, count1,
4216 gfc_index_one_node);
4217 gfc_add_modify (&body, count1, tmp);
4220 /* Generate the copying loops. */
4221 gfc_trans_scalarizing_loops (&loop, &body);
4223 /* Wrap the whole thing up. */
4224 gfc_add_block_to_block (&block, &loop.pre);
4225 gfc_add_block_to_block (&block, &loop.post);
4226 gfc_cleanup_loop (&loop);
4229 return gfc_finish_block (&block);
4233 /* Translate the WHERE construct or statement.
4234 This function can be called iteratively to translate the nested WHERE
4235 construct or statement.
4236 MASK is the control mask. */
/* NOTE(review): non-contiguous fused line numbers indicate elided
   lines (case labels, braces, guards); code is kept byte-identical
   and only comments are added.  */
4239 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
4240 forall_info * nested_forall_info, stmtblock_t * block)
4242 stmtblock_t inner_size_body;
4243 tree inner_size, size;
4252 tree count1, count2;
4256 tree pcmask = NULL_TREE;
4257 tree ppmask = NULL_TREE;
4258 tree cmask = NULL_TREE;
4259 tree pmask = NULL_TREE;
4260 gfc_actual_arglist *arg;
4262 /* the WHERE statement or the WHERE construct statement. */
4263 cblock = code->block;
4265 /* As the mask array can be very big, prefer compact boolean types. */
4266 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
4268 /* Determine which temporary masks are needed. */
4271 /* One clause: No ELSEWHEREs. */
4272 need_cmask = (cblock->next != 0);
4275 else if (cblock->block->block)
4277 /* Three or more clauses: Conditional ELSEWHEREs. */
4281 else if (cblock->next)
4283 /* Two clauses, the first non-empty. */
/* The pending mask is only needed when nested inside another WHERE
   (mask != NULL_TREE) and the ELSEWHERE body is non-empty.  */
4285 need_pmask = (mask != NULL_TREE
4286 && cblock->block->next != 0);
4288 else if (!cblock->block->next)
4290 /* Two clauses, both empty. */
4294 /* Two clauses, the first empty, the second non-empty. */
4297 need_cmask = (cblock->block->expr1 != 0);
/* Allocate the control (cmask) and pending (pmask) mask temporaries,
   sized for the whole — possibly FORALL-nested — iteration space.  */
4306 if (need_cmask || need_pmask)
4308 /* Calculate the size of temporary needed by the mask-expr. */
4309 gfc_init_block (&inner_size_body);
4310 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4311 &inner_size_body, &lss, &rss);
4313 gfc_free_ss_chain (lss);
4314 gfc_free_ss_chain (rss);
4316 /* Calculate the total size of temporary needed. */
4317 size = compute_overall_iter_number (nested_forall_info, inner_size,
4318 &inner_size_body, block);
4320 /* Check whether the size is negative. */
4321 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4322 gfc_index_zero_node);
4323 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4324 cond, gfc_index_zero_node, size);
4325 size = gfc_evaluate_now (size, block);
4327 /* Allocate temporary for WHERE mask if needed. */
4329 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4332 /* Allocate temporary for !mask if needed. */
4334 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4340 /* Each time around this loop, the where clause is conditional
4341 on the value of mask and invert, which are updated at the
4342 bottom of the loop. */
4344 /* Has mask-expr. */
4347 /* Ensure that the WHERE mask will be evaluated exactly once.
4348 If there are no statements in this WHERE/ELSEWHERE clause,
4349 then we don't need to update the control mask (cmask).
4350 If this is the last clause of the WHERE construct, then
4351 we don't need to update the pending control mask (pmask). */
4353 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4355 cblock->next ? cmask : NULL_TREE,
4356 cblock->block ? pmask : NULL_TREE,
4359 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4361 (cblock->next || cblock->block)
4362 ? cmask : NULL_TREE,
4363 NULL_TREE, mask_type, block);
4367 /* It's a final elsewhere-stmt. No mask-expr is present. */
4371 /* The body of this where clause are controlled by cmask with
4372 sense specified by invert. */
4374 /* Get the assignment statement of a WHERE statement, or the first
4375 statement in where-body-construct of a WHERE construct. */
4376 cnext = cblock->next;
4381 /* WHERE assignment statement. */
4382 case EXEC_ASSIGN_CALL:
4384 arg = cnext->ext.actual;
4385 expr1 = expr2 = NULL;
4386 for (; arg; arg = arg->next)
4398 expr1 = cnext->expr1;
4399 expr2 = cnext->expr2;
4401 if (nested_forall_info != NULL)
4403 need_temp = gfc_check_dependency (expr1, expr2, 0);
4404 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4405 gfc_trans_assign_need_temp (expr1, expr2,
4407 nested_forall_info, block);
4410 /* Variables to control maskexpr. */
4411 count1 = gfc_create_var (gfc_array_index_type, "count1");
4412 count2 = gfc_create_var (gfc_array_index_type, "count2");
4413 gfc_add_modify (block, count1, gfc_index_zero_node);
4414 gfc_add_modify (block, count2, gfc_index_zero_node);
4416 tmp = gfc_trans_where_assign (expr1, expr2,
4421 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4423 gfc_add_expr_to_block (block, tmp);
4428 /* Variables to control maskexpr. */
4429 count1 = gfc_create_var (gfc_array_index_type, "count1");
4430 count2 = gfc_create_var (gfc_array_index_type, "count2");
4431 gfc_add_modify (block, count1, gfc_index_zero_node);
4432 gfc_add_modify (block, count2, gfc_index_zero_node);
4434 tmp = gfc_trans_where_assign (expr1, expr2,
4438 gfc_add_expr_to_block (block, tmp);
4443 /* WHERE or WHERE construct is part of a where-body-construct. */
/* Recursive case: a nested WHERE inherits our control mask.  */
4445 gfc_trans_where_2 (cnext, cmask, invert,
4446 nested_forall_info, block);
4453 /* The next statement within the same where-body-construct. */
4454 cnext = cnext->next;
4456 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4457 cblock = cblock->block;
4458 if (mask == NULL_TREE)
4460 /* If we're the initial WHERE, we can simply invert the sense
4461 of the current mask to obtain the "mask" for the remaining
4468 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4474 /* If we allocated a pending mask array, deallocate it now. */
4477 tmp = gfc_call_free (ppmask);
4478 gfc_add_expr_to_block (block, tmp);
4481 /* If we allocated a current mask array, deallocate it now. */
4484 tmp = gfc_call_free (pcmask);
4485 gfc_add_expr_to_block (block, tmp);
4489 /* Translate a simple WHERE construct or statement without dependencies.
4490 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4491 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4492 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* NOTE(review): non-contiguous fused line numbers indicate elided
   lines; in particular the assignment to CEXPR (used in the COND_EXPR
   below) and the `if (eblock)` guards are not visible here.  Code is
   kept byte-identical; only comments are added.  */
4495 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4497 stmtblock_t block, body;
4498 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4499 tree tmp, cexpr, tstmt, estmt;
4500 gfc_ss *css, *tdss, *tsss;
4501 gfc_se cse, tdse, tsse, edse, esse;
4506 /* Allow the scalarizer to workshare simple where loops. */
4507 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4508 ompws_flags |= OMPWS_SCALARIZER_WS;
/* cond/tdst/tsrc: WHERE condition and its single assignment;
   edst/esrc: the ELSEWHERE assignment, or NULL without ELSEWHERE.  */
4510 cond = cblock->expr1;
4511 tdst = cblock->next->expr1;
4512 tsrc = cblock->next->expr2;
4513 edst = eblock ? eblock->next->expr1 : NULL;
4514 esrc = eblock ? eblock->next->expr2 : NULL;
4516 gfc_start_block (&block);
4517 gfc_init_loopinfo (&loop);
4519 /* Handle the condition. */
4520 gfc_init_se (&cse, NULL);
4521 css = gfc_walk_expr (cond);
4522 gfc_add_ss_to_loop (&loop, css);
4524 /* Handle the then-clause. */
4525 gfc_init_se (&tdse, NULL);
4526 gfc_init_se (&tsse, NULL);
4527 tdss = gfc_walk_expr (tdst);
4528 tsss = gfc_walk_expr (tsrc);
4529 if (tsss == gfc_ss_terminator)
4531 tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
4532 tsss->info->where = 1;
4534 gfc_add_ss_to_loop (&loop, tdss);
4535 gfc_add_ss_to_loop (&loop, tsss);
4539 /* Handle the else clause. */
4540 gfc_init_se (&edse, NULL);
4541 gfc_init_se (&esse, NULL);
4542 edss = gfc_walk_expr (edst);
4543 esss = gfc_walk_expr (esrc);
4544 if (esss == gfc_ss_terminator)
4546 esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
4547 esss->info->where = 1;
4549 gfc_add_ss_to_loop (&loop, edss);
4550 gfc_add_ss_to_loop (&loop, esss);
4553 gfc_conv_ss_startstride (&loop);
4554 gfc_conv_loop_setup (&loop, &tdst->where);
4556 gfc_mark_ss_chain_used (css, 1);
4557 gfc_mark_ss_chain_used (tdss, 1);
4558 gfc_mark_ss_chain_used (tsss, 1);
4561 gfc_mark_ss_chain_used (edss, 1);
4562 gfc_mark_ss_chain_used (esss, 1);
4565 gfc_start_scalarized_body (&loop, &body);
4567 gfc_copy_loopinfo_to_se (&cse, &loop);
4568 gfc_copy_loopinfo_to_se (&tdse, &loop);
4569 gfc_copy_loopinfo_to_se (&tsse, &loop);
4575 gfc_copy_loopinfo_to_se (&edse, &loop);
4576 gfc_copy_loopinfo_to_se (&esse, &loop);
4581 gfc_conv_expr (&cse, cond);
4582 gfc_add_block_to_block (&body, &cse.pre);
4585 gfc_conv_expr (&tsse, tsrc);
4586 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4587 gfc_conv_tmp_array_ref (&tdse);
4589 gfc_conv_expr (&tdse, tdst);
4593 gfc_conv_expr (&esse, esrc);
4594 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4595 gfc_conv_tmp_array_ref (&edse);
4597 gfc_conv_expr (&edse, edst);
/* The two masked assignments are fused into a single element-wise
   conditional: cond ? then-assign : else-assign (or empty stmt).  */
4600 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4601 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4603 : build_empty_stmt (input_location);
4604 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4605 gfc_add_expr_to_block (&body, tmp);
4606 gfc_add_block_to_block (&body, &cse.post);
4608 gfc_trans_scalarizing_loops (&loop, &body);
4609 gfc_add_block_to_block (&block, &loop.pre);
4610 gfc_add_block_to_block (&block, &loop.post);
4611 gfc_cleanup_loop (&loop);
4613 return gfc_finish_block (&block);
4616 /* As the WHERE or WHERE construct statement can be nested, we call
4617 gfc_trans_where_2 to do the translation, and pass the initial
4618 NULL values for both the control mask and the pending control mask. */
4621 gfc_trans_where (gfc_code * code)
4627 cblock = code->block;
4629 && cblock->next->op == EXEC_ASSIGN
4630 && !cblock->next->next)
4632 eblock = cblock->block;
4635 /* A simple "WHERE (cond) x = y" statement or block is
4636 dependence free if cond is not dependent upon writing x,
4637 and the source y is unaffected by the destination x. */
4638 if (!gfc_check_dependency (cblock->next->expr1,
4640 && !gfc_check_dependency (cblock->next->expr1,
4641 cblock->next->expr2, 0))
4642 return gfc_trans_where_3 (cblock, NULL);
4644 else if (!eblock->expr1
4647 && eblock->next->op == EXEC_ASSIGN
4648 && !eblock->next->next)
4650 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4651 block is dependence free if cond is not dependent on writes
4652 to x1 and x2, y1 is not dependent on writes to x2, and y2
4653 is not dependent on writes to x1, and both y's are not
4654 dependent upon their own x's. In addition to this, the
4655 final two dependency checks below exclude all but the same
4656 array reference if the where and elswhere destinations
4657 are the same. In short, this is VERY conservative and this
4658 is needed because the two loops, required by the standard
4659 are coalesced in gfc_trans_where_3. */
4660 if (!gfc_check_dependency(cblock->next->expr1,
4662 && !gfc_check_dependency(eblock->next->expr1,
4664 && !gfc_check_dependency(cblock->next->expr1,
4665 eblock->next->expr2, 1)
4666 && !gfc_check_dependency(eblock->next->expr1,
4667 cblock->next->expr2, 1)
4668 && !gfc_check_dependency(cblock->next->expr1,
4669 cblock->next->expr2, 1)
4670 && !gfc_check_dependency(eblock->next->expr1,
4671 eblock->next->expr2, 1)
4672 && !gfc_check_dependency(cblock->next->expr1,
4673 eblock->next->expr1, 0)
4674 && !gfc_check_dependency(eblock->next->expr1,
4675 cblock->next->expr1, 0))
4676 return gfc_trans_where_3 (cblock, eblock);
4680 gfc_start_block (&block);
4682 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4684 return gfc_finish_block (&block);
4688 /* CYCLE a DO loop. The label decl has already been created by
4689 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4690 node at the head of the loop. We must mark the label as used. */
4693 gfc_trans_cycle (gfc_code * code)
4697 cycle_label = code->ext.which_construct->cycle_label;
4698 gcc_assert (cycle_label);
4700 TREE_USED (cycle_label) = 1;
4701 return build1_v (GOTO_EXPR, cycle_label);
4705 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4706 TREE_VALUE (backend_decl) of the gfc_code node at the head of the loop.  */
4710 gfc_trans_exit (gfc_code * code)
4714 exit_label = code->ext.which_construct->exit_label;
4715 gcc_assert (exit_label);
4717 TREE_USED (exit_label) = 1;
4718 return build1_v (GOTO_EXPR, exit_label);
4722 /* Translate the ALLOCATE statement. */
4725 gfc_trans_allocate (gfc_code * code)
4747 tree memsize = NULL_TREE;
4748 tree classexpr = NULL_TREE;
4750 if (!code->ext.alloc.list)
4753 stat = tmp = memsz = NULL_TREE;
4754 label_errmsg = label_finish = errmsg = errlen = NULL_TREE;
4756 gfc_init_block (&block);
4757 gfc_init_block (&post);
4759 /* STAT= (and maybe ERRMSG=) is present. */
4763 tree gfc_int4_type_node = gfc_get_int_type (4);
4764 stat = gfc_create_var (gfc_int4_type_node, "stat");
4766 /* ERRMSG= only makes sense with STAT=. */
4769 gfc_init_se (&se, NULL);
4770 se.want_pointer = 1;
4771 gfc_conv_expr_lhs (&se, code->expr2);
4773 errlen = se.string_length;
4777 errmsg = null_pointer_node;
4778 errlen = build_int_cst (gfc_charlen_type_node, 0);
4781 /* GOTO destinations. */
4782 label_errmsg = gfc_build_label_decl (NULL_TREE);
4783 label_finish = gfc_build_label_decl (NULL_TREE);
4784 TREE_USED (label_finish) = 0;
4790 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4792 expr = gfc_copy_expr (al->expr);
4794 if (expr->ts.type == BT_CLASS)
4795 gfc_add_data_component (expr);
4797 gfc_init_se (&se, NULL);
4799 se.want_pointer = 1;
4800 se.descriptor_only = 1;
4801 gfc_conv_expr (&se, expr);
4803 /* Evaluate expr3 just once if not a variable. */
4804 if (al == code->ext.alloc.list
4805 && al->expr->ts.type == BT_CLASS
4807 && code->expr3->ts.type == BT_CLASS
4808 && code->expr3->expr_type != EXPR_VARIABLE)
4810 gfc_init_se (&se_sz, NULL);
4811 gfc_conv_expr_reference (&se_sz, code->expr3);
4812 gfc_conv_class_to_class (&se_sz, code->expr3,
4813 code->expr3->ts, false);
4814 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4815 gfc_add_block_to_block (&se.post, &se_sz.post);
4816 classexpr = build_fold_indirect_ref_loc (input_location,
4818 classexpr = gfc_evaluate_now (classexpr, &se.pre);
4819 memsize = gfc_vtable_size_get (classexpr);
4820 memsize = fold_convert (sizetype, memsize);
4824 class_expr = classexpr;
4827 if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, label_finish,
4828 memsz, &nelems, code->expr3))
4830 /* A scalar or derived type. */
4832 /* Determine allocate size. */
4833 if (al->expr->ts.type == BT_CLASS
4835 && memsz == NULL_TREE)
4837 if (code->expr3->ts.type == BT_CLASS)
4839 sz = gfc_copy_expr (code->expr3);
4840 gfc_add_vptr_component (sz);
4841 gfc_add_size_component (sz);
4842 gfc_init_se (&se_sz, NULL);
4843 gfc_conv_expr (&se_sz, sz);
4848 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4850 else if (al->expr->ts.type == BT_CHARACTER
4851 && al->expr->ts.deferred && code->expr3)
4853 if (!code->expr3->ts.u.cl->backend_decl)
4855 /* Convert and use the length expression. */
4856 gfc_init_se (&se_sz, NULL);
4857 if (code->expr3->expr_type == EXPR_VARIABLE
4858 || code->expr3->expr_type == EXPR_CONSTANT)
4860 gfc_conv_expr (&se_sz, code->expr3);
4861 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4863 = gfc_evaluate_now (se_sz.string_length, &se.pre);
4864 gfc_add_block_to_block (&se.pre, &se_sz.post);
4865 memsz = se_sz.string_length;
4867 else if (code->expr3->mold
4868 && code->expr3->ts.u.cl
4869 && code->expr3->ts.u.cl->length)
4871 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
4872 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4873 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4874 gfc_add_block_to_block (&se.pre, &se_sz.post);
4879 /* This is would be inefficient and possibly could
4880 generate wrong code if the result were not stored
4882 if (slen3 == NULL_TREE)
4884 gfc_conv_expr (&se_sz, code->expr3);
4885 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4886 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
4887 gfc_add_block_to_block (&post, &se_sz.post);
4888 slen3 = gfc_evaluate_now (se_sz.string_length,
4895 /* Otherwise use the stored string length. */
4896 memsz = code->expr3->ts.u.cl->backend_decl;
4897 tmp = al->expr->ts.u.cl->backend_decl;
4899 /* Store the string length. */
4900 if (tmp && TREE_CODE (tmp) == VAR_DECL)
4901 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4904 /* Convert to size in bytes, using the character KIND. */
4905 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
4906 tmp = TYPE_SIZE_UNIT (tmp);
4907 memsz = fold_build2_loc (input_location, MULT_EXPR,
4908 TREE_TYPE (tmp), tmp,
4909 fold_convert (TREE_TYPE (tmp), memsz));
4911 else if (al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
4913 gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
4914 gfc_init_se (&se_sz, NULL);
4915 gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
4916 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4917 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4918 gfc_add_block_to_block (&se.pre, &se_sz.post);
4919 /* Store the string length. */
4920 tmp = al->expr->ts.u.cl->backend_decl;
4921 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4923 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4924 tmp = TYPE_SIZE_UNIT (tmp);
4925 memsz = fold_build2_loc (input_location, MULT_EXPR,
4926 TREE_TYPE (tmp), tmp,
4927 fold_convert (TREE_TYPE (se_sz.expr),
4930 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4931 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4932 else if (memsz == NULL_TREE)
4933 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4935 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4937 memsz = se.string_length;
4939 /* Convert to size in bytes, using the character KIND. */
4940 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4941 tmp = TYPE_SIZE_UNIT (tmp);
4942 memsz = fold_build2_loc (input_location, MULT_EXPR,
4943 TREE_TYPE (tmp), tmp,
4944 fold_convert (TREE_TYPE (tmp), memsz));
4947 /* Allocate - for non-pointers with re-alloc checking. */
4948 if (gfc_expr_attr (expr).allocatable)
4949 gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
4950 stat, errmsg, errlen, label_finish, expr);
4952 gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);
4954 if (al->expr->ts.type == BT_DERIVED
4955 && expr->ts.u.derived->attr.alloc_comp)
4957 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4958 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4959 gfc_add_expr_to_block (&se.pre, tmp);
4961 else if (al->expr->ts.type == BT_CLASS)
4963 /* With class objects, it is best to play safe and null the
4964 memory because we cannot know if dynamic types have allocatable
4965 components or not. */
4966 tmp = build_call_expr_loc (input_location,
4967 builtin_decl_explicit (BUILT_IN_MEMSET),
4968 3, se.expr, integer_zero_node, memsz);
4969 gfc_add_expr_to_block (&se.pre, tmp);
4973 gfc_add_block_to_block (&block, &se.pre);
4975 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
4978 tmp = build1_v (GOTO_EXPR, label_errmsg);
4979 parm = fold_build2_loc (input_location, NE_EXPR,
4980 boolean_type_node, stat,
4981 build_int_cst (TREE_TYPE (stat), 0));
4982 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4983 gfc_unlikely (parm), tmp,
4984 build_empty_stmt (input_location));
4985 gfc_add_expr_to_block (&block, tmp);
4988 /* We need the vptr of CLASS objects to be initialized. */
4989 e = gfc_copy_expr (al->expr);
4990 if (e->ts.type == BT_CLASS)
4992 gfc_expr *lhs, *rhs;
4995 lhs = gfc_expr_to_initialize (e);
4996 gfc_add_vptr_component (lhs);
4998 if (class_expr != NULL_TREE)
5000 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5001 gfc_init_se (&lse, NULL);
5002 lse.want_pointer = 1;
5003 gfc_conv_expr (&lse, lhs);
5004 tmp = gfc_class_vptr_get (class_expr);
5005 gfc_add_modify (&block, lse.expr,
5006 fold_convert (TREE_TYPE (lse.expr), tmp));
5008 else if (code->expr3 && code->expr3->ts.type == BT_CLASS)
5010 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5011 rhs = gfc_copy_expr (code->expr3);
5012 gfc_add_vptr_component (rhs);
5013 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5014 gfc_add_expr_to_block (&block, tmp);
5015 gfc_free_expr (rhs);
5016 rhs = gfc_expr_to_initialize (e);
5020 /* VPTR is fixed at compile time. */
5024 ts = &code->expr3->ts;
5025 else if (e->ts.type == BT_DERIVED)
5027 else if (code->ext.alloc.ts.type == BT_DERIVED)
5028 ts = &code->ext.alloc.ts;
5029 else if (e->ts.type == BT_CLASS)
5030 ts = &CLASS_DATA (e)->ts;
5034 if (ts->type == BT_DERIVED)
5036 vtab = gfc_find_derived_vtab (ts->u.derived);
5038 gfc_init_se (&lse, NULL);
5039 lse.want_pointer = 1;
5040 gfc_conv_expr (&lse, lhs);
5041 tmp = gfc_build_addr_expr (NULL_TREE,
5042 gfc_get_symbol_decl (vtab));
5043 gfc_add_modify (&block, lse.expr,
5044 fold_convert (TREE_TYPE (lse.expr), tmp));
5047 gfc_free_expr (lhs);
5052 if (code->expr3 && !code->expr3->mold)
5054 /* Initialization via SOURCE block
5055 (or static default initializer). */
5056 gfc_expr *rhs = gfc_copy_expr (code->expr3);
5057 if (class_expr != NULL_TREE)
5060 to = TREE_OPERAND (se.expr, 0);
5062 tmp = gfc_copy_class_to_class (class_expr, to, nelems);
5064 else if (al->expr->ts.type == BT_CLASS)
5066 gfc_actual_arglist *actual;
5071 /* Do a polymorphic deep copy. */
5072 actual = gfc_get_actual_arglist ();
5073 actual->expr = gfc_copy_expr (rhs);
5074 if (rhs->ts.type == BT_CLASS)
5075 gfc_add_data_component (actual->expr);
5076 actual->next = gfc_get_actual_arglist ();
5077 actual->next->expr = gfc_copy_expr (al->expr);
5078 actual->next->expr->ts.type = BT_CLASS;
5079 gfc_add_data_component (actual->next->expr);
5081 dataref = actual->next->expr->ref;
5082 /* Make sure we go up through the reference chain to
5083 the _data reference, where the arrayspec is found. */
5084 while (dataref->next && dataref->next->type != REF_ARRAY)
5085 dataref = dataref->next;
5087 if (dataref->u.c.component->as)
5091 gfc_ref *ref = dataref->next;
5092 ref->u.ar.type = AR_SECTION;
5093 /* We have to set up the array reference to give ranges
5094 in all dimensions and ensure that the end and stride
5095 are set so that the copy can be scalarized. */
5097 for (; dim < dataref->u.c.component->as->rank; dim++)
5099 ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
5100 if (ref->u.ar.end[dim] == NULL)
5102 ref->u.ar.end[dim] = ref->u.ar.start[dim];
5103 temp = gfc_get_int_expr (gfc_default_integer_kind,
5104 &al->expr->where, 1);
5105 ref->u.ar.start[dim] = temp;
5107 temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
5108 gfc_copy_expr (ref->u.ar.start[dim]));
5109 temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
5110 &al->expr->where, 1),
5114 if (rhs->ts.type == BT_CLASS)
5116 ppc = gfc_copy_expr (rhs);
5117 gfc_add_vptr_component (ppc);
5120 ppc = gfc_lval_expr_from_sym
5121 (gfc_find_derived_vtab (rhs->ts.u.derived));
5122 gfc_add_component_ref (ppc, "_copy");
5124 ppc_code = gfc_get_code ();
5125 ppc_code->resolved_sym = ppc->symtree->n.sym;
5126 /* Although '_copy' is set to be elemental in class.c, it is
5127 not staying that way. Find out why, sometime.... */
5128 ppc_code->resolved_sym->attr.elemental = 1;
5129 ppc_code->ext.actual = actual;
5130 ppc_code->expr1 = ppc;
5131 ppc_code->op = EXEC_CALL;
5132 /* Since '_copy' is elemental, the scalarizer will take care
5133 of arrays in gfc_trans_call. */
5134 tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
5135 gfc_free_statements (ppc_code);
5137 else if (expr3 != NULL_TREE)
5139 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
5140 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
5141 slen3, expr3, code->expr3->ts.kind);
5146 /* Switch off automatic reallocation since we have just done
5148 int realloc_lhs = gfc_option.flag_realloc_lhs;
5149 gfc_option.flag_realloc_lhs = 0;
5150 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
5152 gfc_option.flag_realloc_lhs = realloc_lhs;
5154 gfc_free_expr (rhs);
5155 gfc_add_expr_to_block (&block, tmp);
5157 else if (code->expr3 && code->expr3->mold
5158 && code->expr3->ts.type == BT_CLASS)
5160 /* Since the _vptr has already been assigned to the allocate
5161 object, we can use gfc_copy_class_to_class in its
5162 initialization mode. */
5163 tmp = TREE_OPERAND (se.expr, 0);
5164 tmp = gfc_copy_class_to_class (NULL_TREE, tmp, nelems);
5165 gfc_add_expr_to_block (&block, tmp);
5168 gfc_free_expr (expr);
5174 tmp = build1_v (LABEL_EXPR, label_errmsg);
5175 gfc_add_expr_to_block (&block, tmp);
5178 /* ERRMSG - only useful if STAT is present. */
5179 if (code->expr1 && code->expr2)
5181 const char *msg = "Attempt to allocate an allocated object";
5182 tree slen, dlen, errmsg_str;
5183 stmtblock_t errmsg_block;
5185 gfc_init_block (&errmsg_block);
5187 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5188 gfc_add_modify (&errmsg_block, errmsg_str,
5189 gfc_build_addr_expr (pchar_type_node,
5190 gfc_build_localized_cstring_const (msg)));
5192 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5193 dlen = gfc_get_expr_charlen (code->expr2);
5194 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
5197 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5198 slen, errmsg_str, gfc_default_character_kind);
5199 dlen = gfc_finish_block (&errmsg_block);
5201 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5202 build_int_cst (TREE_TYPE (stat), 0));
5204 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
5206 gfc_add_expr_to_block (&block, tmp);
5212 if (TREE_USED (label_finish))
5214 tmp = build1_v (LABEL_EXPR, label_finish);
5215 gfc_add_expr_to_block (&block, tmp);
5218 gfc_init_se (&se, NULL);
5219 gfc_conv_expr_lhs (&se, code->expr1);
5220 tmp = convert (TREE_TYPE (se.expr), stat);
5221 gfc_add_modify (&block, se.expr, tmp);
5224 gfc_add_block_to_block (&block, &se.post);
5225 gfc_add_block_to_block (&block, &post);
5227 return gfc_finish_block (&block);
5231 /* Translate a DEALLOCATE statement.  Walks code->ext.alloc.list and
   emits a deallocation for each object.  When STAT= (code->expr1) is
   present, a local integer status variable is threaded through the
   individual deallocations and copied into the user's STAT variable at
   the end; ERRMSG= (code->expr2) is only acted upon together with
   STAT=, in which case a fixed error string is copied into the user's
   character variable when the status is nonzero.  Returns the finished
   statement block.  */
5234 gfc_trans_deallocate (gfc_code *code)
/* stat: local INTEGER(4) status from the runtime; pstat: its address,
   handed to the deallocation helpers.  errmsg/errlen: pointer to and
   length of the user's ERRMSG character variable.  apstat is
   initialized but not otherwise used in the visible lines.  */
5238 tree apstat, pstat, stat, errmsg, errlen, tmp;
5239 tree label_finish, label_errmsg;
5242 pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
5243 label_finish = label_errmsg = NULL_TREE;
5245 gfc_start_block (&block);
5247 /* Count the number of failed deallocations. If deallocate() was
5248 called with STAT= , then set STAT to the count. If deallocate
5249 was called with ERRMSG, then set ERRMSG to a string. */
/* NOTE(review): the guard for this setup is elided here — presumably
   it runs only when STAT= and/or ERRMSG= is present; confirm against
   the full file.  */
5252 tree gfc_int4_type_node = gfc_get_int_type (4);
5254 stat = gfc_create_var (gfc_int4_type_node, "stat");
5255 pstat = gfc_build_addr_expr (NULL_TREE, stat);
5257 /* GOTO destinations. */
5258 label_errmsg = gfc_build_label_decl (NULL_TREE);
5259 label_finish = gfc_build_label_decl (NULL_TREE);
/* label_finish starts out unused; helpers that emit a jump to it are
   expected to mark it used (see the TREE_USED check further down).  */
5260 TREE_USED (label_finish) = 0;
5263 /* Set ERRMSG - only needed if STAT is available. */
5264 if (code->expr1 && code->expr2)
/* Evaluate the ERRMSG variable as an lvalue pointer and remember its
   character length for the string copy done by the runtime helpers.  */
5266 gfc_init_se (&se, NULL);
5267 se.want_pointer = 1;
5268 gfc_conv_expr_lhs (&se, code->expr2);
5270 errlen = se.string_length;
/* Translate one deallocation per object on the DEALLOCATE list.  */
5273 for (al = code->ext.alloc.list; al != NULL; al = al->next)
5275 gfc_expr *expr = gfc_copy_expr (al->expr);
5276 gcc_assert (expr->expr_type == EXPR_VARIABLE);
/* For a CLASS object, deallocate the underlying _data component.  */
5278 if (expr->ts.type == BT_CLASS)
5279 gfc_add_data_component (expr);
5281 gfc_init_se (&se, NULL);
5282 gfc_start_block (&se.pre);
5284 se.want_pointer = 1;
5285 se.descriptor_only = 1;
5286 gfc_conv_expr (&se, expr);
/* Arrays and coarrays go through the array-descriptor deallocation
   path; anything else is handled by the scalar path below.  */
5288 if (expr->rank || gfc_is_coarray (expr))
/* A derived type with allocatable components must have those
   components freed before the object itself is deallocated.  */
5290 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
/* Find the last component reference, to decide whether the object
   reached is a pointer component (whose targets we must not free).  */
5293 gfc_ref *last = NULL;
5294 for (ref = expr->ref; ref; ref = ref->next)
5295 if (ref->type == REF_COMPONENT)
5298 /* Do not deallocate the components of a derived type
5299 ultimate pointer component. */
5300 if (!(last && last->u.c.component->attr.pointer)
5301 && !(!last && expr->symtree->n.sym->attr.pointer))
5303 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
5305 gfc_add_expr_to_block (&se.pre, tmp);
/* Free the array itself; on failure the helper records into *pstat
   and may branch to label_finish.  */
5308 tmp = gfc_array_deallocate (se.expr, pstat, errmsg, errlen,
5309 label_finish, expr);
5310 gfc_add_expr_to_block (&se.pre, tmp);
/* Scalar (non-array, non-coarray) deallocation path.  NOTE(review):
   the trailing arguments of this call are elided here.  */
5314 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
5316 gfc_add_expr_to_block (&se.pre, tmp);
5318 /* Set to zero after deallocation. */
/* i.e. null the data pointer so the object tests as unallocated.  */
5319 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5321 build_int_cst (TREE_TYPE (se.expr), 0));
5322 gfc_add_expr_to_block (&se.pre, tmp);
5324 if (al->expr->ts.type == BT_CLASS)
5326 /* Reset _vptr component to declared type. */
5327 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
5328 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
5329 gfc_add_vptr_component (lhs);
5330 rhs = gfc_lval_expr_from_sym (vtab);
5331 tmp = gfc_trans_pointer_assignment (lhs, rhs);
5332 gfc_add_expr_to_block (&se.pre, tmp);
5333 gfc_free_expr (lhs);
5334 gfc_free_expr (rhs);
/* On a nonzero status, jump to the ERRMSG handling label; the branch
   is marked unlikely.  NOTE(review): the guard for emitting this test
   is elided — presumably it is only done when STAT= is present.  */
5342 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5343 build_int_cst (TREE_TYPE (stat), 0));
5344 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5345 gfc_unlikely (cond),
5346 build1_v (GOTO_EXPR, label_errmsg),
5347 build_empty_stmt (input_location));
5348 gfc_add_expr_to_block (&se.pre, tmp);
/* Flush this object's deallocation code into the main block.  */
5351 tmp = gfc_finish_block (&se.pre);
5352 gfc_add_expr_to_block (&block, tmp);
5353 gfc_free_expr (expr);
/* Place the label targeted by the per-object error branches above.
   NOTE(review): an elided guard presumably emits this only when the
   label is actually used.  */
5358 tmp = build1_v (LABEL_EXPR, label_errmsg);
5359 gfc_add_expr_to_block (&block, tmp);
5362 /* Set ERRMSG - only needed if STAT is available. */
5363 if (code->expr1 && code->expr2)
5365 const char *msg = "Attempt to deallocate an unallocated object";
5366 stmtblock_t errmsg_block;
5367 tree errmsg_str, slen, dlen, cond;
5369 gfc_init_block (&errmsg_block);
/* Build a pointer to the (possibly localized) constant message.  */
5371 errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
5372 gfc_add_modify (&errmsg_block, errmsg_str,
5373 gfc_build_addr_expr (pchar_type_node,
5374 gfc_build_localized_cstring_const (msg)));
5375 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
5376 dlen = gfc_get_expr_charlen (code->expr2);
/* Copy (and pad/truncate) the message into the user's ERRMSG
   variable, honoring both character kinds.  */
5378 gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
5379 slen, errmsg_str, gfc_default_character_kind);
5380 tmp = gfc_finish_block (&errmsg_block);
/* Guard the message copy: only executed when stat != 0 (unlikely).  */
5382 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
5383 build_int_cst (TREE_TYPE (stat), 0));
5384 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5385 gfc_unlikely (cond), tmp,
5386 build_empty_stmt (input_location));
5388 gfc_add_expr_to_block (&block, tmp);
/* Emit the early-exit label only if some helper branched to it.  */
5391 if (code->expr1 && TREE_USED (label_finish))
5393 tmp = build1_v (LABEL_EXPR, label_finish);
5394 gfc_add_expr_to_block (&block, tmp);
/* Finally, store the accumulated status into the user's STAT
   variable.  NOTE(review): the guard (presumably "if (code->expr1)")
   is elided here.  */
5400 gfc_init_se (&se, NULL);
5401 gfc_conv_expr_lhs (&se, code->expr1);
5402 tmp = convert (TREE_TYPE (se.expr), stat);
5403 gfc_add_modify (&block, se.expr, tmp);
5406 return gfc_finish_block (&block);
5409 #include "gt-fortran-trans-stmt.h"