1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
40 #include "dependency.h"
/* Per-dimension iterator bookkeeping used when translating FORALL;
   instances are chained through NEXT.  (Struct body elided in this
   excerpt.)  */
42 typedef struct iter_info
48 struct iter_info *next;

/* One nesting level of a FORALL construct; PREV_NEST links to the
   enclosing level.  (Struct body elided in this excerpt.)  */
52 typedef struct forall_info
59 struct forall_info *prev_nest;

/* Forward declaration: WHERE translation is also invoked from the
   FORALL machinery below, hence the early prototype.  */
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
69 gfc_trans_label_here (gfc_code * code)
/* The statement label attached to this code node becomes a GENERIC
   LABEL_EXPR so later GOTOs can target it.  */
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Precondition: only variables that were the target of an ASSIGN
   statement carry the auxiliary label decls.  */
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement.  Stores either the address of
   a target label (with length -1 as a marker) or the text of a FORMAT
   label (with its character length) into the variable's auxiliary
   ADDR/LEN decls; gfc_trans_goto relies on the -1 marker.  */
95 gfc_trans_label_assign (gfc_code * code)
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
/* Auxiliary decls created for ASSIGNed variables: string length and
   target address.  */
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
114 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: store its address and flag it with length -1.  */
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
/* Otherwise the label names a FORMAT statement: store the format
   string and its length instead.  */
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
/* +1 for the trailing NUL of the wide string constant.  */
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement.  Handles both a plain GOTO to a literal
   label and an assigned GOTO through a variable set by ASSIGN.  */
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
/* Simple case: an ordinary GOTO to a known statement label.  */
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
/* Assigned GOTO: jump through the address stored in the variable's
   auxiliary decls (see gfc_trans_label_assign).  */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
/* Length -1 marks a target label; anything else means the variable
   currently holds a FORMAT label, which is a runtime error here.  */
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 /* We're going to ignore a label list. It does not really change the
163 statement's semantics (because it is just a further restriction on
164 what's legal code); before, we were comparing label addresses here, but
165 that's a very fragile business and may break with optimization. So
168 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
169 gfc_add_expr_to_block (&se.pre, target);
170 return gfc_finish_block (&se.pre);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
176 gfc_trans_entry (gfc_code * code)
178 return build1_v (LABEL_EXPR, code->ext.entry->label);
182 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
183 elemental subroutines. Make temporaries for output arguments if any such
184 dependencies are found. Output arguments are chosen because internal_unpack
185 can be used, as is, to copy the result back to the variable. */
187 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
188 gfc_symbol * sym, gfc_actual_arglist * arg,
189 gfc_dep_check check_variable)
191 gfc_actual_arglist *arg0;
193 gfc_formal_arglist *formal;
194 gfc_loopinfo tmp_loop;
/* Nothing to do unless the call was scalarized.  */
206 if (loopse->ss == NULL)
211 formal = sym->formal;
213 /* Loop over all the arguments testing for dependencies. */
214 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
220 /* Obtain the info structure for the current argument. */
222 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
226 info = &ss->data.info;
230 /* If there is a dependency, create a temporary and use it
231 instead of the variable. */
232 fsym = formal ? formal->sym : NULL;
/* Only non-INTENT(IN) variable arguments can be overwritten and
   hence need the dependency check.  */
233 if (e->expr_type == EXPR_VARIABLE
235 && fsym->attr.intent != INTENT_IN
236 && gfc_check_fncall_dependency (e, fsym->attr.intent,
237 sym, arg0, check_variable))
239 tree initial, temptype;
240 stmtblock_t temp_post;
242 /* Make a local loopinfo for the temporary creation, so that
243 none of the other ss->info's have to be renormalized. */
244 gfc_init_loopinfo (&tmp_loop);
245 for (n = 0; n < info->dimen; n++)
247 tmp_loop.to[n] = loopse->loop->to[n];
248 tmp_loop.from[n] = loopse->loop->from[n];
249 tmp_loop.order[n] = loopse->loop->order[n];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse, NULL);
254 parmse.want_pointer = 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref = e->ref; ref; ref = ref->next)
263 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
268 bool seen_range = false;
269 for (n = 0; n < ref->u.ar.dimen; n++)
271 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
275 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
/* Turn a trailing element subscript into the degenerate
   range start:start so the descriptor keeps full rank.  */
278 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
279 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
283 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
284 gfc_add_block_to_block (&se->pre, &parmse.pre);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym->attr.intent == INTENT_INOUT
289 || (fsym->ts.type ==BT_DERIVED
290 && fsym->attr.intent == INTENT_OUT))
291 initial = parmse.expr;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
303 temptype = TREE_TYPE (temptype);
304 temptype = gfc_get_element_type (temptype);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size = gfc_create_var (gfc_array_index_type, NULL);
310 data = gfc_create_var (pvoid_type_node, NULL);
311 gfc_init_block (&temp_post);
312 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
313 &tmp_loop, info, temptype,
317 gfc_add_modify (&se->pre, size, tmp);
318 tmp = fold_convert (pvoid_type_node, info->data);
319 gfc_add_modify (&se->pre, data, tmp);
321 /* Calculate the offset for the temporary. */
/* offset = -sum_n (from[n] * stride[n]) over the loop dims.  */
322 offset = gfc_index_zero_node;
323 for (n = 0; n < info->dimen; n++)
325 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
327 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
328 loopse->loop->from[n], tmp);
329 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
332 info->offset = gfc_create_var (gfc_array_index_type, NULL);
333 gfc_add_modify (&se->pre, info->offset, offset);
335 /* Copy the result back using unpack. */
336 tmp = build_call_expr_loc (input_location,
337 gfor_fndecl_in_unpack, 2, parmse.expr, data);
338 gfc_add_expr_to_block (&se->post, tmp);
340 /* parmse.pre is already added above. */
341 gfc_add_block_to_block (&se->post, &parmse.post);
342 gfc_add_block_to_block (&se->post, &temp_post);
348 /* Translate the CALL statement. Builds a call to an F95 subroutine.
   MASK/COUNT1/INVERT drive masked execution of the call body --
   presumably supplied by the FORALL/WHERE translation; confirm against
   the callers.  DEPENDENCY_CHECK enables argument dependency checking
   for operator assignment.  */
351 gfc_trans_call (gfc_code * code, bool dependency_check,
352 tree mask, tree count1, bool invert)
356 int has_alternate_specifier;
357 gfc_dep_check check_variable;
358 tree index = NULL_TREE;
359 tree maskexpr = NULL_TREE;
362 /* A CALL starts a new block because the actual arguments may have to
363 be evaluated first. */
364 gfc_init_se (&se, NULL);
365 gfc_start_block (&se.pre);
367 gcc_assert (code->resolved_sym);
369 ss = gfc_ss_terminator;
/* Elemental subroutines may need scalarization over array actual
   arguments; walk them to find out.  */
370 if (code->resolved_sym->attr.elemental)
371 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
373 /* Is not an elemental subroutine call with array valued arguments. */
374 if (ss == gfc_ss_terminator)
377 /* Translate the call. */
378 has_alternate_specifier
379 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
380 code->expr1, NULL_TREE);
382 /* A subroutine without side-effect, by definition, does nothing! */
383 TREE_SIDE_EFFECTS (se.expr) = 1;
385 /* Chain the pieces together and return the block. */
386 if (has_alternate_specifier)
388 gfc_code *select_code;
/* Alternate returns are represented as a SELECT on a hidden
   integer variable that the call's return value feeds.  */
390 select_code = code->next;
391 gcc_assert(select_code->op == EXEC_SELECT);
392 sym = select_code->expr1->symtree->n.sym;
393 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
394 if (sym->backend_decl == NULL)
395 sym->backend_decl = gfc_get_symbol_decl (sym);
396 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
399 gfc_add_expr_to_block (&se.pre, se.expr);
401 gfc_add_block_to_block (&se.pre, &se.post);
406 /* An elemental subroutine call with array valued arguments has
414 /* gfc_walk_elemental_function_args renders the ss chain in the
415 reverse order to the actual argument order. */
416 ss = gfc_reverse_ss (ss);
418 /* Initialize the loop. */
419 gfc_init_se (&loopse, NULL);
420 gfc_init_loopinfo (&loop);
421 gfc_add_ss_to_loop (&loop, ss);
423 gfc_conv_ss_startstride (&loop);
424 /* TODO: gfc_conv_loop_setup generates a temporary for vector
425 subscripts. This could be prevented in the elemental case
426 as temporaries are handled separatedly
427 (below in gfc_conv_elemental_dependencies). */
428 gfc_conv_loop_setup (&loop, &code->expr1->where);
429 gfc_mark_ss_chain_used (ss, 1);
431 /* Convert the arguments, checking for dependencies. */
432 gfc_copy_loopinfo_to_se (&loopse, &loop);
435 /* For operator assignment, do dependency checking. */
436 if (dependency_check)
437 check_variable = ELEM_CHECK_VARIABLE;
439 check_variable = ELEM_DONT_CHECK_VARIABLE;
/* Create temporaries for any arguments with dependencies; the
   setup/cleanup code lands in depse.pre/depse.post.  */
441 gfc_init_se (&depse, NULL);
442 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
443 code->ext.actual, check_variable);
445 gfc_add_block_to_block (&loop.pre, &depse.pre);
446 gfc_add_block_to_block (&loop.post, &depse.post);
448 /* Generate the loop body. */
449 gfc_start_scalarized_body (&loop, &body);
450 gfc_init_block (&block);
454 /* Form the mask expression according to the mask. */
456 maskexpr = gfc_build_array_ref (mask, index, NULL);
/* INVERT selects the complement of the mask (ELSEWHERE case).  */
458 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
462 /* Add the subroutine call to the block. */
463 gfc_conv_procedure_call (&loopse, code->resolved_sym,
464 code->ext.actual, code->expr1,
/* Guard the call with the mask and keep COUNT1 in step with the
   masked iteration.  */
469 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
470 build_empty_stmt (input_location));
471 gfc_add_expr_to_block (&loopse.pre, tmp);
472 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
473 count1, gfc_index_one_node);
474 gfc_add_modify (&loopse.pre, count1, tmp);
477 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
479 gfc_add_block_to_block (&block, &loopse.pre);
480 gfc_add_block_to_block (&block, &loopse.post);
482 /* Finish up the loop block and the loop. */
483 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
484 gfc_trans_scalarizing_loops (&loop, &body);
485 gfc_add_block_to_block (&se.pre, &loop.pre);
486 gfc_add_block_to_block (&se.pre, &loop.post);
487 gfc_add_block_to_block (&se.pre, &se.post);
488 gfc_cleanup_loop (&loop);
491 return gfc_finish_block (&se.pre);
495 /* Translate the RETURN statement.  A RETURN with an expression is an
   alternate return: the expression is stored into the fake result
   variable before jumping to the function's return label.  */
498 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
506 /* If code->expr is not NULL, this return statement must appear
507 in a subroutine and current_fake_result_decl has already
510 result = gfc_get_fake_result_decl (NULL, 0);
/* No result decl: the * dummy the alternate return selects is
   missing, so warn and fall back to a plain return jump.  */
513 gfc_warning ("An alternate return at %L without a * dummy argument",
514 &code->expr1->where);
515 return build1_v (GOTO_EXPR, gfc_get_return_label ());
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
522 gfc_conv_expr (&se, code->expr1);
/* result = (result type) expr; then jump to the return label.  */
524 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
525 fold_convert (TREE_TYPE (result), se.expr));
526 gfc_add_expr_to_block (&se.pre, tmp);
528 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
529 gfc_add_expr_to_block (&se.pre, tmp);
530 gfc_add_block_to_block (&se.pre, &se.post);
531 return gfc_finish_block (&se.pre);
/* Plain RETURN: just jump to the return label.  */
534 return build1_v (GOTO_EXPR, gfc_get_return_label ());
538 /* Translate the PAUSE statement. We have to translate this statement
539 to a runtime library call. */
542 gfc_trans_pause (gfc_code * code)
544 tree gfc_int4_type_node = gfc_get_int_type (4);
548 /* Start a new block for this statement. */
549 gfc_init_se (&se, NULL);
550 gfc_start_block (&se.pre);
/* No expression: numeric PAUSE using the stop code.  */
553 if (code->expr1 == NULL)
555 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
556 tmp = build_call_expr_loc (input_location,
557 gfor_fndecl_pause_numeric, 1, tmp);
/* Character expression: pass the string and its length to the
   library.  */
561 gfc_conv_expr_reference (&se, code->expr1);
562 tmp = build_call_expr_loc (input_location,
563 gfor_fndecl_pause_string, 2,
564 se.expr, se.string_length);
567 gfc_add_expr_to_block (&se.pre, tmp);
569 gfc_add_block_to_block (&se.pre, &se.post);
571 return gfc_finish_block (&se.pre);
575 /* Translate the STOP statement. We have to translate this statement
576 to a runtime library call. */
579 gfc_trans_stop (gfc_code * code)
581 tree gfc_int4_type_node = gfc_get_int_type (4);
585 /* Start a new block for this statement. */
586 gfc_init_se (&se, NULL);
587 gfc_start_block (&se.pre);
/* No expression: numeric STOP using the stop code.  */
590 if (code->expr1 == NULL)
592 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
593 tmp = build_call_expr_loc (input_location,
594 gfor_fndecl_stop_numeric, 1, tmp);
/* Character expression: pass the string and its length to the
   library.  */
598 gfc_conv_expr_reference (&se, code->expr1);
599 tmp = build_call_expr_loc (input_location,
600 gfor_fndecl_stop_string, 2,
601 se.expr, se.string_length);
604 gfc_add_expr_to_block (&se.pre, tmp);
606 gfc_add_block_to_block (&se.pre, &se.post);
608 return gfc_finish_block (&se.pre);
612 /* Generate GENERIC for the IF construct. This function also deals with
613 the simple IF statement, because the front end translates the IF
614 statement into an IF construct.
646 where COND_S is the simplified version of the predicate. PRE_COND_S
647 are the pre side-effects produced by the translation of the
649 We need to build the chain recursively otherwise we run into
650 problems with folding incomplete statements. */
653 gfc_trans_if_1 (gfc_code * code)
658 /* Check for an unconditional ELSE clause. */
660 return gfc_trans_code (code->next);
662 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
663 gfc_init_se (&if_se, NULL);
664 gfc_start_block (&if_se.pre);
666 /* Calculate the IF condition expression. */
667 gfc_conv_expr_val (&if_se, code->expr1);
669 /* Translate the THEN clause. */
670 stmt = gfc_trans_code (code->next);
672 /* Translate the ELSE clause.  Recurse for ELSE IF chains; an absent
   ELSE becomes an empty statement. */
674 elsestmt = gfc_trans_if_1 (code->block);
676 elsestmt = build_empty_stmt (input_location);
678 /* Build the condition expression and add it to the condition block. */
679 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
681 gfc_add_expr_to_block (&if_se.pre, stmt);
683 /* Finish off this statement. */
684 return gfc_finish_block (&if_se.pre);
/* Entry point for translating an IF construct; delegates to
   gfc_trans_if_1 for the recursive work.  */
688 gfc_trans_if (gfc_code * code)
690 /* Ignore the top EXEC_IF, it only announces an IF construct. The
691 actual code we must translate is in code->block. */
693 return gfc_trans_if_1 (code->block);
697 /* Translate an arithmetic IF expression.
699 IF (cond) label1, label2, label3 translates to
711 An optimized version can be generated in case of equal labels.
712 E.g., if label1 is equal to label2, we can translate it to
721 gfc_trans_arithmetic_if (gfc_code * code)
729 /* Start a new block. */
730 gfc_init_se (&se, NULL);
731 gfc_start_block (&se.pre);
733 /* Pre-evaluate COND. */
734 gfc_conv_expr_val (&se, code->expr1);
/* Evaluate once into a temporary: COND is compared up to twice
   below.  */
735 se.expr = gfc_evaluate_now (se.expr, &se.pre);
737 /* Build something to compare with. */
738 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
740 if (code->label1->value != code->label2->value)
742 /* If (cond < 0) take branch1 else take branch2.
743 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
744 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
745 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
/* label1 == label3 collapses the test to cond != 0.  */
747 if (code->label1->value != code->label3->value)
748 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
750 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
752 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
/* label1 == label2: negative and zero cases share one target.  */
755 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
757 if (code->label1->value != code->label3->value
758 && code->label2->value != code->label3->value)
760 /* if (cond <= 0) take branch1 else take branch2. */
761 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
762 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
763 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
766 /* Append the COND_EXPR to the evaluation of COND, and return. */
767 gfc_add_expr_to_block (&se.pre, branch1);
768 return gfc_finish_block (&se.pre);
772 /* Translate a BLOCK construct. This is basically what we would do for a
776 gfc_trans_block_construct (gfc_code* code)
788 gcc_assert (!sym->tlink);
791 gfc_start_block (&body);
/* Declare the construct's local variables, translate its code, and
   wrap with any deferred-variable setup/teardown.  */
792 gfc_process_block_locals (ns);
794 tmp = gfc_trans_code (ns->code);
795 tmp = gfc_trans_deferred_vars (sym, tmp);
797 gfc_add_expr_to_block (&body, tmp);
798 return gfc_finish_block (&body);
802 /* Translate the simple DO construct. This is where the loop variable has
803 integer type and step +-1. We can't use this in the general case
804 because integer overflow and floating point errors could give incorrect
806 We translate a do loop from:
808 DO dovar = from, to, step
814 [Evaluate loop bounds and step]
816 if ((step > 0) ? (dovar <= to) : (dovar => to))
822 cond = (dovar == to);
824 if (cond) goto end_label;
829 This helps the optimizers by avoiding the extra induction variable
830 used in the general case. */
833 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
834 tree from, tree to, tree step, tree exit_cond)
840 tree saved_dovar = NULL;
844 type = TREE_TYPE (dovar);
846 /* Initialize the DO variable: dovar = from. */
847 gfc_add_modify (pblock, dovar, from);
849 /* Save value for do-tinkering checking. */
850 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
852 saved_dovar = gfc_create_var (type, ".saved_dovar");
853 gfc_add_modify (pblock, saved_dovar, dovar);
856 /* Cycle and exit statements are implemented with gotos. */
857 cycle_label = gfc_build_label_decl (NULL_TREE);
858 exit_label = gfc_build_label_decl (NULL_TREE);
860 /* Put the labels where they can be found later. See gfc_trans_do(). */
861 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
864 gfc_start_block (&body);
866 /* Main loop body. */
867 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
868 gfc_add_expr_to_block (&body, tmp);
870 /* Label for cycle statements (if needed). */
871 if (TREE_USED (cycle_label))
873 tmp = build1_v (LABEL_EXPR, cycle_label);
874 gfc_add_expr_to_block (&body, tmp);
877 /* Check whether someone has modified the loop variable. */
878 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
880 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
881 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
882 "Loop variable has been modified");
885 /* Exit the loop if there is an I/O result condition or error. */
888 tmp = build1_v (GOTO_EXPR, exit_label);
889 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
890 build_empty_stmt (input_location));
891 gfc_add_expr_to_block (&body, tmp);
894 /* Evaluate the loop condition.  Tested BEFORE the increment so that
   dovar == to exits without overflowing past TO. */
895 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
896 cond = gfc_evaluate_now (cond, &body);
898 /* Increment the loop variable. */
899 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
900 gfc_add_modify (&body, dovar, tmp);
902 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
903 gfc_add_modify (&body, saved_dovar, dovar);
/* Jump out when the pre-increment value equalled TO.  */
906 tmp = build1_v (GOTO_EXPR, exit_label);
907 TREE_USED (exit_label) = 1;
908 tmp = fold_build3 (COND_EXPR, void_type_node,
909 cond, tmp, build_empty_stmt (input_location));
910 gfc_add_expr_to_block (&body, tmp);
912 /* Finish the loop body. */
913 tmp = gfc_finish_block (&body);
914 tmp = build1_v (LOOP_EXPR, tmp);
916 /* Only execute the loop if the number of iterations is positive. */
917 if (tree_int_cst_sgn (step) > 0)
918 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
920 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
921 tmp = fold_build3 (COND_EXPR, void_type_node,
922 cond, tmp, build_empty_stmt (input_location));
923 gfc_add_expr_to_block (pblock, tmp);
925 /* Add the exit label. */
926 tmp = build1_v (LABEL_EXPR, exit_label);
927 gfc_add_expr_to_block (pblock, tmp);
929 return gfc_finish_block (pblock);
932 /* Translate the DO construct. This obviously is one of the most
933 important ones to get right with any compiler, but especially
936 We special case some loop forms as described in gfc_trans_simple_do.
937 For other cases we implement them with a separate loop count,
938 as described in the standard.
940 We translate a do loop from:
942 DO dovar = from, to, step
948 [evaluate loop bounds and step]
949 empty = (step > 0 ? to < from : to > from);
950 countm1 = (to - from) / step;
952 if (empty) goto exit_label;
958 if (countm1 ==0) goto exit_label;
963 countm1 is an unsigned integer. It is equal to the loop count minus one,
964 because the loop count itself can overflow. */
967 gfc_trans_do (gfc_code * code, tree exit_cond)
971 tree saved_dovar = NULL;
986 gfc_start_block (&block);
988 /* Evaluate all the expressions in the iterator. */
989 gfc_init_se (&se, NULL);
990 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
991 gfc_add_block_to_block (&block, &se.pre);
993 type = TREE_TYPE (dovar);
/* FROM, TO and STEP are each evaluated exactly once, up front, as
   the standard requires.  */
995 gfc_init_se (&se, NULL);
996 gfc_conv_expr_val (&se, code->ext.iterator->start);
997 gfc_add_block_to_block (&block, &se.pre);
998 from = gfc_evaluate_now (se.expr, &block);
1000 gfc_init_se (&se, NULL);
1001 gfc_conv_expr_val (&se, code->ext.iterator->end);
1002 gfc_add_block_to_block (&block, &se.pre);
1003 to = gfc_evaluate_now (se.expr, &block);
1005 gfc_init_se (&se, NULL);
1006 gfc_conv_expr_val (&se, code->ext.iterator->step);
1007 gfc_add_block_to_block (&block, &se.pre);
1008 step = gfc_evaluate_now (se.expr, &block);
/* Optional runtime diagnostic for a zero step.  */
1010 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1012 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1013 fold_convert (type, integer_zero_node));
1014 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1015 "DO step value is zero");
1018 /* Special case simple loops.  Integer loops with step +-1 avoid the
   separate countm1 induction variable entirely. */
1019 if (TREE_CODE (type) == INTEGER_TYPE
1020 && (integer_onep (step)
1021 || tree_int_cst_equal (step, integer_minus_one_node)))
1022 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1024 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1025 fold_convert (type, integer_zero_node));
/* The iteration count is kept unsigned so that it cannot overflow
   even when the loop count equals the full range of TYPE.  */
1027 if (TREE_CODE (type) == INTEGER_TYPE)
1028 utype = unsigned_type_for (type);
1030 utype = unsigned_type_for (gfc_array_index_type);
1031 countm1 = gfc_create_var (utype, "countm1");
1033 /* Cycle and exit statements are implemented with gotos. */
1034 cycle_label = gfc_build_label_decl (NULL_TREE);
1035 exit_label = gfc_build_label_decl (NULL_TREE);
1036 TREE_USED (exit_label) = 1;
1038 /* Initialize the DO variable: dovar = from. */
1039 gfc_add_modify (&block, dovar, from);
1041 /* Save value for do-tinkering checking. */
1042 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1044 saved_dovar = gfc_create_var (type, ".saved_dovar");
1045 gfc_add_modify (&block, saved_dovar, dovar);
1048 /* Initialize loop count and jump to exit label if the loop is empty.
1049 This code is executed before we enter the loop body. We generate:
1050 step_sign = sign(1,step);
1061 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1065 if (TREE_CODE (type) == INTEGER_TYPE)
1067 tree pos, neg, step_sign, to2, from2, step2;
1069 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1071 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1072 build_int_cst (TREE_TYPE (step), 0));
1073 step_sign = fold_build3 (COND_EXPR, type, tmp,
1074 build_int_cst (type, -1),
1075 build_int_cst (type, 1));
/* Empty-loop test: with a positive step, TO < FROM means zero
   iterations; with a negative step, TO > FROM does. */
1077 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1078 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1079 build1_v (GOTO_EXPR, exit_label),
1080 build_empty_stmt (input_location));
1082 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1083 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1084 build1_v (GOTO_EXPR, exit_label),
1085 build_empty_stmt (input_location));
1086 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1088 gfc_add_expr_to_block (&block, tmp);
1090 /* Calculate the loop count. to-from can overflow, so
1091 we cast to unsigned.  Multiplying by step_sign normalizes the
   direction so the subtraction and division are nonnegative. */
1093 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1094 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1095 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1096 step2 = fold_convert (utype, step2);
1097 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1098 tmp = fold_convert (utype, tmp);
1099 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1100 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1101 gfc_add_expr_to_block (&block, tmp);
/* Non-integer (real) DO variable: compute the trip count by real
   division and truncate.  */
1105 /* TODO: We could use the same width as the real type.
1106 This would probably cause more problems that it solves
1107 when we implement "long double" types. */
1109 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1110 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1111 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1112 gfc_add_modify (&block, countm1, tmp);
1114 /* We need a special check for empty loops:
1115 empty = (step > 0 ? to < from : to > from); */
1116 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1117 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1118 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1119 /* If the loop is empty, go directly to the exit label. */
1120 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1121 build1_v (GOTO_EXPR, exit_label),
1122 build_empty_stmt (input_location));
1123 gfc_add_expr_to_block (&block, tmp);
1127 gfc_start_block (&body);
1129 /* Put these labels where they can be found later. We put the
1130 labels in a TREE_LIST node (because TREE_CHAIN is already
1131 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1132 label in TREE_VALUE (backend_decl). */
1134 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1136 /* Main loop body. */
1137 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1138 gfc_add_expr_to_block (&body, tmp);
1140 /* Label for cycle statements (if needed). */
1141 if (TREE_USED (cycle_label))
1143 tmp = build1_v (LABEL_EXPR, cycle_label);
1144 gfc_add_expr_to_block (&body, tmp);
1147 /* Check whether someone has modified the loop variable. */
1148 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1150 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1151 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1152 "Loop variable has been modified");
1155 /* Exit the loop if there is an I/O result condition or error. */
1158 tmp = build1_v (GOTO_EXPR, exit_label);
1159 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1160 build_empty_stmt (input_location));
1161 gfc_add_expr_to_block (&body, tmp);
1164 /* Increment the loop variable. */
1165 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1166 gfc_add_modify (&body, dovar, tmp);
1168 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1169 gfc_add_modify (&body, saved_dovar, dovar);
1171 /* End with the loop condition. Loop until countm1 == 0. */
1172 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1173 build_int_cst (utype, 0));
1174 tmp = build1_v (GOTO_EXPR, exit_label);
1175 tmp = fold_build3 (COND_EXPR, void_type_node,
1176 cond, tmp, build_empty_stmt (input_location));
1177 gfc_add_expr_to_block (&body, tmp);
1179 /* Decrement the loop count.  Safe: the exit test above has already
   ruled out countm1 == 0, so the unsigned subtraction cannot wrap. */
1180 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1181 gfc_add_modify (&body, countm1, tmp);
1183 /* End of loop body. */
1184 tmp = gfc_finish_block (&body);
1186 /* The for loop itself. */
1187 tmp = build1_v (LOOP_EXPR, tmp);
1188 gfc_add_expr_to_block (&block, tmp);
1190 /* Add the exit label. */
1191 tmp = build1_v (LABEL_EXPR, exit_label);
1192 gfc_add_expr_to_block (&block, tmp);
1194 return gfc_finish_block (&block);
1198 /* Translate the DO WHILE construct.
1211 if (! cond) goto exit_label;
1217 Because the evaluation of the exit condition `cond' may have side
1218 effects, we can't do much for empty loop bodies. The backend optimizers
1219 should be smart enough to eliminate any dead loops. */
1222 gfc_trans_do_while (gfc_code * code)
1230 /* Everything we build here is part of the loop body. */
1231 gfc_start_block (&block);
1233 /* Cycle and exit statements are implemented with gotos. */
1234 cycle_label = gfc_build_label_decl (NULL_TREE);
1235 exit_label = gfc_build_label_decl (NULL_TREE);
1237 /* Put the labels where they can be found later. See gfc_trans_do(). */
1238 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1240 /* Create a GIMPLE version of the exit condition.  The condition is
   re-evaluated inside the loop on every iteration (it may have side
   effects), so it lives at the top of the loop body. */
1241 gfc_init_se (&cond, NULL);
1242 gfc_conv_expr_val (&cond, code->expr1);
1243 gfc_add_block_to_block (&block, &cond.pre);
1244 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1246 /* Build "IF (! cond) GOTO exit_label". */
1247 tmp = build1_v (GOTO_EXPR, exit_label);
1248 TREE_USED (exit_label) = 1;
1249 tmp = fold_build3 (COND_EXPR, void_type_node,
1250 cond.expr, tmp, build_empty_stmt (input_location));
1251 gfc_add_expr_to_block (&block, tmp);
1253 /* The main body of the loop. */
1254 tmp = gfc_trans_code (code->block->next);
1255 gfc_add_expr_to_block (&block, tmp);
1257 /* Label for cycle statements (if needed). */
1258 if (TREE_USED (cycle_label))
1260 tmp = build1_v (LABEL_EXPR, cycle_label);
1261 gfc_add_expr_to_block (&block, tmp);
1264 /* End of loop body. */
1265 tmp = gfc_finish_block (&block);
1267 gfc_init_block (&block);
1268 /* Build the loop. */
1269 tmp = build1_v (LOOP_EXPR, tmp);
1270 gfc_add_expr_to_block (&block, tmp);
1272 /* Add the exit label. */
1273 tmp = build1_v (LABEL_EXPR, exit_label);
1274 gfc_add_expr_to_block (&block, tmp);
1276 return gfc_finish_block (&block);
1280 /* Translate the SELECT CASE construct for INTEGER case expressions,
1281 without killing all potential optimizations. The problem is that
1282 Fortran allows unbounded cases, but the back-end does not, so we
1283 need to intercept those before we enter the equivalent SWITCH_EXPR
1286 For example, we translate this,
1289 CASE (:100,101,105:115)
1299 to the GENERIC equivalent,
1303 case (minimum value for typeof(expr) ... 100:
1309 case 200 ... (maximum value for typeof(expr):
1326 gfc_trans_integer_select (gfc_code * code)
1336 gfc_start_block (&block);
1338 /* Calculate the switch expression. */
1339 gfc_init_se (&se, NULL);
1340 gfc_conv_expr_val (&se, code->expr1);
1341 gfc_add_block_to_block (&block, &se.pre);
/* Label jumped to after every case body; Fortran case blocks never
   fall through into each other.  */
1343 end_label = gfc_build_label_decl (NULL_TREE);
1345 gfc_init_block (&body);
1347 for (c = code->block; c; c = c->block)
1349 for (cp = c->ext.case_list; cp; cp = cp->next)
1354 /* Assume it's the default case. */
1355 low = high = NULL_TREE;
1359 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1362 /* If there's only a lower bound, set the high bound to the
1363 maximum value of the case expression. */
1365 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1370 /* Three cases are possible here:
1372 1) There is no lower bound, e.g. CASE (:N).
1373 2) There is a lower bound .NE. high bound, that is
1374 a case range, e.g. CASE (N:M) where M>N (we make
1375 sure that M>N during type resolution).
1376 3) There is a lower bound, and it has the same value
1377 as the high bound, e.g. CASE (N:N). This is our
1378 internal representation of CASE(N).
1380 In the first and second case, we need to set a value for
1381 high. In the third case, we don't because the GCC middle
1382 end represents a single case value by just letting high be
1383 a NULL_TREE. We can't do that because we need to be able
1384 to represent unbounded cases. */
1388 && mpz_cmp (cp->low->value.integer,
1389 cp->high->value.integer) != 0))
1390 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1393 /* Unbounded case. */
1395 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1398 /* Build a label. */
1399 label = gfc_build_label_decl (NULL_TREE);
1401 /* Add this case label.
1402 Add parameter 'label', make it match GCC backend. */
1403 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1405 gfc_add_expr_to_block (&body, tmp);
1408 /* Add the statements for this case. */
1409 tmp = gfc_trans_code (c->next);
1410 gfc_add_expr_to_block (&body, tmp);
1412 /* Break to the end of the construct. */
1413 tmp = build1_v (GOTO_EXPR, end_label);
1414 gfc_add_expr_to_block (&body, tmp);
1417 tmp = gfc_finish_block (&body);
1418 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1419 gfc_add_expr_to_block (&block, tmp);
1421 tmp = build1_v (LABEL_EXPR, end_label);
1422 gfc_add_expr_to_block (&block, tmp);
1424 return gfc_finish_block (&block);
1428 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1430 There are only two cases possible here, even though the standard
1431 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1432 .FALSE., and DEFAULT.
1434 We never generate more than two blocks here. Instead, we always
1435 try to eliminate the DEFAULT case. This way, we can translate this
1436 kind of SELECT construct to a simple
1440 expression in GENERIC. */
1443 gfc_trans_logical_select (gfc_code * code)
1446 gfc_code *t, *f, *d;
1451 /* Assume we don't have any cases at all. */
1454 /* Now see which ones we actually do have. We can have at most two
1455 cases in a single case list: one for .TRUE. and one for .FALSE.
1456 The default case is always separate. If the cases for .TRUE. and
1457 .FALSE. are in the same case list, the block for that case list is
1458 always executed, and we don't generate a COND_EXPR. */
1459 for (c = code->block; c; c = c->block)
1461 for (cp = c->ext.case_list; cp; cp = cp->next)
1465 if (cp->low->value.logical == 0) /* .FALSE. */
1467 else /* if (cp->value.logical != 0), thus .TRUE. */
1475 /* Start a new block. */
1476 gfc_start_block (&block);
1478 /* Calculate the switch expression. We always need to do this
1479 because it may have side effects. */
1480 gfc_init_se (&se, NULL);
1481 gfc_conv_expr_val (&se, code->expr1);
1482 gfc_add_block_to_block (&block, &se.pre);
1484 if (t == f && t != NULL)
1486 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1487 translate the code for these cases, append it to the current
1489 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1493 tree true_tree, false_tree, stmt;
/* Default to empty statements so a missing branch is harmless.  */
1495 true_tree = build_empty_stmt (input_location);
1496 false_tree = build_empty_stmt (input_location);
1498 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1499 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1500 make the missing case the default case. */
1501 if (t != NULL && f != NULL)
1511 /* Translate the code for each of these blocks, and append it to
1512 the current block. */
1514 true_tree = gfc_trans_code (t->next);
1517 false_tree = gfc_trans_code (f->next);
1519 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1520 true_tree, false_tree);
1521 gfc_add_expr_to_block (&block, stmt);
1524 return gfc_finish_block (&block);
1528 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1529 Instead of generating compares and jumps, it is far simpler to
1530 generate a data structure describing the cases in order and call a
1531 library subroutine that locates the right case.
1532 This is particularly true because this is the only case where we
1533 might have to dispose of a temporary.
1534 The library subroutine returns a pointer to jump to or NULL if no
1535 branches are to be taken. */
1538 gfc_trans_character_select (gfc_code *code)
1540 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1541 stmtblock_t block, body;
1547 /* The jump table types are stored in static variables to avoid
1548 constructing them from scratch every single time. */
1549 static tree select_struct[2];
1550 static tree ss_string1[2], ss_string1_len[2];
1551 static tree ss_string2[2], ss_string2_len[2];
1552 static tree ss_target[2];
1554 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
/* NOTE(review): 'k' indexes the per-character-kind cache slots above
   (kind=1 vs kind=4); its assignment is done in this branch.  */
1556 if (code->expr1->ts.kind == 1)
1558 else if (code->expr1->ts.kind == 4)
1563 if (select_struct[k] == NULL)
1565 select_struct[k] = make_node (RECORD_TYPE);
1567 if (code->expr1->ts.kind == 1)
1568 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1569 else if (code->expr1->ts.kind == 4)
1570 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
/* Build the fields of the jump-table element type: the two range
   strings with their lengths, plus the target case number.  */
1575 #define ADD_FIELD(NAME, TYPE) \
1576 ss_##NAME[k] = gfc_add_field_to_struct \
1577 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1578 get_identifier (stringize(NAME)), TYPE)
1580 ADD_FIELD (string1, pchartype);
1581 ADD_FIELD (string1_len, gfc_charlen_type_node);
1583 ADD_FIELD (string2, pchartype);
1584 ADD_FIELD (string2_len, gfc_charlen_type_node);
1586 ADD_FIELD (target, integer_type_node);
1589 gfc_finish_type (select_struct[k]);
/* Rewind to the leftmost case so the table is emitted in order.  */
1592 cp = code->block->ext.case_list;
1593 while (cp->left != NULL)
1597 for (d = cp; d; d = d->right)
1600 end_label = gfc_build_label_decl (NULL_TREE);
1602 /* Generate the body */
1603 gfc_start_block (&block);
1604 gfc_init_block (&body);
1606 for (c = code->block; c; c = c->block)
1608 for (d = c->ext.case_list; d; d = d->next)
1610 label = gfc_build_label_decl (NULL_TREE);
1611 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1612 build_int_cst (NULL_TREE, d->n),
1613 build_int_cst (NULL_TREE, d->n), label);
1614 gfc_add_expr_to_block (&body, tmp);
1617 tmp = gfc_trans_code (c->next);
1618 gfc_add_expr_to_block (&body, tmp);
1620 tmp = build1_v (GOTO_EXPR, end_label);
1621 gfc_add_expr_to_block (&body, tmp);
1624 /* Generate the structure describing the branches */
1627 for(d = cp; d; d = d->right)
1631 gfc_init_se (&se, NULL);
/* A NULL low bound (unbounded below) is encoded as a null string
   pointer with zero length.  */
1635 node = tree_cons (ss_string1[k], null_pointer_node, node);
1636 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1640 gfc_conv_expr_reference (&se, d->low);
1642 node = tree_cons (ss_string1[k], se.expr, node);
1643 node = tree_cons (ss_string1_len[k], se.string_length, node);
1646 if (d->high == NULL)
1648 node = tree_cons (ss_string2[k], null_pointer_node, node);
1649 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1653 gfc_init_se (&se, NULL);
1654 gfc_conv_expr_reference (&se, d->high);
1656 node = tree_cons (ss_string2[k], se.expr, node);
1657 node = tree_cons (ss_string2_len[k], se.string_length, node);
1660 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1663 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1664 init = tree_cons (NULL_TREE, tmp, init);
1667 type = build_array_type (select_struct[k],
1668 build_index_type (build_int_cst (NULL_TREE, n-1)));
1670 init = build_constructor_from_list (type, nreverse(init));
1671 TREE_CONSTANT (init) = 1;
1672 TREE_STATIC (init) = 1;
1673 /* Create a static variable to hold the jump table. */
1674 tmp = gfc_create_var (type, "jumptable");
1675 TREE_CONSTANT (tmp) = 1;
1676 TREE_STATIC (tmp) = 1;
1677 TREE_READONLY (tmp) = 1;
1678 DECL_INITIAL (tmp) = init;
1681 /* Build the library call */
1682 init = gfc_build_addr_expr (pvoid_type_node, init);
1684 gfc_init_se (&se, NULL);
1685 gfc_conv_expr_reference (&se, code->expr1);
1687 gfc_add_block_to_block (&block, &se.pre);
1689 if (code->expr1->ts.kind == 1)
1690 fndecl = gfor_fndecl_select_string;
1691 else if (code->expr1->ts.kind == 4)
1692 fndecl = gfor_fndecl_select_string_char4;
/* The library routine returns the case number to dispatch on.  */
1696 tmp = build_call_expr_loc (input_location,
1697 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1698 se.expr, se.string_length);
1699 case_num = gfc_create_var (integer_type_node, "case_num");
1700 gfc_add_modify (&block, case_num, tmp);
/* Clean up any temporary created for the selector expression.  */
1702 gfc_add_block_to_block (&block, &se.post);
1704 tmp = gfc_finish_block (&body);
1705 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1706 gfc_add_expr_to_block (&block, tmp);
1708 tmp = build1_v (LABEL_EXPR, end_label);
1709 gfc_add_expr_to_block (&block, tmp);
1711 return gfc_finish_block (&block);
1715 /* Translate the three variants of the SELECT CASE construct.
1717 SELECT CASEs with INTEGER case expressions can be translated to an
1718 equivalent GENERIC switch statement, and for LOGICAL case
1719 expressions we build one or two if-else compares.
1721 SELECT CASEs with CHARACTER case expressions are a whole different
1722 story, because they don't exist in GENERIC. So we sort them and
1723 do a binary search at runtime.
1725 Fortran has no BREAK statement, and it does not allow jumps from
1726 one case block to another. That makes things a lot easier for
1730 gfc_trans_select (gfc_code * code)
1732 gcc_assert (code && code->expr1);
1734 /* Empty SELECT constructs are legal. */
1735 if (code->block == NULL)
1736 return build_empty_stmt (input_location);
1738 /* Select the correct translation function. */
1739 switch (code->expr1->ts.type)
1741 case BT_LOGICAL: return gfc_trans_logical_select (code);
1742 case BT_INTEGER: return gfc_trans_integer_select (code);
1743 case BT_CHARACTER: return gfc_trans_character_select (code);
/* Any other selector type means the front end let something bad
   through -- this is an internal consistency failure.  */
1745 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1751 /* Traversal function to substitute a replacement symtree if the symbol
1752 in the expression is the same as that passed. f == 2 signals that
1753 that variable itself is not to be checked - only the references.
1754 This group of functions is used when the variable expression in a
1755 FORALL assignment has internal references. For example:
1756 FORALL (i = 1:4) p(p(i)) = i
1757 The only recourse here is to store a copy of 'p' for the index
/* Saved symtrees for the substitution: forall_replace installs
   new_symtree, forall_restore puts old_symtree back.  */
1760 static gfc_symtree *new_symtree;
1761 static gfc_symtree *old_symtree;
/* gfc_traverse_expr callback: redirect any reference to SYM in EXPR
   to the replacement symtree (new_symtree).  */
1764 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1766 if (expr->expr_type != EXPR_VARIABLE)
1771 else if (expr->symtree->n.sym == sym)
1772 expr->symtree = new_symtree;
/* Walk expression E, replacing references to SYM via forall_replace.
   F is forwarded as the traversal flag (see comment above).  */
1778 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1780 gfc_traverse_expr (e, sym, forall_replace, f);
/* gfc_traverse_expr callback: undo forall_replace by restoring
   old_symtree wherever new_symtree was installed.  */
1784 forall_restore (gfc_expr *expr,
1785 gfc_symbol *sym ATTRIBUTE_UNUSED,
1786 int *f ATTRIBUTE_UNUSED)
1788 if (expr->expr_type != EXPR_VARIABLE)
1791 if (expr->symtree == new_symtree)
1792 expr->symtree = old_symtree;
/* Walk expression E, undoing the symtree substitution.  */
1798 forall_restore_symtree (gfc_expr *e)
1800 gfc_traverse_expr (e, NULL, forall_restore, 0);
/* Make a temporary copy of the FORALL assignment target C->expr1 and
   redirect all references to the original symbol to that copy (see the
   comment above forall_replace).  PRE/POST receive setup and cleanup
   code respectively.  */
1804 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1809 gfc_symbol *new_sym;
1810 gfc_symbol *old_sym;
1814 /* Build a copy of the lvalue. */
1815 old_symtree = c->expr1->symtree;
1816 old_sym = old_symtree->n.sym;
1817 e = gfc_lval_expr_from_sym (old_sym);
1818 if (old_sym->attr.dimension)
1820 gfc_init_se (&tse, NULL);
/* Array case: pack the variable into a temporary descriptor.  */
1821 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
1822 gfc_add_block_to_block (pre, &tse.pre);
1823 gfc_add_block_to_block (post, &tse.post);
1824 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1826 if (e->ts.type != BT_CHARACTER)
1828 /* Use the variable offset for the temporary. */
1829 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1830 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
/* Scalar case: copy the value into a plain temporary.  */
1835 gfc_init_se (&tse, NULL);
1836 gfc_init_se (&rse, NULL);
1837 gfc_conv_expr (&rse, e);
1838 if (e->ts.type == BT_CHARACTER)
1840 tse.string_length = rse.string_length;
1841 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1843 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1845 gfc_add_block_to_block (pre, &tse.pre);
1846 gfc_add_block_to_block (post, &tse.post);
1850 tmp = gfc_typenode_for_spec (&e->ts);
1851 tse.expr = gfc_create_var (tmp, "temp");
1854 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1855 e->expr_type == EXPR_VARIABLE, true);
1856 gfc_add_expr_to_block (pre, tmp);
1860 /* Create a new symbol to represent the lvalue. */
1861 new_sym = gfc_new_symbol (old_sym->name, NULL);
1862 new_sym->ts = old_sym->ts;
1863 new_sym->attr.referenced = 1;
1864 new_sym->attr.temporary = 1;
1865 new_sym->attr.dimension = old_sym->attr.dimension;
1866 new_sym->attr.flavor = old_sym->attr.flavor;
1868 /* Use the temporary as the backend_decl. */
1869 new_sym->backend_decl = tse.expr;
1871 /* Create a fake symtree for it. */
1873 new_symtree = gfc_new_symtree (&root, old_sym->name);
1874 new_symtree->n.sym = new_sym;
1875 gcc_assert (new_symtree == root);
1877 /* Go through the expression reference replacing the old_symtree
1879 forall_replace_symtree (c->expr1, old_sym, 2);
1881 /* Now we have made this temporary, we might as well use it for
1882 the right hand side. */
1883 forall_replace_symtree (c->expr2, old_sym, 1);
1887 /* Handles dependencies in forall assignments. */
1889 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1896 lsym = c->expr1->symtree->n.sym;
/* First, the classic lhs/rhs dependency check.  */
1897 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1899 /* Now check for dependencies within the 'variable'
1900 expression itself. These are treated by making a complete
1901 copy of variable and changing all the references to it
1902 point to the copy instead. Note that the shallow copy of
1903 the variable will not suffice for derived types with
1904 pointer components. We therefore leave these to their
1906 if (lsym->ts.type == BT_DERIVED
1907 && lsym->ts.u.derived->attr.pointer_comp)
/* The lhs symbol appears in its own index expressions, e.g.
   p(p(i)) = i; a full copy of the variable is required.  */
1911 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1913 forall_make_variable_temp (c, pre, post);
1917 /* Substrings with dependencies are treated in the same
1919 if (c->expr1->ts.type == BT_CHARACTER
1921 && c->expr2->expr_type == EXPR_VARIABLE
1922 && lsym == c->expr2->symtree->n.sym)
1924 for (lref = c->expr1->ref; lref; lref = lref->next)
1925 if (lref->type == REF_SUBSTRING)
1927 for (rref = c->expr2->ref; rref; rref = rref->next)
1928 if (rref->type == REF_SUBSTRING)
/* Only an rhs substring starting before the lhs substring can
   overlap destructively.  */
1932 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1934 forall_make_variable_temp (c, pre, post);
/* Undo the symtree substitution made by forall_make_variable_temp and
   release the fake symtree and its symbol.  */
1943 cleanup_forall_symtrees (gfc_code *c)
1945 forall_restore_symtree (c->expr1);
1946 forall_restore_symtree (c->expr2);
1947 gfc_free (new_symtree->n.sym);
1948 gfc_free (new_symtree);
1952 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1953 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1954 indicates whether we should generate code to test the FORALLs mask
1955 array. OUTER is the loop header to be used for initializing mask
1958 The generated loop format is:
1959 count = (end - start + step) / step
1972 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1973 int mask_flag, stmtblock_t *outer)
1981 tree var, start, end, step;
1984 /* Initialize the mask index outside the FORALL nest. */
1985 if (mask_flag && forall_tmp->mask)
1986 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
/* Wrap BODY in one loop per FORALL index variable, innermost first.  */
1988 iter = forall_tmp->this_loop;
1989 nvar = forall_tmp->nvar;
1990 for (n = 0; n < nvar; n++)
1993 start = iter->start;
1997 exit_label = gfc_build_label_decl (NULL_TREE);
1998 TREE_USED (exit_label) = 1;
2000 /* The loop counter. */
2001 count = gfc_create_var (TREE_TYPE (var), "count");
2003 /* The body of the loop. */
2004 gfc_init_block (&block);
2006 /* The exit condition. */
/* COUNT counts down from the trip count; leave once it hits zero.  */
2007 cond = fold_build2 (LE_EXPR, boolean_type_node,
2008 count, build_int_cst (TREE_TYPE (count), 0));
2009 tmp = build1_v (GOTO_EXPR, exit_label);
2010 tmp = fold_build3 (COND_EXPR, void_type_node,
2011 cond, tmp, build_empty_stmt (input_location));
2012 gfc_add_expr_to_block (&block, tmp);
2014 /* The main loop body. */
2015 gfc_add_expr_to_block (&block, body);
2017 /* Increment the loop variable. */
2018 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2019 gfc_add_modify (&block, var, tmp);
2021 /* Advance to the next mask element. Only do this for the
2023 if (n == 0 && mask_flag && forall_tmp->mask)
2025 tree maskindex = forall_tmp->maskindex;
2026 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2027 maskindex, gfc_index_one_node);
2028 gfc_add_modify (&block, maskindex, tmp);
2031 /* Decrement the loop counter. */
2032 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2033 build_int_cst (TREE_TYPE (var), 1));
2034 gfc_add_modify (&block, count, tmp);
2036 body = gfc_finish_block (&block);
2038 /* Loop var initialization. */
2039 gfc_init_block (&block);
2040 gfc_add_modify (&block, var, start);
2043 /* Initialize the loop counter. */
/* Trip count: (end - start + step) / step.  */
2044 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2045 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2046 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2047 gfc_add_modify (&block, count, tmp);
2049 /* The loop expression. */
2050 tmp = build1_v (LOOP_EXPR, body);
2051 gfc_add_expr_to_block (&block, tmp);
2053 /* The exit label. */
2054 tmp = build1_v (LABEL_EXPR, exit_label);
2055 gfc_add_expr_to_block (&block, tmp);
2057 body = gfc_finish_block (&block);
2064 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2065 is nonzero, the body is controlled by all masks in the forall nest.
2066 Otherwise, the innermost loop is not controlled by it's mask. This
2067 is used for initializing that mask. */
2070 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2075 forall_info *forall_tmp;
2076 tree mask, maskindex;
2078 gfc_start_block (&header);
/* Walk outward from the innermost FORALL level, wrapping BODY in the
   mask test and loops of each level in turn.  */
2080 forall_tmp = nested_forall_info;
2081 while (forall_tmp != NULL)
2083 /* Generate body with masks' control. */
2086 mask = forall_tmp->mask;
2087 maskindex = forall_tmp->maskindex;
2089 /* If a mask was specified make the assignment conditional. */
2092 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2093 body = build3_v (COND_EXPR, tmp, body,
2094 build_empty_stmt (input_location));
2097 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2098 forall_tmp = forall_tmp->prev_nest;
2102 gfc_add_expr_to_block (&header, body);
2103 return gfc_finish_block (&header);
2107 /* Allocate data for holding a temporary array. Returns either a local
2108 temporary array or a pointer variable. */
2111 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2118 if (INTEGER_CST_P (size))
/* Constant element count: the array type gets bounds [0 .. size-1].  */
2120 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2121 gfc_index_one_node);
2126 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2127 type = build_array_type (elem_type, type);
2128 if (gfc_can_put_var_on_stack (bytesize))
/* Small constant-size temporary: keep it on the stack; no pointer to
   free later.  */
2130 gcc_assert (INTEGER_CST_P (size));
2131 tmpvar = gfc_create_var (type, "temp");
/* Otherwise heap-allocate; *PDATA is returned so the caller can free
   the storage.  */
2136 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2137 *pdata = convert (pvoid_type_node, tmpvar);
2139 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2140 gfc_add_modify (pblock, tmpvar, tmp);
2146 /* Generate codes to copy the temporary to the actual lhs. */
2149 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2150 tree count1, tree wheremask, bool invert)
2154 stmtblock_t block, body;
2160 lss = gfc_walk_expr (expr);
/* Scalar lhs: a single assignment, no scalarization loop needed.  */
2162 if (lss == gfc_ss_terminator)
2164 gfc_start_block (&block);
2166 gfc_init_se (&lse, NULL);
2168 /* Translate the expression. */
2169 gfc_conv_expr (&lse, expr);
2171 /* Form the expression for the temporary. */
2172 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2174 /* Use the scalar assignment as is. */
2175 gfc_add_block_to_block (&block, &lse.pre);
2176 gfc_add_modify (&block, lse.expr, tmp);
2177 gfc_add_block_to_block (&block, &lse.post);
2179 /* Increment the count1. */
2180 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2181 gfc_index_one_node);
2182 gfc_add_modify (&block, count1, tmp);
2184 tmp = gfc_finish_block (&block);
/* Array lhs: build a scalarized loop copying element by element.  */
2188 gfc_start_block (&block);
2190 gfc_init_loopinfo (&loop1);
2191 gfc_init_se (&rse, NULL);
2192 gfc_init_se (&lse, NULL);
2194 /* Associate the lss with the loop. */
2195 gfc_add_ss_to_loop (&loop1, lss);
2197 /* Calculate the bounds of the scalarization. */
2198 gfc_conv_ss_startstride (&loop1);
2199 /* Setup the scalarizing loops. */
2200 gfc_conv_loop_setup (&loop1, &expr->where);
2202 gfc_mark_ss_chain_used (lss, 1);
2204 /* Start the scalarized loop body. */
2205 gfc_start_scalarized_body (&loop1, &body);
2207 /* Setup the gfc_se structures. */
2208 gfc_copy_loopinfo_to_se (&lse, &loop1);
2211 /* Form the expression of the temporary. */
2212 if (lss != gfc_ss_terminator)
2213 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2214 /* Translate expr. */
2215 gfc_conv_expr (&lse, expr);
2217 /* Use the scalar assignment. */
2218 rse.string_length = lse.string_length;
2219 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2221 /* Form the mask expression according to the mask tree list. */
2224 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT selects the ELSEWHERE branch sense.  */
2226 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2227 TREE_TYPE (wheremaskexpr),
2229 tmp = fold_build3 (COND_EXPR, void_type_node,
2231 build_empty_stmt (input_location));
2234 gfc_add_expr_to_block (&body, tmp);
2236 /* Increment count1. */
2237 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2238 count1, gfc_index_one_node);
2239 gfc_add_modify (&body, count1, tmp);
2241 /* Increment count3. */
2244 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2245 count3, gfc_index_one_node);
2246 gfc_add_modify (&body, count3, tmp);
2249 /* Generate the copying loops. */
2250 gfc_trans_scalarizing_loops (&loop1, &body);
2251 gfc_add_block_to_block (&block, &loop1.pre);
2252 gfc_add_block_to_block (&block, &loop1.post);
2253 gfc_cleanup_loop (&loop1);
2255 tmp = gfc_finish_block (&block);
2261 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2262 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2263 and should not be freed. WHEREMASK is the conditional execution mask
2264 whose sense may be inverted by INVERT. */
2267 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2268 tree count1, gfc_ss *lss, gfc_ss *rss,
2269 tree wheremask, bool invert)
2271 stmtblock_t block, body1;
2278 gfc_start_block (&block);
2280 gfc_init_se (&rse, NULL);
2281 gfc_init_se (&lse, NULL);
/* Scalar assignment: no scalarization loop required.  */
2283 if (lss == gfc_ss_terminator)
2285 gfc_init_block (&body1);
2286 gfc_conv_expr (&rse, expr2);
2287 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2291 /* Initialize the loop. */
2292 gfc_init_loopinfo (&loop);
2294 /* We may need LSS to determine the shape of the expression. */
2295 gfc_add_ss_to_loop (&loop, lss);
2296 gfc_add_ss_to_loop (&loop, rss);
2298 gfc_conv_ss_startstride (&loop);
2299 gfc_conv_loop_setup (&loop, &expr2->where);
2301 gfc_mark_ss_chain_used (rss, 1);
2302 /* Start the loop body. */
2303 gfc_start_scalarized_body (&loop, &body1);
2305 /* Translate the expression. */
2306 gfc_copy_loopinfo_to_se (&rse, &loop);
2308 gfc_conv_expr (&rse, expr2);
2310 /* Form the expression of the temporary. */
2311 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2314 /* Use the scalar assignment. */
2315 lse.string_length = rse.string_length;
2316 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2317 expr2->expr_type == EXPR_VARIABLE, true);
2319 /* Form the mask expression according to the mask tree list. */
2322 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
/* INVERT selects the ELSEWHERE branch sense.  */
2324 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2325 TREE_TYPE (wheremaskexpr),
2327 tmp = fold_build3 (COND_EXPR, void_type_node,
2328 wheremaskexpr, tmp, build_empty_stmt (input_location));
2331 gfc_add_expr_to_block (&body1, tmp);
2333 if (lss == gfc_ss_terminator)
2335 gfc_add_block_to_block (&block, &body1);
2337 /* Increment count1. */
2338 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2339 gfc_index_one_node);
2340 gfc_add_modify (&block, count1, tmp);
2344 /* Increment count1. */
2345 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2346 count1, gfc_index_one_node);
2347 gfc_add_modify (&body1, count1, tmp);
2349 /* Increment count3. */
2352 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2353 count3, gfc_index_one_node);
2354 gfc_add_modify (&body1, count3, tmp);
2357 /* Generate the copying loops. */
2358 gfc_trans_scalarizing_loops (&loop, &body1);
2360 gfc_add_block_to_block (&block, &loop.pre);
2361 gfc_add_block_to_block (&block, &loop.post);
2363 gfc_cleanup_loop (&loop);
2364 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2365 as tree nodes in SS may not be valid in different scope. */
2368 tmp = gfc_finish_block (&block);
2373 /* Calculate the size of temporary needed in the assignment inside forall.
2374 LSS and RSS are filled in this function. */
2377 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2378 stmtblock_t * pblock,
2379 gfc_ss **lss, gfc_ss **rss)
2387 *lss = gfc_walk_expr (expr1);
/* A scalar lhs needs exactly one element.  */
2390 size = gfc_index_one_node;
2391 if (*lss != gfc_ss_terminator)
2393 gfc_init_loopinfo (&loop);
2395 /* Walk the RHS of the expression. */
2396 *rss = gfc_walk_expr (expr2);
2397 if (*rss == gfc_ss_terminator)
2399 /* The rhs is scalar. Add a ss for the expression. */
2400 *rss = gfc_get_ss ();
2401 (*rss)->next = gfc_ss_terminator;
2402 (*rss)->type = GFC_SS_SCALAR;
2403 (*rss)->expr = expr2;
2406 /* Associate the SS with the loop. */
2407 gfc_add_ss_to_loop (&loop, *lss);
2408 /* We don't actually need to add the rhs at this point, but it might
2409 make guessing the loop bounds a bit easier. */
2410 gfc_add_ss_to_loop (&loop, *rss);
2412 /* We only want the shape of the expression, not rest of the junk
2413 generated by the scalarizer. */
2414 loop.array_parameter = 1;
2416 /* Calculate the bounds of the scalarization. */
/* Temporarily disable bounds checking; this computation is purely for
   sizing the temporary and must not emit runtime checks.  */
2417 save_flag = gfc_option.rtcheck;
2418 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2419 gfc_conv_ss_startstride (&loop);
2420 gfc_option.rtcheck = save_flag;
2421 gfc_conv_loop_setup (&loop, &expr2->where);
2423 /* Figure out how many elements we need. */
2424 for (i = 0; i < loop.dimen; i++)
2426 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2427 gfc_index_one_node, loop.from[i]);
2428 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2430 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2432 gfc_add_block_to_block (pblock, &loop.pre);
2433 size = gfc_evaluate_now (size, pblock);
2434 gfc_add_block_to_block (pblock, &loop.post);
2436 /* TODO: write a function that cleans up a loopinfo without freeing
2437 the SS chains. Currently a NOP. */
2444 /* Calculate the overall iterator number of the nested forall construct.
2445 This routine actually calculates the number of times the body of the
2446 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2447 that by the expression INNER_SIZE. The BLOCK argument specifies the
2448 block in which to calculate the result, and the optional INNER_SIZE_BODY
2449 argument contains any statements that need to executed (inside the loop)
2450 to initialize or calculate INNER_SIZE. */
2453 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2454 stmtblock_t *inner_size_body, stmtblock_t *block)
2456 forall_info *forall_tmp = nested_forall_info;
2460 /* We can eliminate the innermost unconditional loops with constant
2462 if (INTEGER_CST_P (inner_size))
/* Fold constant, unmasked levels directly into INNER_SIZE instead of
   generating runtime loops for them.  */
2465 && !forall_tmp->mask
2466 && INTEGER_CST_P (forall_tmp->size))
2468 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2469 inner_size, forall_tmp->size);
2470 forall_tmp = forall_tmp->prev_nest;
2473 /* If there are no loops left, we have our constant result. */
2478 /* Otherwise, create a temporary variable to compute the result. */
2479 number = gfc_create_var (gfc_array_index_type, "num");
2480 gfc_add_modify (block, number, gfc_index_zero_node);
2482 gfc_start_block (&body);
2483 if (inner_size_body)
2484 gfc_add_block_to_block (&body, inner_size_body);
/* number += inner_size, executed once per (masked) iteration.  */
2486 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2487 number, inner_size);
2490 gfc_add_modify (&body, number, tmp);
2491 tmp = gfc_finish_block (&body);
2493 /* Generate loops. */
2494 if (forall_tmp != NULL)
2495 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2497 gfc_add_expr_to_block (block, tmp);
2503 /* Allocate temporary for forall construct. SIZE is the size of temporary
2504 needed. PTEMP1 is returned for space free. */
2507 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
/* bytesize = size * sizeof (element); skip the multiply for
   single-byte elements.  */
2514 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2515 if (!integer_onep (unit))
2516 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2521 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2524 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2529 /* Allocate temporary for forall construct according to the information in
2530 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2531 assignment inside forall. PTEMP1 is returned for space free. */
2534 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2535 tree inner_size, stmtblock_t * inner_size_body,
2536 stmtblock_t * block, tree * ptemp1)
2540 /* Calculate the total size of temporary needed in forall construct. */
2541 size = compute_overall_iter_number (nested_forall_info, inner_size,
2542 inner_size_body, block);
/* Delegate the actual allocation once the element count is known.  */
2544 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2548 /* Handle assignments inside forall which need temporary.
2550 forall (i=start:end:stride; maskexpr)
2553 (where e,f<i> are arbitrary expressions possibly involving i
2554 and there is a dependency between e<i> and f<i>)
2556 masktmp(:) = maskexpr(:)
2561 for (i = start; i <= end; i += stride)
2565 for (i = start; i <= end; i += stride)
2567 if (masktmp[maskindex++])
2568 tmp[count1++] = f<i>
2572 for (i = start; i <= end; i += stride)
2574 if (masktmp[maskindex++])
2575 e<i> = tmp[count1++]
/* NOTE(review): elided extract -- numerous lines are missing between the
   embedded numbers (e.g. 2584-2590, 2598-2599, 2613-2614, 2617-2620,
   2627-2628, 2635, 2638-2639, 2644-2651, 2654-2655, 2660-2662); local
   declarations, braces and some call arguments are not visible here.  */
2580 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2581 tree wheremask, bool invert,
2582 forall_info * nested_forall_info,
2583 stmtblock_t * block)
2591 stmtblock_t inner_size_body;
2593 /* Create vars. count1 is the current iterator number of the nested
2595 count1 = gfc_create_var (gfc_array_index_type, "count1");
2597 /* Count is the wheremask index. */
2600 count = gfc_create_var (gfc_array_index_type, "count");
2601 gfc_add_modify (block, count, gfc_index_zero_node);
2606 /* Initialize count1. */
2607 gfc_add_modify (block, count1, gfc_index_zero_node);
2609 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2610 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2611 gfc_init_block (&inner_size_body);
2612 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
/* Character LHS needs a length-carrying type; evaluate the declared
   length once and cache it in the charlen's backend_decl.  */
2615 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2616 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2618 if (!expr1->ts.u.cl->backend_decl)
2621 gfc_init_se (&tse, NULL);
2622 gfc_conv_expr (&tse, expr1->ts.u.cl->length)
2623 expr1->ts.u.cl->backend_decl = tse.expr;
2625 type = gfc_get_character_type_len (gfc_default_character_kind,
2626 expr1->ts.u.cl->backend_decl);
2629 type = gfc_typenode_for_spec (&expr1->ts);
2631 /* Allocate temporary for nested forall construct according to the
2632 information in nested_forall_info and inner_size. */
2633 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2634 &inner_size_body, block, &ptemp1);
/* Phase 1: evaluate the RHS for every iteration into the temporary.  */
2636 /* Generate codes to copy rhs to the temporary . */
2637 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2640 /* Generate body and loops according to the information in
2641 nested_forall_info. */
2642 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2643 gfc_add_expr_to_block (block, tmp);
/* Reset the counters before the second pass over the iteration space.  */
2646 gfc_add_modify (block, count1, gfc_index_zero_node);
2650 gfc_add_modify (block, count, gfc_index_zero_node);
/* Phase 2: copy the buffered values from the temporary into the LHS.  */
2652 /* Generate codes to copy the temporary to lhs. */
2653 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2656 /* Generate body and loops according to the information in
2657 nested_forall_info. */
2658 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2659 gfc_add_expr_to_block (block, tmp);
2663 /* Free the temporary. */
2664 tmp = gfc_call_free (ptemp1);
2665 gfc_add_expr_to_block (block, tmp);
2670 /* Translate pointer assignment inside FORALL which need temporary. */
/* NOTE(review): elided extract -- braces, the 'static void' line, local
   declarations and several statements are missing (gaps in the embedded
   numbering, e.g. 2676-2688, 2690, 2698, 2701, 2716, 2721, 2728-2729,
   2746, 2751-2753).  Two cases are visible: a scalar pointer target
   (lss == gfc_ss_terminator) buffered as raw pointers, and an array
   section target buffered as array descriptors.  */
2673 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2674 forall_info * nested_forall_info,
2675 stmtblock_t * block)
2689 tree tmp, tmp1, ptemp1;
2691 count = gfc_create_var (gfc_array_index_type, "count");
2692 gfc_add_modify (block, count, gfc_index_zero_node);
2694 inner_size = integer_one_node;
2695 lss = gfc_walk_expr (expr1);
2696 rss = gfc_walk_expr (expr2);
2697 if (lss == gfc_ss_terminator)
/* Scalar pointer case: buffer one POINTER-to-LHS-type per iteration.  */
2699 type = gfc_typenode_for_spec (&expr1->ts);
2700 type = build_pointer_type (type);
2702 /* Allocate temporary for nested forall construct according to the
2703 information in nested_forall_info and inner_size. */
2704 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2705 inner_size, NULL, block, &ptemp1);
/* Pass 1: evaluate the RHS address into the temporary for each
   iteration of the FORALL nest.  */
2706 gfc_start_block (&body);
2707 gfc_init_se (&lse, NULL);
2708 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2709 gfc_init_se (&rse, NULL);
2710 rse.want_pointer = 1;
2711 gfc_conv_expr (&rse, expr2);
2712 gfc_add_block_to_block (&body, &rse.pre);
2713 gfc_add_modify (&body, lse.expr,
2714 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2715 gfc_add_block_to_block (&body, &rse.post);
2717 /* Increment count. */
2718 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2719 count, gfc_index_one_node);
2720 gfc_add_modify (&body, count, tmp);
2722 tmp = gfc_finish_block (&body);
2724 /* Generate body and loops according to the information in
2725 nested_forall_info. */
2726 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2727 gfc_add_expr_to_block (block, tmp);
/* Pass 2: replay the iterations, assigning the buffered pointer
   values to the LHS pointer.  */
2730 gfc_add_modify (block, count, gfc_index_zero_node);
2732 gfc_start_block (&body);
2733 gfc_init_se (&lse, NULL);
2734 gfc_init_se (&rse, NULL);
2735 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2736 lse.want_pointer = 1;
2737 gfc_conv_expr (&lse, expr1);
2738 gfc_add_block_to_block (&body, &lse.pre);
2739 gfc_add_modify (&body, lse.expr, rse.expr);
2740 gfc_add_block_to_block (&body, &lse.post);
2741 /* Increment count. */
2742 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2743 count, gfc_index_one_node);
2744 gfc_add_modify (&body, count, tmp);
2745 tmp = gfc_finish_block (&body);
2747 /* Generate body and loops according to the information in
2748 nested_forall_info. */
2749 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2750 gfc_add_expr_to_block (block, tmp);
/* Array-section case: buffer a full array descriptor per iteration
   instead of a bare pointer.  */
2754 gfc_init_loopinfo (&loop);
2756 /* Associate the SS with the loop. */
2757 gfc_add_ss_to_loop (&loop, rss);
2759 /* Setup the scalarizing loops and bounds. */
2760 gfc_conv_ss_startstride (&loop);
2762 gfc_conv_loop_setup (&loop, &expr2->where);
2764 info = &rss->data.info;
2765 desc = info->descriptor;
2767 /* Make a new descriptor. */
2768 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2769 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2770 loop.from, loop.to, 1,
2771 GFC_ARRAY_UNKNOWN, true);
2773 /* Allocate temporary for nested forall construct. */
2774 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2775 inner_size, NULL, block, &ptemp1);
/* Pass 1: build the RHS descriptor directly into the buffered slot
   (direct_byref avoids an intermediate descriptor copy).  */
2776 gfc_start_block (&body);
2777 gfc_init_se (&lse, NULL);
2778 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2779 lse.direct_byref = 1;
2780 rss = gfc_walk_expr (expr2);
2781 gfc_conv_expr_descriptor (&lse, expr2, rss);
2783 gfc_add_block_to_block (&body, &lse.pre);
2784 gfc_add_block_to_block (&body, &lse.post);
2786 /* Increment count. */
2787 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2788 count, gfc_index_one_node);
2789 gfc_add_modify (&body, count, tmp);
2791 tmp = gfc_finish_block (&body);
2793 /* Generate body and loops according to the information in
2794 nested_forall_info. */
2795 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2796 gfc_add_expr_to_block (block, tmp);
/* Pass 2: assign each buffered descriptor to the LHS descriptor.  */
2799 gfc_add_modify (block, count, gfc_index_zero_node);
2801 parm = gfc_build_array_ref (tmp1, count, NULL);
2802 lss = gfc_walk_expr (expr1);
2803 gfc_init_se (&lse, NULL);
2804 gfc_conv_expr_descriptor (&lse, expr1, lss);
2805 gfc_add_modify (&lse.pre, lse.expr, parm);
2806 gfc_start_block (&body);
2807 gfc_add_block_to_block (&body, &lse.pre);
2808 gfc_add_block_to_block (&body, &lse.post);
2810 /* Increment count. */
2811 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2812 count, gfc_index_one_node);
2813 gfc_add_modify (&body, count, tmp);
2815 tmp = gfc_finish_block (&body);
2817 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2818 gfc_add_expr_to_block (block, tmp);
2820 /* Free the temporary. */
2823 tmp = gfc_call_free (ptemp1);
2824 gfc_add_expr_to_block (block, tmp);
2829 /* FORALL and WHERE statements are really nasty, especially when you nest
2830 them. All the rhs of a forall assignment must be evaluated before the
2831 actual assignments are performed. Presumably this also applies to all the
2832 assignments in an inner where statement. */
2834 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2835 linear array, relying on the fact that we process in the same order in all
2838 forall (i=start:end:stride; maskexpr)
2842 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2844 count = ((end + 1 - start) / stride)
2845 masktmp(:) = maskexpr(:)
2848 for (i = start; i <= end; i += stride)
2850 if (masktmp[maskindex++])
2854 for (i = start; i <= end; i += stride)
2856 if (masktmp[maskindex++])
2860 Note that this code only works when there are no dependencies.
2861 Forall loop with array assignments and data dependencies are a real pain,
2862 because the size of the temporary cannot always be determined before the
2863 loop is executed. This problem is compounded by the presence of nested
/* Translate one level of a FORALL statement/construct: set up shadow
   index variables, compute the iteration count, materialize the mask
   (if any), then lower each statement in the FORALL body inside the
   generated loop nest.  Recurses via gfc_trans_forall_1 for nested
   FORALLs, threading the nesting context through nested_forall_info.
   NOTE(review): elided extract -- many lines are missing between the
   embedded numbers (declarations around 2869-2887, loop bodies, braces,
   switch header near 3076-3080, frees near 3157-3168); documenting only
   the visible statements.  */
2868 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2888 gfc_forall_iterator *fa;
2891 gfc_saved_var *saved_vars;
2892 iter_info *this_forall;
/* A compile-time-.false. scalar mask means the whole construct is dead.  */
2896 /* Do nothing if the mask is false. */
2898 && code->expr1->expr_type == EXPR_CONSTANT
2899 && !code->expr1->value.logical)
2900 return build_empty_stmt (input_location);
2903 /* Count the FORALL index number. */
2904 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2908 /* Allocate the space for var, start, end, step, varexpr. */
2909 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2910 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2911 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2912 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2913 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2914 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2916 /* Allocate the space for info. */
2917 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2919 gfc_start_block (&pre);
2920 gfc_init_block (&post);
2921 gfc_init_block (&block);
/* For each FORALL index: shadow the user symbol with a fresh temporary
   and evaluate start/end/stride once, recording them in an iter_info.  */
2924 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2926 gfc_symbol *sym = fa->var->symtree->n.sym;
2928 /* Allocate space for this_forall. */
2929 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2931 /* Create a temporary variable for the FORALL index. */
2932 tmp = gfc_typenode_for_spec (&sym->ts);
2933 var[n] = gfc_create_var (tmp, sym->name);
2934 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2936 /* Record it in this_forall. */
2937 this_forall->var = var[n];
2939 /* Replace the index symbol's backend_decl with the temporary decl. */
2940 sym->backend_decl = var[n];
2942 /* Work out the start, end and stride for the loop. */
2943 gfc_init_se (&se, NULL);
2944 gfc_conv_expr_val (&se, fa->start);
2945 /* Record it in this_forall. */
2946 this_forall->start = se.expr;
2947 gfc_add_block_to_block (&block, &se.pre);
2950 gfc_init_se (&se, NULL);
2951 gfc_conv_expr_val (&se, fa->end);
2952 /* Record it in this_forall. */
2953 this_forall->end = se.expr;
2954 gfc_make_safe_expr (&se);
2955 gfc_add_block_to_block (&block, &se.pre);
2958 gfc_init_se (&se, NULL);
2959 gfc_conv_expr_val (&se, fa->stride);
2960 /* Record it in this_forall. */
2961 this_forall->step = se.expr;
2962 gfc_make_safe_expr (&se);
2963 gfc_add_block_to_block (&block, &se.pre);
2966 /* Set the NEXT field of this_forall to NULL. */
2967 this_forall->next = NULL;
2968 /* Link this_forall to the info construct. */
2969 if (info->this_loop)
2971 iter_info *iter_tmp = info->this_loop;
2972 while (iter_tmp->next != NULL)
2973 iter_tmp = iter_tmp->next;
2974 iter_tmp->next = this_forall;
2977 info->this_loop = this_forall;
/* Iteration count of this level = product over all indices of
   (end + step - start) / step, in gfc_array_index_type.  */
2983 /* Calculate the size needed for the current forall level. */
2984 size = gfc_index_one_node;
2985 for (n = 0; n < nvar; n++)
2987 /* size = (end + step - start) / step. */
2988 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2990 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2992 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2993 tmp = convert (gfc_array_index_type, tmp);
2995 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2998 /* Record the nvar and size of current forall level. */
3004 /* If the mask is .true., consider the FORALL unconditional. */
3005 if (code->expr1->expr_type == EXPR_CONSTANT
3006 && code->expr1->value.logical)
3014 /* First we need to allocate the mask. */
3017 /* As the mask array can be very big, prefer compact boolean types. */
3018 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3019 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3020 size, NULL, &block, &pmask);
3021 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3023 /* Record them in the info structure. */
3024 info->maskindex = maskindex;
3029 /* No mask was specified. */
3030 maskindex = NULL_TREE;
3031 mask = pmask = NULL_TREE;
3034 /* Link the current forall level to nested_forall_info. */
3035 info->prev_nest = nested_forall_info;
3036 nested_forall_info = info;
3038 /* Copy the mask into a temporary variable if required.
3039 For now we assume a mask temporary is needed. */
3042 /* As the mask array can be very big, prefer compact boolean types. */
3043 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3045 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3047 /* Start of mask assignment loop body. */
3048 gfc_start_block (&body);
3050 /* Evaluate the mask expression. */
3051 gfc_init_se (&se, NULL);
3052 gfc_conv_expr_val (&se, code->expr1);
3053 gfc_add_block_to_block (&body, &se.pre);
3055 /* Store the mask. */
3056 se.expr = convert (mask_type, se.expr);
3058 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3059 gfc_add_modify (&body, tmp, se.expr);
3061 /* Advance to the next mask element. */
3062 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3063 maskindex, gfc_index_one_node);
3064 gfc_add_modify (&body, maskindex, tmp);
3066 /* Generate the loops. */
3067 tmp = gfc_finish_block (&body);
3068 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3069 gfc_add_expr_to_block (&block, tmp);
/* Lower each statement of the FORALL body in turn.  The dispatch below
   appears to be a switch on the statement op; the switch header itself
   is not visible in this extract.  */
3072 c = code->block->next;
3074 /* TODO: loop merging in FORALL statements. */
3075 /* Now that we've got a copy of the mask, generate the assignment loops. */
3081 /* A scalar or array assignment. DO the simple check for
3082 lhs to rhs dependencies. These make a temporary for the
3083 rhs and form a second forall block to copy to variable. */
3084 need_temp = check_forall_dependencies(c, &pre, &post);
3086 /* Temporaries due to array assignment data dependencies introduce
3087 no end of problems. */
3089 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3090 nested_forall_info, &block);
3093 /* Use the normal assignment copying routines. */
3094 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3096 /* Generate body and loops. */
3097 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3099 gfc_add_expr_to_block (&block, tmp);
3102 /* Cleanup any temporary symtrees that have been made to deal
3103 with dependencies. */
3105 cleanup_forall_symtrees (c);
3110 /* Translate WHERE or WHERE construct nested in FORALL. */
3111 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3114 /* Pointer assignment inside FORALL. */
3115 case EXEC_POINTER_ASSIGN:
3116 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3118 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3119 nested_forall_info, &block);
3122 /* Use the normal assignment copying routines. */
3123 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3125 /* Generate body and loops. */
3126 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3128 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the current level linked in.  */
3133 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3134 gfc_add_expr_to_block (&block, tmp);
3137 /* Explicit subroutine calls are prevented by the frontend but interface
3138 assignments can legitimately produce them. */
3139 case EXEC_ASSIGN_CALL:
3140 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3141 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3142 gfc_add_expr_to_block (&block, tmp);
/* Epilogue: undo the symbol shadowing and release per-level storage.  */
3152 /* Restore the original index variables. */
3153 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3154 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3156 /* Free the space for var, start, end, step, varexpr. */
3162 gfc_free (saved_vars);
3164 /* Free the space for this forall_info. */
3169 /* Free the temporary for the mask. */
3170 tmp = gfc_call_free (pmask);
3171 gfc_add_expr_to_block (&block, tmp);
3174 pushdecl (maskindex);
3176 gfc_add_block_to_block (&pre, &block);
3177 gfc_add_block_to_block (&pre, &post);
3179 return gfc_finish_block (&pre);
3183 /* Translate the FORALL statement or construct. */
/* Public entry point: delegates to gfc_trans_forall_1 with a NULL
   nesting context (i.e. this is the outermost FORALL level).
   NOTE(review): the surrounding braces (embedded lines 3186/3188) are
   missing from this extract.  */
3185 tree gfc_trans_forall (gfc_code * code)
3187 return gfc_trans_forall_1 (code, NULL);
3191 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3192 If the WHERE construct is nested in FORALL, compute the overall temporary
3193 needed by the WHERE mask expression multiplied by the iterator number of
3195 ME is the WHERE mask expression.
3196 MASK is the current execution mask upon input, whose sense may or may
3197 not be inverted as specified by the INVERT argument.
3198 CMASK is the updated execution mask on output, or NULL if not required.
3199 PMASK is the pending execution mask on output, or NULL if not required.
3200 BLOCK is the block in which to place the condition evaluation loops. */
/* NOTE(review): elided extract -- the 'static void' line, braces and some
   declarations/statements are missing (gaps at e.g. 3206-3209, 3212-3213,
   3232-3234, 3269, 3287, 3298-3300, 3315-3316).  Visible logic:
   cmask[i] = mask[i] (maybe inverted) AND cond;
   pmask[i] = mask[i] (maybe inverted) AND NOT cond.  */
3203 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3204 tree mask, bool invert, tree cmask, tree pmask,
3205 tree mask_type, stmtblock_t * block)
3210 stmtblock_t body, body1;
3211 tree count, cond, mtmp;
3214 gfc_init_loopinfo (&loop);
3216 lss = gfc_walk_expr (me);
3217 rss = gfc_walk_expr (me);
3219 /* Variable to index the temporary. */
3220 count = gfc_create_var (gfc_array_index_type, "count");
3221 /* Initialize count. */
3222 gfc_add_modify (block, count, gfc_index_zero_node);
3224 gfc_start_block (&body);
3226 gfc_init_se (&rse, NULL);
3227 gfc_init_se (&lse, NULL);
/* Scalar mask expression needs no scalarizer; array masks are walked
   by the scalarizer loop set up below.  */
3229 if (lss == gfc_ss_terminator)
3231 gfc_init_block (&body1);
3235 /* Initialize the loop. */
3236 gfc_init_loopinfo (&loop);
3238 /* We may need LSS to determine the shape of the expression. */
3239 gfc_add_ss_to_loop (&loop, lss);
3240 gfc_add_ss_to_loop (&loop, rss);
3242 gfc_conv_ss_startstride (&loop);
3243 gfc_conv_loop_setup (&loop, &me->where);
3245 gfc_mark_ss_chain_used (rss, 1);
3246 /* Start the loop body. */
3247 gfc_start_scalarized_body (&loop, &body1);
3249 /* Translate the expression. */
3250 gfc_copy_loopinfo_to_se (&rse, &loop);
3252 gfc_conv_expr (&rse, me);
3255 /* Variable to evaluate mask condition. */
3256 cond = gfc_create_var (mask_type, "cond");
3257 if (mask && (cmask || pmask))
3258 mtmp = gfc_create_var (mask_type, "mask");
3259 else mtmp = NULL_TREE;
3261 gfc_add_block_to_block (&body1, &lse.pre);
3262 gfc_add_block_to_block (&body1, &rse.pre);
3264 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
/* Load the incoming execution mask element, inverting its sense when
   INVERT is set (the inversion line 3270 applies conditionally;
   the controlling condition around it is not visible here).  */
3266 if (mask && (cmask || pmask))
3268 tmp = gfc_build_array_ref (mask, count, NULL);
3270 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3271 gfc_add_modify (&body1, mtmp, tmp);
/* Store the updated control mask: active where both the incoming mask
   and the WHERE condition hold.  */
3276 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3279 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3280 gfc_add_modify (&body1, tmp1, tmp);
/* Store the pending mask: active where the incoming mask holds but the
   WHERE condition does not (for the ELSEWHERE branches).  */
3285 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3286 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3288 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3289 gfc_add_modify (&body1, tmp1, tmp);
3292 gfc_add_block_to_block (&body1, &lse.post);
3293 gfc_add_block_to_block (&body1, &rse.post);
3295 if (lss == gfc_ss_terminator)
3297 gfc_add_block_to_block (&body, &body1);
3301 /* Increment count. */
3302 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3303 gfc_index_one_node);
3304 gfc_add_modify (&body1, count, tmp1);
3306 /* Generate the copying loops. */
3307 gfc_trans_scalarizing_loops (&loop, &body1);
3309 gfc_add_block_to_block (&body, &loop.pre);
3310 gfc_add_block_to_block (&body, &loop.post);
3312 gfc_cleanup_loop (&loop);
3313 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3314 as tree nodes in SS may not be valid in different scope. */
3317 tmp1 = gfc_finish_block (&body);
3318 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3319 if (nested_forall_info != NULL)
3320 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3322 gfc_add_expr_to_block (block, tmp1);
3326 /* Translate an assignment statement in a WHERE statement or construct
3327 statement. The MASK expression is used to control which elements
3328 of EXPR1 shall be assigned. The sense of MASK is specified by
/* NOTE(review): elided extract -- the return type, part of the parameter
   list, braces, several declarations and some statements are missing
   (gaps at e.g. 3329-3331, 3335-3339, 3341-3346, 3348, 3352-3353,
   3357-3363, 3369-3370, 3372-3373, 3377-3380, 3385, 3387, 3390-3391,
   3394, 3397, 3400-3402, 3406, 3409, 3412, 3415, 3419-3420, 3423-3428,
   3432-3433, 3436, 3440, 3443-3444, 3446, 3448, 3450, 3452, 3456, 3458,
   3460, 3462, 3467, 3470-3472, 3475, 3477, 3479, 3484, 3490, 3492-3493,
   3497, 3500, 3502, 3504, 3506-3507, 3510, 3514, 3519-3521, 3526-3527,
   3530, 3535-3536).  Documenting only what is visible.  */
3332 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3333 tree mask, bool invert,
3334 tree count1, tree count2,
3340 gfc_ss *lss_section;
3347 tree index, maskexpr;
3349 /* A defined assignment. */
3350 if (cnext && cnext->resolved_sym)
3351 return gfc_trans_call (cnext, true, mask, count1, invert);
3354 /* TODO: handle this special case.
3355 Special case a single function returning an array. */
3356 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3358 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3364 /* Assignment of the form lhs = rhs. */
3365 gfc_start_block (&block);
3367 gfc_init_se (&lse, NULL);
3368 gfc_init_se (&rse, NULL);
3371 lss = gfc_walk_expr (expr1);
3374 /* In each where-assign-stmt, the mask-expr and the variable being
3375 defined shall be arrays of the same shape. */
3376 gcc_assert (lss != gfc_ss_terminator);
3378 /* The assignment needs scalarization. */
3381 /* Find a non-scalar SS from the lhs. */
3382 while (lss_section != gfc_ss_terminator
3383 && lss_section->type != GFC_SS_SECTION)
3384 lss_section = lss_section->next;
3386 gcc_assert (lss_section != gfc_ss_terminator);
3388 /* Initialize the scalarizer. */
3389 gfc_init_loopinfo (&loop);
3392 rss = gfc_walk_expr (expr2);
3393 if (rss == gfc_ss_terminator)
3395 /* The rhs is scalar. Add a ss for the expression. */
3396 rss = gfc_get_ss ();
3398 rss->next = gfc_ss_terminator;
3399 rss->type = GFC_SS_SCALAR;
3403 /* Associate the SS with the loop. */
3404 gfc_add_ss_to_loop (&loop, lss);
3405 gfc_add_ss_to_loop (&loop, rss);
3407 /* Calculate the bounds of the scalarization. */
3408 gfc_conv_ss_startstride (&loop);
3410 /* Resolve any data dependencies in the statement. */
3411 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3413 /* Setup the scalarizing loops. */
3414 gfc_conv_loop_setup (&loop, &expr2->where);
3416 /* Setup the gfc_se structures. */
3417 gfc_copy_loopinfo_to_se (&lse, &loop);
3418 gfc_copy_loopinfo_to_se (&rse, &loop);
3421 gfc_mark_ss_chain_used (rss, 1);
3422 if (loop.temp_ss == NULL)
3425 gfc_mark_ss_chain_used (lss, 1);
/* A dependency forced a temporary (loop.temp_ss): the lhs chain is used
   in both passes of the two-pass loop (flag 3).  */
3429 lse.ss = loop.temp_ss;
3430 gfc_mark_ss_chain_used (lss, 3);
3431 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3434 /* Start the scalarized loop body. */
3435 gfc_start_scalarized_body (&loop, &body);
3437 /* Translate the expression. */
3438 gfc_conv_expr (&rse, expr2);
3439 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3441 gfc_conv_tmp_array_ref (&lse);
3442 gfc_advance_se_ss_chain (&lse);
3445 gfc_conv_expr (&lse, expr1);
/* Guard the scalar assignment with the mask element; the mask sense is
   flipped with TRUTH_NOT_EXPR (presumably when INVERT is set -- the
   controlling condition around line 3451 is not visible).  */
3447 /* Form the mask expression according to the mask. */
3449 maskexpr = gfc_build_array_ref (mask, index, NULL);
3451 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3453 /* Use the scalar assignment as is. */
3454 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3455 loop.temp_ss != NULL, false, true);
3457 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3459 gfc_add_expr_to_block (&body, tmp);
3461 if (lss == gfc_ss_terminator)
3463 /* Increment count1. */
3464 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3465 count1, gfc_index_one_node);
3466 gfc_add_modify (&body, count1, tmp);
3468 /* Use the scalar assignment as is. */
3469 gfc_add_block_to_block (&block, &body);
3473 gcc_assert (lse.ss == gfc_ss_terminator
3474 && rse.ss == gfc_ss_terminator);
3476 if (loop.temp_ss != NULL)
3478 /* Increment count1 before finish the main body of a scalarized
3480 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3481 count1, gfc_index_one_node);
3482 gfc_add_modify (&body, count1, tmp);
3483 gfc_trans_scalarized_loop_boundary (&loop, &body);
3485 /* We need to copy the temporary to the actual lhs. */
3486 gfc_init_se (&lse, NULL);
3487 gfc_init_se (&rse, NULL);
3488 gfc_copy_loopinfo_to_se (&lse, &loop);
3489 gfc_copy_loopinfo_to_se (&rse, &loop);
3491 rse.ss = loop.temp_ss;
3494 gfc_conv_tmp_array_ref (&rse);
3495 gfc_advance_se_ss_chain (&rse);
3496 gfc_conv_expr (&lse, expr1);
3498 gcc_assert (lse.ss == gfc_ss_terminator
3499 && rse.ss == gfc_ss_terminator);
3501 /* Form the mask expression according to the mask tree list. */
3503 maskexpr = gfc_build_array_ref (mask, index, NULL);
3505 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3508 /* Use the scalar assignment as is. */
3509 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3511 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3512 build_empty_stmt (input_location));
3513 gfc_add_expr_to_block (&body, tmp);
3515 /* Increment count2. */
3516 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3517 count2, gfc_index_one_node);
3518 gfc_add_modify (&body, count2, tmp);
3522 /* Increment count1. */
3523 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3524 count1, gfc_index_one_node);
3525 gfc_add_modify (&body, count1, tmp);
3528 /* Generate the copying loops. */
3529 gfc_trans_scalarizing_loops (&loop, &body);
3531 /* Wrap the whole thing up. */
3532 gfc_add_block_to_block (&block, &loop.pre);
3533 gfc_add_block_to_block (&block, &loop.post);
3534 gfc_cleanup_loop (&loop);
3537 return gfc_finish_block (&block);
3541 /* Translate the WHERE construct or statement.
3542 This function can be called iteratively to translate the nested WHERE
3543 construct or statement.
3544 MASK is the control mask. */
/* NOTE(review): elided extract -- the 'static void' line, braces and many
   statements are missing (gaps at e.g. 3545-3546, 3549, 3552-3559,
   3561-3563, 3569, 3572, 3575, 3577-3578, 3581-3582, 3586-3588, 3590,
   3592, 3595, 3597, 3599-3601, 3603-3613, 3615, 3620, 3624, 3631, 3633,
   3635-3636, 3638, 3640-3644, 3648, 3650-3651, 3657, 3659, 3662-3663,
   3665, 3669-3671, 3673-3675, 3678, 3682-3685, 3688, 3692-3702, 3705,
   3707, 3711, 3713-3714, 3720, 3722-3725, 3727, 3729-3732, 3738,
   3740-3742, 3744-3747, 3749, 3752-3757, 3760, 3764, 3767-3772,
   3774-3778, 3780-3781, 3784-3785, 3787-3788, 3791-3793).  The visible
   flow: decide which mask temporaries are needed, allocate them,
   evaluate the mask(s), lower each where-body statement under the
   current control mask, then walk the ELSEWHERE chain and free the
   temporaries.  */
3547 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3548 forall_info * nested_forall_info, stmtblock_t * block)
3550 stmtblock_t inner_size_body;
3551 tree inner_size, size;
3560 tree count1, count2;
3564 tree pcmask = NULL_TREE;
3565 tree ppmask = NULL_TREE;
3566 tree cmask = NULL_TREE;
3567 tree pmask = NULL_TREE;
3568 gfc_actual_arglist *arg;
3570 /* the WHERE statement or the WHERE construct statement. */
3571 cblock = code->block;
3573 /* As the mask array can be very big, prefer compact boolean types. */
3574 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3576 /* Determine which temporary masks are needed. */
3579 /* One clause: No ELSEWHEREs. */
3580 need_cmask = (cblock->next != 0);
3583 else if (cblock->block->block)
3585 /* Three or more clauses: Conditional ELSEWHEREs. */
3589 else if (cblock->next)
3591 /* Two clauses, the first non-empty. */
3593 need_pmask = (mask != NULL_TREE
3594 && cblock->block->next != 0);
3596 else if (!cblock->block->next)
3598 /* Two clauses, both empty. */
3602 /* Two clauses, the first empty, the second non-empty. */
3605 need_cmask = (cblock->block->expr1 != 0);
3614 if (need_cmask || need_pmask)
3616 /* Calculate the size of temporary needed by the mask-expr. */
3617 gfc_init_block (&inner_size_body);
3618 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3619 &inner_size_body, &lss, &rss);
3621 /* Calculate the total size of temporary needed. */
3622 size = compute_overall_iter_number (nested_forall_info, inner_size,
3623 &inner_size_body, block);
/* Clamp a negative size to zero so the allocation never underflows.  */
3625 /* Check whether the size is negative. */
3626 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3627 gfc_index_zero_node);
3628 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3629 gfc_index_zero_node, size);
3630 size = gfc_evaluate_now (size, block);
3632 /* Allocate temporary for WHERE mask if needed. */
3634 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3637 /* Allocate temporary for !mask if needed. */
3639 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3645 /* Each time around this loop, the where clause is conditional
3646 on the value of mask and invert, which are updated at the
3647 bottom of the loop. */
3649 /* Has mask-expr. */
3652 /* Ensure that the WHERE mask will be evaluated exactly once.
3653 If there are no statements in this WHERE/ELSEWHERE clause,
3654 then we don't need to update the control mask (cmask).
3655 If this is the last clause of the WHERE construct, then
3656 we don't need to update the pending control mask (pmask). */
3658 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3660 cblock->next ? cmask : NULL_TREE,
3661 cblock->block ? pmask : NULL_TREE,
3664 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3666 (cblock->next || cblock->block)
3667 ? cmask : NULL_TREE,
3668 NULL_TREE, mask_type, block);
3672 /* It's a final elsewhere-stmt. No mask-expr is present. */
3676 /* The body of this where clause are controlled by cmask with
3677 sense specified by invert. */
3679 /* Get the assignment statement of a WHERE statement, or the first
3680 statement in where-body-construct of a WHERE construct. */
3681 cnext = cblock->next;
3686 /* WHERE assignment statement. */
3687 case EXEC_ASSIGN_CALL:
3689 arg = cnext->ext.actual;
3690 expr1 = expr2 = NULL;
3691 for (; arg; arg = arg->next)
3703 expr1 = cnext->expr1;
3704 expr2 = cnext->expr2;
/* Inside FORALL a dependent assignment must be buffered via
   gfc_trans_assign_need_temp; otherwise it goes through
   gfc_trans_where_assign under the nested loop.  */
3706 if (nested_forall_info != NULL)
3708 need_temp = gfc_check_dependency (expr1, expr2, 0);
3709 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3710 gfc_trans_assign_need_temp (expr1, expr2,
3712 nested_forall_info, block);
3715 /* Variables to control maskexpr. */
3716 count1 = gfc_create_var (gfc_array_index_type, "count1");
3717 count2 = gfc_create_var (gfc_array_index_type, "count2");
3718 gfc_add_modify (block, count1, gfc_index_zero_node);
3719 gfc_add_modify (block, count2, gfc_index_zero_node);
3721 tmp = gfc_trans_where_assign (expr1, expr2,
3726 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3728 gfc_add_expr_to_block (block, tmp);
3733 /* Variables to control maskexpr. */
3734 count1 = gfc_create_var (gfc_array_index_type, "count1");
3735 count2 = gfc_create_var (gfc_array_index_type, "count2");
3736 gfc_add_modify (block, count1, gfc_index_zero_node);
3737 gfc_add_modify (block, count2, gfc_index_zero_node);
3739 tmp = gfc_trans_where_assign (expr1, expr2,
3743 gfc_add_expr_to_block (block, tmp);
3748 /* WHERE or WHERE construct is part of a where-body-construct. */
3750 gfc_trans_where_2 (cnext, cmask, invert,
3751 nested_forall_info, block);
3758 /* The next statement within the same where-body-construct. */
3759 cnext = cnext->next;
3761 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3762 cblock = cblock->block;
3763 if (mask == NULL_TREE)
3765 /* If we're the initial WHERE, we can simply invert the sense
3766 of the current mask to obtain the "mask" for the remaining
3773 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3779 /* If we allocated a pending mask array, deallocate it now. */
3782 tmp = gfc_call_free (ppmask);
3783 gfc_add_expr_to_block (block, tmp);
3786 /* If we allocated a current mask array, deallocate it now. */
3789 tmp = gfc_call_free (pcmask);
3790 gfc_add_expr_to_block (block, tmp);
3794 /* Translate a simple WHERE construct or statement without dependencies.
3795 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3796 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3797 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
/* NOTE(review): this chunk appears to have lost lines to extraction (e.g. the
   return type, the declarations of the scalarizer loop and of edss/esss, and
   several braces).  Verify against the original trans-stmt.c before editing.  */
3800 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3802 stmtblock_t block, body;
3803 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3804 tree tmp, cexpr, tstmt, estmt;
3805 gfc_ss *css, *tdss, *tsss;
3806 gfc_se cse, tdse, tsse, edse, esse;
3811 /* Allow the scalarizer to workshare simple where loops. */
3812 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3813 ompws_flags |= OMPWS_SCALARIZER_WS;
/* Pull the mask condition and the single assignment out of each clause;
   the else-clause expressions are NULL when there is no ELSEWHERE part.  */
3815 cond = cblock->expr1;
3816 tdst = cblock->next->expr1;
3817 tsrc = cblock->next->expr2;
3818 edst = eblock ? eblock->next->expr1 : NULL;
3819 esrc = eblock ? eblock->next->expr2 : NULL;
3821 gfc_start_block (&block);
3822 gfc_init_loopinfo (&loop);
3824 /* Handle the condition. */
3825 gfc_init_se (&cse, NULL);
3826 css = gfc_walk_expr (cond);
3827 gfc_add_ss_to_loop (&loop, css);
3829 /* Handle the then-clause. */
3830 gfc_init_se (&tdse, NULL);
3831 gfc_init_se (&tsse, NULL);
3832 tdss = gfc_walk_expr (tdst);
3833 tsss = gfc_walk_expr (tsrc);
/* A scalar RHS yields no scalarization chain; wrap it in a GFC_SS_SCALAR
   node so it can still participate in the loop.  */
3834 if (tsss == gfc_ss_terminator)
3836 tsss = gfc_get_ss ();
3838 tsss->next = gfc_ss_terminator;
3839 tsss->type = GFC_SS_SCALAR;
3842 gfc_add_ss_to_loop (&loop, tdss);
3843 gfc_add_ss_to_loop (&loop, tsss);
3847 /* Handle the else clause. */
3848 gfc_init_se (&edse, NULL);
3849 gfc_init_se (&esse, NULL);
3850 edss = gfc_walk_expr (edst);
3851 esss = gfc_walk_expr (esrc);
3852 if (esss == gfc_ss_terminator)
3854 esss = gfc_get_ss ();
3856 esss->next = gfc_ss_terminator;
3857 esss->type = GFC_SS_SCALAR;
3860 gfc_add_ss_to_loop (&loop, edss);
3861 gfc_add_ss_to_loop (&loop, esss);
/* Set up the common scalarized loop over all chains.  */
3864 gfc_conv_ss_startstride (&loop);
3865 gfc_conv_loop_setup (&loop, &tdst->where);
3867 gfc_mark_ss_chain_used (css, 1);
3868 gfc_mark_ss_chain_used (tdss, 1);
3869 gfc_mark_ss_chain_used (tsss, 1);
3872 gfc_mark_ss_chain_used (edss, 1);
3873 gfc_mark_ss_chain_used (esss, 1);
3876 gfc_start_scalarized_body (&loop, &body);
3878 gfc_copy_loopinfo_to_se (&cse, &loop);
3879 gfc_copy_loopinfo_to_se (&tdse, &loop);
3880 gfc_copy_loopinfo_to_se (&tsse, &loop);
3886 gfc_copy_loopinfo_to_se (&edse, &loop);
3887 gfc_copy_loopinfo_to_se (&esse, &loop);
/* Evaluate the mask once per iteration.  */
3892 gfc_conv_expr (&cse, cond);
3893 gfc_add_block_to_block (&body, &cse.pre);
3896 gfc_conv_expr (&tsse, tsrc);
/* When the loop created a temporary, the destination reference must go
   through it; gfc_advance_se_ss_chain keeps the ss chain in step.  */
3897 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3899 gfc_conv_tmp_array_ref (&tdse);
3900 gfc_advance_se_ss_chain (&tdse);
3903 gfc_conv_expr (&tdse, tdst);
3907 gfc_conv_expr (&esse, esrc);
3908 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3910 gfc_conv_tmp_array_ref (&edse);
3911 gfc_advance_se_ss_chain (&edse);
3914 gfc_conv_expr (&edse, edst);
/* Both assignments become the two arms of a single COND_EXPR on the mask,
   i.e. the WHERE and ELSEWHERE loops are coalesced into one.  */
3917 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
3918 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
3920 : build_empty_stmt (input_location);
3921 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3922 gfc_add_expr_to_block (&body, tmp);
3923 gfc_add_block_to_block (&body, &cse.post);
/* Generate the surrounding loops and return the finished block.  */
3925 gfc_trans_scalarizing_loops (&loop, &body);
3926 gfc_add_block_to_block (&block, &loop.pre);
3927 gfc_add_block_to_block (&block, &loop.post);
3928 gfc_cleanup_loop (&loop);
3930 return gfc_finish_block (&block);
3933 /* As the WHERE or WHERE construct statement can be nested, we call
3934 gfc_trans_where_2 to do the translation, and pass the initial
3935 NULL values for both the control mask and the pending control mask. */
/* NOTE(review): several condition lines and braces seem to be missing from
   this extraction (e.g. parts of the simple-WHERE pattern test); confirm
   against the original trans-stmt.c.  */
3938 gfc_trans_where (gfc_code * code)
3944 cblock = code->block;
/* Recognize the simple form "WHERE (cond) x = y" (a single assignment
   and nothing after it) so the fast path gfc_trans_where_3 can be used.  */
3946 && cblock->next->op == EXEC_ASSIGN
3947 && !cblock->next->next)
3949 eblock = cblock->block;
3952 /* A simple "WHERE (cond) x = y" statement or block is
3953 dependence free if cond is not dependent upon writing x,
3954 and the source y is unaffected by the destination x. */
3955 if (!gfc_check_dependency (cblock->next->expr1,
3957 && !gfc_check_dependency (cblock->next->expr1,
3958 cblock->next->expr2, 0))
3959 return gfc_trans_where_3 (cblock, NULL);
/* Otherwise check for the two-assignment WHERE/ELSEWHERE form.  */
3961 else if (!eblock->expr1
3964 && eblock->next->op == EXEC_ASSIGN
3965 && !eblock->next->next)
3967 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3968 block is dependence free if cond is not dependent on writes
3969 to x1 and x2, y1 is not dependent on writes to x2, and y2
3970 is not dependent on writes to x1, and both y's are not
3971 dependent upon their own x's. In addition to this, the
3972 final two dependency checks below exclude all but the same
3973 array reference if the where and elswhere destinations
3974 are the same. In short, this is VERY conservative and this
3975 is needed because the two loops, required by the standard
3976 are coalesced in gfc_trans_where_3. */
3977 if (!gfc_check_dependency(cblock->next->expr1,
3979 && !gfc_check_dependency(eblock->next->expr1,
3981 && !gfc_check_dependency(cblock->next->expr1,
3982 eblock->next->expr2, 1)
3983 && !gfc_check_dependency(eblock->next->expr1,
3984 cblock->next->expr2, 1)
3985 && !gfc_check_dependency(cblock->next->expr1,
3986 cblock->next->expr2, 1)
3987 && !gfc_check_dependency(eblock->next->expr1,
3988 eblock->next->expr2, 1)
3989 && !gfc_check_dependency(cblock->next->expr1,
3990 eblock->next->expr1, 0)
3991 && !gfc_check_dependency(eblock->next->expr1,
3992 cblock->next->expr1, 0))
3993 return gfc_trans_where_3 (cblock, eblock);
/* General case: fall back to the fully-masked translation with NULL
   initial control and pending-control masks.  */
3997 gfc_start_block (&block);
3999 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4001 return gfc_finish_block (&block);
4005 /* CYCLE a DO loop. The label decl has already been created by
4006 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4007 node at the head of the loop. We must mark the label as used. */
4010 gfc_trans_cycle (gfc_code * code)
4014 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
4015 TREE_USED (cycle_label) = 1;
4016 return build1_v (GOTO_EXPR, cycle_label);
4020 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4021 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4025 gfc_trans_exit (gfc_code * code)
4029 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
4030 TREE_USED (exit_label) = 1;
4031 return build1_v (GOTO_EXPR, exit_label);
4035 /* Translate the ALLOCATE statement. */
/* NOTE(review): many lines are missing from this extraction (else-branches,
   closing braces, the local declarations near line 4040).  Treat the comments
   below as a reading aid and verify details against the original file.  */
4038 gfc_trans_allocate (gfc_code * code)
/* Nothing to do for an empty allocation list.  */
4051 if (!code->ext.alloc.list)
4054 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4056 gfc_start_block (&block);
4058 /* Either STAT= and/or ERRMSG is present. */
4059 if (code->expr1 || code->expr2)
4061 tree gfc_int4_type_node = gfc_get_int_type (4);
4063 stat = gfc_create_var (gfc_int4_type_node, "stat");
4064 pstat = gfc_build_addr_expr (NULL_TREE, stat);
/* Jump target taken after any failed allocation when STAT=/ERRMSG= exists.  */
4066 error_label = gfc_build_label_decl (NULL_TREE);
4067 TREE_USED (error_label) = 1;
/* Translate each object in the allocation list in turn.  */
4070 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4072 expr = gfc_copy_expr (al->expr);
/* For CLASS entities allocate the underlying "$data" component.  */
4074 if (expr->ts.type == BT_CLASS)
4075 gfc_add_component_ref (expr, "$data");
4077 gfc_init_se (&se, NULL);
4078 gfc_start_block (&se.pre);
4080 se.want_pointer = 1;
4081 se.descriptor_only = 1;
4082 gfc_conv_expr (&se, expr);
/* gfc_array_allocate handles array cases itself; when it declines we have
   a scalar (or derived-type) allocation to emit here.  */
4084 if (!gfc_array_allocate (&se, expr, pstat))
4086 /* A scalar or derived type. */
4088 /* Determine allocate size. */
/* For a CLASS SOURCE= the size comes from the vtab at run time.  */
4089 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4093 sz = gfc_copy_expr (code->expr3);
4094 gfc_add_component_ref (sz, "$vptr");
4095 gfc_add_component_ref (sz, "$size");
4096 gfc_init_se (&se_sz, NULL);
4097 gfc_conv_expr (&se_sz, sz);
/* Otherwise the size is a compile-time type size, taken from SOURCE=,
   from the ALLOCATE type-spec, or from the object's own type.  */
4101 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4102 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4103 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4104 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4106 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4108 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4109 memsz = se.string_length;
4111 /* Allocate - for non-pointers with re-alloc checking. */
4118 /* Find the last reference in the chain. */
4119 while (ref && ref->next != NULL)
4121 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
/* Whether the object (or its final component) is ALLOCATABLE decides
   which runtime allocation helper is used.  */
4126 allocatable = expr->symtree->n.sym->attr.allocatable;
4128 allocatable = ref->u.c.component->attr.allocatable;
4131 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4134 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
/* Store the returned pointer into the object.  */
4137 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4138 fold_convert (TREE_TYPE (se.expr), tmp));
4139 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=/ERRMSG=, branch to the error label when stat != 0.  */
4141 if (code->expr1 || code->expr2)
4143 tmp = build1_v (GOTO_EXPR, error_label);
4144 parm = fold_build2 (NE_EXPR, boolean_type_node,
4145 stat, build_int_cst (TREE_TYPE (stat), 0));
4146 tmp = fold_build3 (COND_EXPR, void_type_node,
4147 parm, tmp, build_empty_stmt (input_location));
4148 gfc_add_expr_to_block (&se.pre, tmp);
/* Derived types with allocatable components need those components
   nullified after the allocation.  */
4151 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4153 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4154 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4155 gfc_add_expr_to_block (&se.pre, tmp);
4160 tmp = gfc_finish_block (&se.pre);
4161 gfc_add_expr_to_block (&block, tmp);
4163 /* Initialization via SOURCE block. */
4166 gfc_expr *rhs = gfc_copy_expr (code->expr3);
/* CLASS targets are initialized by a raw memcpy of the data part ...  */
4167 if (al->expr->ts.type == BT_CLASS)
4170 if (rhs->ts.type == BT_CLASS)
4171 gfc_add_component_ref (rhs, "$data");
4172 gfc_init_se (&dst, NULL);
4173 gfc_init_se (&src, NULL);
4174 gfc_conv_expr (&dst, expr);
4175 gfc_conv_expr (&src, rhs);
4176 gfc_add_block_to_block (&block, &src.pre);
4177 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
/* ... while non-CLASS targets get an ordinary assignment.  */
4180 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4182 gfc_free_expr (rhs);
4183 gfc_add_expr_to_block (&block, tmp);
4186 /* Allocation of CLASS entities. */
4187 gfc_free_expr (expr);
4189 if (expr->ts.type == BT_CLASS)
4194 /* Initialize VPTR for CLASS objects. */
4195 lhs = gfc_expr_to_initialize (expr);
4196 gfc_add_component_ref (lhs, "$vptr");
4198 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4200 /* VPTR must be determined at run time. */
4201 rhs = gfc_copy_expr (code->expr3);
4202 gfc_add_component_ref (rhs, "$vptr");
4203 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4204 gfc_add_expr_to_block (&block, tmp);
4205 gfc_free_expr (rhs);
4209 /* VPTR is fixed at compile time. */
/* Pick the declared type from SOURCE=, the object, or the type-spec.  */
4213 ts = &code->expr3->ts;
4214 else if (expr->ts.type == BT_DERIVED)
4216 else if (code->ext.alloc.ts.type == BT_DERIVED)
4217 ts = &code->ext.alloc.ts;
4218 else if (expr->ts.type == BT_CLASS)
4219 ts = &expr->ts.u.derived->components->ts;
/* Point the vptr at the derived type's vtab symbol.  */
4223 if (ts->type == BT_DERIVED)
4225 vtab = gfc_find_derived_vtab (ts->u.derived);
4227 gfc_init_se (&lse, NULL);
4228 lse.want_pointer = 1;
4229 gfc_conv_expr (&lse, lhs);
4230 tmp = gfc_build_addr_expr (NULL_TREE,
4231 gfc_get_symbol_decl (vtab));
4232 gfc_add_modify (&block, lse.expr,
4233 fold_convert (TREE_TYPE (lse.expr), tmp));
/* Error landing pad: assign the collected stat to the STAT= variable.  */
4243 tmp = build1_v (LABEL_EXPR, error_label);
4244 gfc_add_expr_to_block (&block, tmp);
4246 gfc_init_se (&se, NULL);
4247 gfc_conv_expr_lhs (&se, code->expr1);
4248 tmp = convert (TREE_TYPE (se.expr), stat);
4249 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= handling: copy a fixed message into the user's variable,
   truncated to its length, but only when stat != 0.  */
4255 /* A better error message may be possible, but not required. */
4256 const char *msg = "Attempt to allocate an allocated object";
4257 tree errmsg, slen, dlen;
4259 gfc_init_se (&se, NULL);
4260 gfc_conv_expr_lhs (&se, code->expr2);
4262 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4264 gfc_add_modify (&block, errmsg,
4265 gfc_build_addr_expr (pchar_type_node,
4266 gfc_build_localized_cstring_const (msg)));
4268 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4269 dlen = gfc_get_expr_charlen (code->expr2);
4270 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4272 dlen = build_call_expr_loc (input_location,
4273 built_in_decls[BUILT_IN_MEMCPY], 3,
4274 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4276 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4277 build_int_cst (TREE_TYPE (stat), 0));
4279 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4281 gfc_add_expr_to_block (&block, tmp);
4284 return gfc_finish_block (&block);
4288 /* Translate a DEALLOCATE statement. */
/* NOTE(review): this extraction has dropped lines (local declarations,
   braces, some else-branches); verify against the original trans-stmt.c.  */
4291 gfc_trans_deallocate (gfc_code *code)
4296 tree apstat, astat, pstat, stat, tmp;
4299 pstat = apstat = stat = astat = tmp = NULL_TREE;
4301 gfc_start_block (&block);
4303 /* Count the number of failed deallocations. If deallocate() was
4304 called with STAT= , then set STAT to the count. If deallocate
4305 was called with ERRMSG, then set ERRMG to a string. */
4306 if (code->expr1 || code->expr2)
4308 tree gfc_int4_type_node = gfc_get_int_type (4);
/* stat receives the status of each individual deallocation ...  */
4310 stat = gfc_create_var (gfc_int4_type_node, "stat");
4311 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4313 /* Running total of possible deallocation failures. */
4314 astat = gfc_create_var (gfc_int4_type_node, "astat");
4315 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4317 /* Initialize astat to 0. */
4318 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
/* Translate each object in the deallocation list in turn.  */
4321 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4324 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4326 gfc_init_se (&se, NULL);
4327 gfc_start_block (&se.pre);
4329 se.want_pointer = 1;
4330 se.descriptor_only = 1;
4331 gfc_conv_expr (&se, expr);
/* Derived types with allocatable components: free the components first,
   unless the object is reached through an ultimate POINTER component.  */
4333 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4336 gfc_ref *last = NULL;
4337 for (ref = expr->ref; ref; ref = ref->next)
4338 if (ref->type == REF_COMPONENT)
4341 /* Do not deallocate the components of a derived type
4342 ultimate pointer component. */
4343 if (!(last && last->u.c.component->attr.pointer)
4344 && !(!last && expr->symtree->n.sym->attr.pointer))
4346 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4348 gfc_add_expr_to_block (&se.pre, tmp);
/* Array vs. scalar deallocation take different runtime helpers.  */
4353 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4356 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4357 gfc_add_expr_to_block (&se.pre, tmp);
/* Null the pointer after freeing it.  */
4359 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4360 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4363 gfc_add_expr_to_block (&se.pre, tmp);
4365 /* Keep track of the number of failed deallocations by adding stat
4366 of the last deallocation to the running total. */
4367 if (code->expr1 || code->expr2)
4369 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4370 gfc_add_modify (&se.pre, astat, apstat);
4373 tmp = gfc_finish_block (&se.pre);
4374 gfc_add_expr_to_block (&block, tmp);
/* STAT= variable receives the accumulated failure count.  */
4381 gfc_init_se (&se, NULL);
4382 gfc_conv_expr_lhs (&se, code->expr1);
4383 tmp = convert (TREE_TYPE (se.expr), astat);
4384 gfc_add_modify (&block, se.expr, tmp);
/* ERRMSG= handling: copy a fixed message into the user's variable,
   truncated to its length, but only when astat != 0.  */
4390 /* A better error message may be possible, but not required. */
4391 const char *msg = "Attempt to deallocate an unallocated object";
4392 tree errmsg, slen, dlen;
4394 gfc_init_se (&se, NULL);
4395 gfc_conv_expr_lhs (&se, code->expr2);
4397 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4399 gfc_add_modify (&block, errmsg,
4400 gfc_build_addr_expr (pchar_type_node,
4401 gfc_build_localized_cstring_const (msg)));
4403 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4404 dlen = gfc_get_expr_charlen (code->expr2);
4405 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4407 dlen = build_call_expr_loc (input_location,
4408 built_in_decls[BUILT_IN_MEMCPY], 3,
4409 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4411 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4412 build_int_cst (TREE_TYPE (astat), 0));
4414 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4416 gfc_add_expr_to_block (&block, tmp);
4419 return gfc_finish_block (&block);