1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
28 #include "tree-gimple.h"
34 #include "trans-stmt.h"
35 #include "trans-types.h"
36 #include "trans-array.h"
37 #include "trans-const.h"
/* Set nonzero by the function-call translation when the subroutine just
   called used an alternate return specifier; consulted by
   gfc_trans_call below.  */
40 int has_alternate_specifier;
/* Per-index iteration information for one FORALL index (linked list).  */
42 typedef struct iter_info
48 struct iter_info *next;
/* Linked list of temporaries created while translating WHERE/FORALL;
   the caller disposes of them afterwards.  */
52 typedef struct temporary_list
55 struct temporary_list *next;
/* Nesting information for a FORALL construct: OUTER links toward the
   enclosing forall level, NEXT_NEST toward deeper nested levels.  */
59 typedef struct forall_info
67 struct forall_info *outer;
68 struct forall_info *next_nest;
/* Forward declaration: WHERE translation needs the forall context and a
   list of temporaries to free.  */
72 static void gfc_trans_where_2 (gfc_code *, tree, tree, forall_info *,
73 stmtblock_t *, temporary_list **temp);
75 /* Translate a F95 label number to a LABEL_EXPR. */
78 gfc_trans_label_here (gfc_code * code)
/* code->here is the statement label attached to this statement.  */
80 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
84 /* Given a variable expression which has been ASSIGNed to, find the decl
85 containing the auxiliary variables. For variables in common blocks this
89 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* Only valid for variables that were the target of an ASSIGN.  */
91 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
92 gfc_conv_expr (se, expr);
93 /* Deals with variable in common block. Get the field declaration. */
94 if (TREE_CODE (se->expr) == COMPONENT_REF)
95 se->expr = TREE_OPERAND (se->expr, 1);
98 /* Translate a label assignment statement. */
101 gfc_trans_label_assign (gfc_code * code)
111 /* Start a new block. */
112 gfc_init_se (&se, NULL);
113 gfc_start_block (&se.pre);
114 gfc_conv_label_variable (&se, code->expr);
/* The ASSIGNed variable carries two auxiliary decls: a string length
   and an address slot.  */
116 len = GFC_DECL_STRING_LEN (se.expr);
117 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
119 label_tree = gfc_get_label_decl (code->label);
/* A target label stores the label's address and a length of -1; a
   format label stores the format string and its real length.  */
121 if (code->label->defined == ST_LABEL_TARGET)
123 /* Shouldn't need to set this flag. Reserve for optimization bug. */
124 DECL_ARTIFICIAL (label_tree) = 0;
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
126 len_tree = integer_minus_one_node;
130 label_str = code->label->format->value.character.string;
131 label_len = code->label->format->value.character.length;
132 len_tree = build_int_cst (NULL_TREE, label_len);
/* +1 for the trailing NUL of the string constant.  */
133 label_tree = gfc_build_string_const (label_len + 1, label_str);
134 label_tree = gfc_build_addr_expr (pchar_type_node, label_tree);
/* Store length and address into the auxiliary variables.  */
137 gfc_add_modify_expr (&se.pre, len, len_tree);
138 gfc_add_modify_expr (&se.pre, addr, label_tree);
140 return gfc_finish_block (&se.pre);
143 /* Translate a GOTO statement. */
146 gfc_trans_goto (gfc_code * code)
/* Ordinary (non-assigned) GOTO: jump straight to the label.  */
156 if (code->label != NULL)
157 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
/* Assigned GOTO: the target was stored by an earlier ASSIGN.  */
160 gfc_init_se (&se, NULL);
161 gfc_start_block (&se.pre);
162 gfc_conv_label_variable (&se, code->expr);
164 gfc_build_cstring_const ("Assigned label is not a target label");
/* Runtime check: a stored length != -1 means the variable holds a
   format label, not a branch target.  */
165 tmp = GFC_DECL_STRING_LEN (se.expr);
166 tmp = build2 (NE_EXPR, boolean_type_node, tmp, integer_minus_one_node);
167 gfc_trans_runtime_check (tmp, assign_error, &se.pre);
/* Build the computed jump through the stored address.  */
169 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
170 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
175 gfc_add_expr_to_block (&se.pre, target);
176 return gfc_finish_block (&se.pre);
179 /* Check the label list. */
180 range_error = gfc_build_cstring_const ("Assigned label is not in the list");
/* For each label in the list, compare its address with the stored
   target and branch if they match.  */
184 tmp = gfc_get_label_decl (code->label);
185 tmp = gfc_build_addr_expr (pvoid_type_node, tmp);
186 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
187 tmp = build3_v (COND_EXPR, tmp, target, build_empty_stmt ());
188 gfc_add_expr_to_block (&se.pre, tmp);
191 while (code != NULL);
/* Falling off the end of the list is a runtime error.  */
192 gfc_trans_runtime_check (boolean_true_node, range_error, &se.pre);
193 return gfc_finish_block (&se.pre);
197 /* Translate an ENTRY statement. Just adds a label for this entry point. */
199 gfc_trans_entry (gfc_code * code)
201 return build1_v (LABEL_EXPR, code->ext.entry->label);
205 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
208 gfc_trans_call (gfc_code * code)
212 /* A CALL starts a new block because the actual arguments may have to
213 be evaluated first. */
214 gfc_init_se (&se, NULL);
215 gfc_start_block (&se.pre);
217 gcc_assert (code->resolved_sym);
/* Reset the global flag; gfc_conv_function_call sets it if the call
   uses an alternate return specifier.  */
218 has_alternate_specifier = 0;
220 /* Translate the call. */
221 gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual);
223 /* A subroutine without side-effect, by definition, does nothing! */
224 TREE_SIDE_EFFECTS (se.expr) = 1;
226 /* Chain the pieces together and return the block. */
/* An alternate return is implemented as a hidden SELECT on the value
   returned by the call; store that value into the SELECT variable.  */
227 if (has_alternate_specifier)
229 gfc_code *select_code;
231 select_code = code->next;
232 gcc_assert(select_code->op == EXEC_SELECT);
233 select_code->expr->symtree->n.sym;
234 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
235 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
238 gfc_add_expr_to_block (&se.pre, se.expr);
240 gfc_add_block_to_block (&se.pre, &se.post);
241 return gfc_finish_block (&se.pre);
245 /* Translate the RETURN statement. */
248 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
256 /* if code->expr is not NULL, this return statement must appear
257 in a subroutine and current_fake_result_decl has already
260 result = gfc_get_fake_result_decl (NULL);
/* No result decl means an alternate return was used where no * dummy
   argument exists; warn and fall back to a plain jump.  */
263 gfc_warning ("An alternate return at %L without a * dummy argument",
265 return build1_v (GOTO_EXPR, gfc_get_return_label ());
268 /* Start a new block for this statement. */
269 gfc_init_se (&se, NULL);
270 gfc_start_block (&se.pre);
/* Evaluate the alternate-return expression and store it into the
   fake result variable before jumping to the return label.  */
272 gfc_conv_expr (&se, code->expr);
274 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
275 gfc_add_expr_to_block (&se.pre, tmp);
277 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
278 gfc_add_expr_to_block (&se.pre, tmp);
279 gfc_add_block_to_block (&se.pre, &se.post);
280 return gfc_finish_block (&se.pre);
/* Simple RETURN with no expression: just jump to the return label.  */
283 return build1_v (GOTO_EXPR, gfc_get_return_label ());
287 /* Translate the PAUSE statement. We have to translate this statement
288 to a runtime library call. */
291 gfc_trans_pause (gfc_code * code)
293 tree gfc_int4_type_node = gfc_get_int_type (4);
299 /* Start a new block for this statement. */
300 gfc_init_se (&se, NULL);
301 gfc_start_block (&se.pre);
/* No expression: PAUSE with a numeric stop code.  */
304 if (code->expr == NULL)
306 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
307 args = gfc_chainon_list (NULL_TREE, tmp);
308 fndecl = gfor_fndecl_pause_numeric;
/* Otherwise PAUSE with a character expression: pass address and
   length to the string variant of the library routine.  */
312 gfc_conv_expr_reference (&se, code->expr);
313 args = gfc_chainon_list (NULL_TREE, se.expr);
314 args = gfc_chainon_list (args, se.string_length);
315 fndecl = gfor_fndecl_pause_string;
318 tmp = gfc_build_function_call (fndecl, args);
319 gfc_add_expr_to_block (&se.pre, tmp);
321 gfc_add_block_to_block (&se.pre, &se.post);
323 return gfc_finish_block (&se.pre);
327 /* Translate the STOP statement. We have to translate this statement
328 to a runtime library call. */
331 gfc_trans_stop (gfc_code * code)
333 tree gfc_int4_type_node = gfc_get_int_type (4);
339 /* Start a new block for this statement. */
340 gfc_init_se (&se, NULL);
341 gfc_start_block (&se.pre);
/* No expression: STOP with a numeric stop code.  */
344 if (code->expr == NULL)
346 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
347 args = gfc_chainon_list (NULL_TREE, tmp);
348 fndecl = gfor_fndecl_stop_numeric;
/* Otherwise STOP with a character expression: pass address and
   length to the string variant of the library routine.  */
352 gfc_conv_expr_reference (&se, code->expr);
353 args = gfc_chainon_list (NULL_TREE, se.expr);
354 args = gfc_chainon_list (args, se.string_length);
355 fndecl = gfor_fndecl_stop_string;
358 tmp = gfc_build_function_call (fndecl, args);
359 gfc_add_expr_to_block (&se.pre, tmp);
361 gfc_add_block_to_block (&se.pre, &se.post);
363 return gfc_finish_block (&se.pre);
367 /* Generate GENERIC for the IF construct. This function also deals with
368 the simple IF statement, because the front end translates the IF
369 statement into an IF construct.
401 where COND_S is the simplified version of the predicate. PRE_COND_S
402 are the pre side-effects produced by the translation of the
404 We need to build the chain recursively otherwise we run into
405 problems with folding incomplete statements. */
408 gfc_trans_if_1 (gfc_code * code)
413 /* Check for an unconditional ELSE clause. */
415 return gfc_trans_code (code->next);
417 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
418 gfc_init_se (&if_se, NULL);
419 gfc_start_block (&if_se.pre);
421 /* Calculate the IF condition expression. */
422 gfc_conv_expr_val (&if_se, code->expr);
424 /* Translate the THEN clause. */
425 stmt = gfc_trans_code (code->next);
427 /* Translate the ELSE clause. */
/* Recurse for ELSE IF / ELSE; an absent else becomes an empty stmt.  */
429 elsestmt = gfc_trans_if_1 (code->block);
431 elsestmt = build_empty_stmt ();
433 /* Build the condition expression and add it to the condition block. */
434 stmt = build3_v (COND_EXPR, if_se.expr, stmt, elsestmt);
436 gfc_add_expr_to_block (&if_se.pre, stmt);
438 /* Finish off this statement. */
439 return gfc_finish_block (&if_se.pre);
/* Entry point for IF translation; delegates to gfc_trans_if_1.  */
443 gfc_trans_if (gfc_code * code)
445 /* Ignore the top EXEC_IF, it only announces an IF construct. The
446 actual code we must translate is in code->block. */
448 return gfc_trans_if_1 (code->block);
452 /* Translate an arithmetic IF expression.
454 IF (cond) label1, label2, label3 translates to
468 gfc_trans_arithmetic_if (gfc_code * code)
476 /* Start a new block. */
477 gfc_init_se (&se, NULL);
478 gfc_start_block (&se.pre);
480 /* Pre-evaluate COND. */
481 gfc_conv_expr_val (&se, code->expr);
483 /* Build something to compare with. */
/* Zero constant of the same type as COND.  */
484 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
486 /* If (cond < 0) take branch1 else take branch2.
487 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
488 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
489 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
491 tmp = build2 (LT_EXPR, boolean_type_node, se.expr, zero);
492 branch1 = build3_v (COND_EXPR, tmp, branch1, branch2);
494 /* if (cond <= 0) take branch1 else take branch2. */
/* branch1 now covers both the < 0 and == 0 cases; the outer compare
   sends > 0 to label3.  */
495 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
496 tmp = build2 (LE_EXPR, boolean_type_node, se.expr, zero);
497 branch1 = build3_v (COND_EXPR, tmp, branch1, branch2);
499 /* Append the COND_EXPR to the evaluation of COND, and return. */
500 gfc_add_expr_to_block (&se.pre, branch1);
501 return gfc_finish_block (&se.pre);
505 /* Translate the simple DO construct. This is where the loop variable has
506 integer type and step +-1. We can't use this in the general case
507 because integer overflow and floating point errors could give incorrect
509 We translate a do loop from:
511 DO dovar = from, to, step
517 [Evaluate loop bounds and step]
519 if ((step > 0) ? (dovar <= to) : (dovar => to))
525 cond = (dovar == to);
527 if (cond) goto end_label;
532 This helps the optimizers by avoiding the extra induction variable
533 used in the general case. */
536 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
537 tree from, tree to, tree step)
546 type = TREE_TYPE (dovar);
548 /* Initialize the DO variable: dovar = from. */
549 gfc_add_modify_expr (pblock, dovar, from);
551 /* Cycle and exit statements are implemented with gotos. */
552 cycle_label = gfc_build_label_decl (NULL_TREE);
553 exit_label = gfc_build_label_decl (NULL_TREE);
555 /* Put the labels where they can be found later. See gfc_trans_do(). */
556 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
559 gfc_start_block (&body);
561 /* Main loop body. */
562 tmp = gfc_trans_code (code->block->next);
563 gfc_add_expr_to_block (&body, tmp);
565 /* Label for cycle statements (if needed). */
566 if (TREE_USED (cycle_label))
568 tmp = build1_v (LABEL_EXPR, cycle_label);
569 gfc_add_expr_to_block (&body, tmp);
572 /* Evaluate the loop condition. */
/* Test equality with TO *before* incrementing, so the final trip does
   not overflow dovar past the loop bound.  */
573 cond = build2 (EQ_EXPR, boolean_type_node, dovar, to);
574 cond = gfc_evaluate_now (cond, &body);
576 /* Increment the loop variable. */
577 tmp = build2 (PLUS_EXPR, type, dovar, step);
578 gfc_add_modify_expr (&body, dovar, tmp);
/* Exit the loop if the pre-increment condition was true.  */
581 tmp = build1_v (GOTO_EXPR, exit_label);
582 TREE_USED (exit_label) = 1;
583 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt ());
584 gfc_add_expr_to_block (&body, tmp);
586 /* Finish the loop body. */
587 tmp = gfc_finish_block (&body);
588 tmp = build1_v (LOOP_EXPR, tmp);
590 /* Only execute the loop if the number of iterations is positive. */
/* STEP is a compile-time constant here (+1 or -1), so its sign picks
   the direction of the entry test.  */
591 if (tree_int_cst_sgn (step) > 0)
592 cond = fold (build2 (LE_EXPR, boolean_type_node, dovar, to));
594 cond = fold (build2 (GE_EXPR, boolean_type_node, dovar, to));
595 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt ());
596 gfc_add_expr_to_block (pblock, tmp);
598 /* Add the exit label. */
599 tmp = build1_v (LABEL_EXPR, exit_label);
600 gfc_add_expr_to_block (pblock, tmp);
602 return gfc_finish_block (pblock);
605 /* Translate the DO construct. This obviously is one of the most
606 important ones to get right with any compiler, but especially
609 We special case some loop forms as described in gfc_trans_simple_do.
610 For other cases we implement them with a separate loop count,
611 as described in the standard.
613 We translate a do loop from:
615 DO dovar = from, to, step
621 [evaluate loop bounds and step]
622 count = to + step - from;
630 if (count <=0) goto exit_label;
634 TODO: Large loop counts
635 The code above assumes the loop count fits into a signed integer kind,
636 i.e. Does not work for loop counts > 2^31 for integer(kind=4) variables
637 We must support the full range. */
640 gfc_trans_do (gfc_code * code)
657 gfc_start_block (&block);
659 /* Evaluate all the expressions in the iterator. */
660 gfc_init_se (&se, NULL);
661 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
662 gfc_add_block_to_block (&block, &se.pre);
664 type = TREE_TYPE (dovar);
666 gfc_init_se (&se, NULL);
667 gfc_conv_expr_val (&se, code->ext.iterator->start);
668 gfc_add_block_to_block (&block, &se.pre);
/* Force the bounds and step into temporaries so they are evaluated
   exactly once, before the loop.  */
669 from = gfc_evaluate_now (se.expr, &block);
671 gfc_init_se (&se, NULL);
672 gfc_conv_expr_val (&se, code->ext.iterator->end);
673 gfc_add_block_to_block (&block, &se.pre);
674 to = gfc_evaluate_now (se.expr, &block);
676 gfc_init_se (&se, NULL);
677 gfc_conv_expr_val (&se, code->ext.iterator->step);
678 gfc_add_block_to_block (&block, &se.pre);
679 step = gfc_evaluate_now (se.expr, &block);
681 /* Special case simple loops. */
/* Integer loop variable with constant step +-1: no separate count
   variable needed.  */
682 if (TREE_CODE (type) == INTEGER_TYPE
683 && (integer_onep (step)
684 || tree_int_cst_equal (step, integer_minus_one_node)))
685 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
687 /* Initialize loop count. This code is executed before we enter the
688 loop body. We generate: count = (to + step - from) / step. */
690 tmp = fold (build2 (MINUS_EXPR, type, step, from));
691 tmp = fold (build2 (PLUS_EXPR, type, to, tmp));
692 if (TREE_CODE (type) == INTEGER_TYPE)
694 tmp = fold (build2 (TRUNC_DIV_EXPR, type, tmp, step));
695 count = gfc_create_var (type, "count");
699 /* TODO: We could use the same width as the real type.
700 This would probably cause more problems that it solves
701 when we implement "long double" types. */
/* Real-typed loops: compute the trip count in floating point, then
   truncate to an integer counter.  */
702 tmp = fold (build2 (RDIV_EXPR, type, tmp, step));
703 tmp = fold (build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp));
704 count = gfc_create_var (gfc_array_index_type, "count");
706 gfc_add_modify_expr (&block, count, tmp);
708 count_one = convert (TREE_TYPE (count), integer_one_node);
710 /* Initialize the DO variable: dovar = from. */
711 gfc_add_modify_expr (&block, dovar, from);
714 gfc_start_block (&body);
716 /* Cycle and exit statements are implemented with gotos. */
717 cycle_label = gfc_build_label_decl (NULL_TREE);
718 exit_label = gfc_build_label_decl (NULL_TREE);
720 /* Start with the loop condition. Loop until count <= 0. */
721 cond = build2 (LE_EXPR, boolean_type_node, count,
722 convert (TREE_TYPE (count), integer_zero_node));
723 tmp = build1_v (GOTO_EXPR, exit_label);
724 TREE_USED (exit_label) = 1;
725 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt ());
726 gfc_add_expr_to_block (&body, tmp);
728 /* Put these labels where they can be found later. We put the
729 labels in a TREE_LIST node (because TREE_CHAIN is already
730 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
731 label in TREE_VALUE (backend_decl). */
733 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
735 /* Main loop body. */
736 tmp = gfc_trans_code (code->block->next);
737 gfc_add_expr_to_block (&body, tmp);
739 /* Label for cycle statements (if needed). */
740 if (TREE_USED (cycle_label))
742 tmp = build1_v (LABEL_EXPR, cycle_label);
743 gfc_add_expr_to_block (&body, tmp);
746 /* Increment the loop variable. */
747 tmp = build2 (PLUS_EXPR, type, dovar, step);
748 gfc_add_modify_expr (&body, dovar, tmp);
750 /* Decrement the loop count. */
751 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
752 gfc_add_modify_expr (&body, count, tmp);
754 /* End of loop body. */
755 tmp = gfc_finish_block (&body);
757 /* The for loop itself. */
758 tmp = build1_v (LOOP_EXPR, tmp);
759 gfc_add_expr_to_block (&block, tmp);
761 /* Add the exit label. */
762 tmp = build1_v (LABEL_EXPR, exit_label);
763 gfc_add_expr_to_block (&block, tmp);
765 return gfc_finish_block (&block);
769 /* Translate the DO WHILE construct.
782 if (! cond) goto exit_label;
788 Because the evaluation of the exit condition `cond' may have side
789 effects, we can't do much for empty loop bodies. The backend optimizers
790 should be smart enough to eliminate any dead loops. */
793 gfc_trans_do_while (gfc_code * code)
801 /* Everything we build here is part of the loop body. */
802 gfc_start_block (&block);
804 /* Cycle and exit statements are implemented with gotos. */
805 cycle_label = gfc_build_label_decl (NULL_TREE);
806 exit_label = gfc_build_label_decl (NULL_TREE);
808 /* Put the labels where they can be found later. See gfc_trans_do(). */
809 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
811 /* Create a GIMPLE version of the exit condition. */
/* The condition is (re)evaluated at the top of every iteration, so
   its side effects happen once per trip.  */
812 gfc_init_se (&cond, NULL);
813 gfc_conv_expr_val (&cond, code->expr);
814 gfc_add_block_to_block (&block, &cond.pre);
815 cond.expr = fold (build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr));
817 /* Build "IF (! cond) GOTO exit_label". */
818 tmp = build1_v (GOTO_EXPR, exit_label);
819 TREE_USED (exit_label) = 1;
820 tmp = build3_v (COND_EXPR, cond.expr, tmp, build_empty_stmt ());
821 gfc_add_expr_to_block (&block, tmp);
823 /* The main body of the loop. */
824 tmp = gfc_trans_code (code->block->next);
825 gfc_add_expr_to_block (&block, tmp);
827 /* Label for cycle statements (if needed). */
828 if (TREE_USED (cycle_label))
830 tmp = build1_v (LABEL_EXPR, cycle_label);
831 gfc_add_expr_to_block (&block, tmp);
834 /* End of loop body. */
835 tmp = gfc_finish_block (&block);
/* Wrap the body in a LOOP_EXPR and place the exit label after it.  */
837 gfc_init_block (&block);
838 /* Build the loop. */
839 tmp = build1_v (LOOP_EXPR, tmp);
840 gfc_add_expr_to_block (&block, tmp);
842 /* Add the exit label. */
843 tmp = build1_v (LABEL_EXPR, exit_label);
844 gfc_add_expr_to_block (&block, tmp);
846 return gfc_finish_block (&block);
850 /* Translate the SELECT CASE construct for INTEGER case expressions,
851 without killing all potential optimizations. The problem is that
852 Fortran allows unbounded cases, but the back-end does not, so we
853 need to intercept those before we enter the equivalent SWITCH_EXPR
856 For example, we translate this,
859 CASE (:100,101,105:115)
869 to the GENERIC equivalent,
873 case (minimum value for typeof(expr) ... 100:
879 case 200 ... (maximum value for typeof(expr):
896 gfc_trans_integer_select (gfc_code * code)
906 gfc_start_block (&block);
908 /* Calculate the switch expression. */
909 gfc_init_se (&se, NULL);
910 gfc_conv_expr_val (&se, code->expr);
911 gfc_add_block_to_block (&block, &se.pre);
913 end_label = gfc_build_label_decl (NULL_TREE);
915 gfc_init_block (&body);
/* One iteration per CASE block; each block may list several ranges.  */
917 for (c = code->block; c; c = c->block)
919 for (cp = c->ext.case_list; cp; cp = cp->next)
924 /* Assume it's the default case. */
925 low = high = NULL_TREE;
929 low = gfc_conv_constant_to_tree (cp->low);
931 /* If there's only a lower bound, set the high bound to the
932 maximum value of the case expression. */
934 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
939 /* Three cases are possible here:
941 1) There is no lower bound, e.g. CASE (:N).
942 2) There is a lower bound .NE. high bound, that is
943 a case range, e.g. CASE (N:M) where M>N (we make
944 sure that M>N during type resolution).
945 3) There is a lower bound, and it has the same value
946 as the high bound, e.g. CASE (N:N). This is our
947 internal representation of CASE(N).
949 In the first and second case, we need to set a value for
950 high. In the third case, we don't because the GCC middle
951 end represents a single case value by just letting high be
952 a NULL_TREE. We can't do that because we need to be able
953 to represent unbounded cases. */
957 && mpz_cmp (cp->low->value.integer,
958 cp->high->value.integer) != 0))
959 high = gfc_conv_constant_to_tree (cp->high);
961 /* Unbounded case. */
/* CASE (:N): clamp the low bound to the type's minimum.  */
963 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
967 label = gfc_build_label_decl (NULL_TREE);
969 /* Add this case label.
970 Add parameter 'label', make it match GCC backend. */
971 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
972 gfc_add_expr_to_block (&body, tmp);
975 /* Add the statements for this case. */
976 tmp = gfc_trans_code (c->next);
977 gfc_add_expr_to_block (&body, tmp);
979 /* Break to the end of the construct. */
/* Fortran cases never fall through; jump past the switch.  */
980 tmp = build1_v (GOTO_EXPR, end_label);
981 gfc_add_expr_to_block (&body, tmp);
984 tmp = gfc_finish_block (&body);
985 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
986 gfc_add_expr_to_block (&block, tmp);
988 tmp = build1_v (LABEL_EXPR, end_label);
989 gfc_add_expr_to_block (&block, tmp);
991 return gfc_finish_block (&block);
995 /* Translate the SELECT CASE construct for LOGICAL case expressions.
997 There are only two cases possible here, even though the standard
998 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
999 .FALSE., and DEFAULT.
1001 We never generate more than two blocks here. Instead, we always
1002 try to eliminate the DEFAULT case. This way, we can translate this
1003 kind of SELECT construct to a simple
1007 expression in GENERIC. */
1010 gfc_trans_logical_select (gfc_code * code)
1013 gfc_code *t, *f, *d;
1018 /* Assume we don't have any cases at all. */
1021 /* Now see which ones we actually do have. We can have at most two
1022 cases in a single case list: one for .TRUE. and one for .FALSE.
1023 The default case is always separate. If the cases for .TRUE. and
1024 .FALSE. are in the same case list, the block for that case list
1025 is always executed, and we don't generate a COND_EXPR. */
1026 for (c = code->block; c; c = c->block)
1028 for (cp = c->ext.case_list; cp; cp = cp->next)
/* Record which case block handles .FALSE. and which .TRUE.  */
1032 if (cp->low->value.logical == 0) /* .FALSE. */
1034 else /* if (cp->value.logical != 0), thus .TRUE. */
1042 /* Start a new block. */
1043 gfc_start_block (&block);
1045 /* Calculate the switch expression. We always need to do this
1046 because it may have side effects. */
1047 gfc_init_se (&se, NULL);
1048 gfc_conv_expr_val (&se, code->expr);
1049 gfc_add_block_to_block (&block, &se.pre);
1051 if (t == f && t != NULL)
1053 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1054 translate the code for these cases, append it to the current
1056 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1060 tree true_tree, false_tree;
1062 true_tree = build_empty_stmt ();
1063 false_tree = build_empty_stmt ();
1065 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1066 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1067 make the missing case the default case. */
1068 if (t != NULL && f != NULL)
1078 /* Translate the code for each of these blocks, and append it to
1079 the current block. */
1081 true_tree = gfc_trans_code (t->next);
1084 false_tree = gfc_trans_code (f->next);
/* Emit the construct as a single two-armed COND_EXPR.  */
1086 gfc_add_expr_to_block (&block, build3_v (COND_EXPR, se.expr,
1087 true_tree, false_tree));
1090 return gfc_finish_block (&block);
1094 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1095 Instead of generating compares and jumps, it is far simpler to
1096 generate a data structure describing the cases in order and call a
1097 library subroutine that locates the right case.
1098 This is particularly true because this is the only case where we
1099 might have to dispose of a temporary.
1100 The library subroutine returns a pointer to jump to or NULL if no
1101 branches are to be taken. */
1104 gfc_trans_character_select (gfc_code *code)
1106 tree init, node, end_label, tmp, type, args, *labels;
1107 stmtblock_t block, body;
/* The jump-table record type is built once and cached across calls.  */
1113 static tree select_struct;
1114 static tree ss_string1, ss_string1_len;
1115 static tree ss_string2, ss_string2_len;
1116 static tree ss_target;
1118 if (select_struct == NULL)
1120 tree gfc_int4_type_node = gfc_get_int_type (4);
1122 select_struct = make_node (RECORD_TYPE);
1123 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1126 #define ADD_FIELD(NAME, TYPE) \
1127 ss_##NAME = gfc_add_field_to_struct \
1128 (&(TYPE_FIELDS (select_struct)), select_struct, \
1129 get_identifier (stringize(NAME)), TYPE)
1131 ADD_FIELD (string1, pchar_type_node);
1132 ADD_FIELD (string1_len, gfc_int4_type_node);
1134 ADD_FIELD (string2, pchar_type_node);
1135 ADD_FIELD (string2_len, gfc_int4_type_node);
1137 ADD_FIELD (target, pvoid_type_node);
1140 gfc_finish_type (select_struct);
/* Walk to the leftmost case, then count the cases via the right
   links to size the label array.  */
1143 cp = code->block->ext.case_list;
1144 while (cp->left != NULL)
1148 for (d = cp; d; d = d->right)
1152 labels = gfc_getmem (n * sizeof (tree));
1156 for(i = 0; i < n; i++)
1158 labels[i] = gfc_build_label_decl (NULL_TREE);
1159 TREE_USED (labels[i]) = 1;
1160 /* TODO: The gimplifier should do this for us, but it has
1161 inadequacies when dealing with static initializers. */
1162 FORCED_LABEL (labels[i]) = 1;
1165 end_label = gfc_build_label_decl (NULL_TREE);
1167 /* Generate the body */
1168 gfc_start_block (&block);
1169 gfc_init_block (&body);
1171 for (c = code->block; c; c = c->block)
1173 for (d = c->ext.case_list; d; d = d->next)
1175 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1176 gfc_add_expr_to_block (&body, tmp);
1179 tmp = gfc_trans_code (c->next);
1180 gfc_add_expr_to_block (&body, tmp);
/* No fall-through between Fortran cases: jump to the end label.  */
1182 tmp = build1_v (GOTO_EXPR, end_label);
1183 gfc_add_expr_to_block (&body, tmp);
1186 /* Generate the structure describing the branches */
/* Each entry holds the low/high strings (NULL pointer and zero length
   mark an unbounded side) plus the target label address.  */
1190 for(d = cp; d; d = d->right, i++)
1194 gfc_init_se (&se, NULL);
1198 node = tree_cons (ss_string1, null_pointer_node, node);
1199 node = tree_cons (ss_string1_len, integer_zero_node, node);
1203 gfc_conv_expr_reference (&se, d->low);
1205 node = tree_cons (ss_string1, se.expr, node);
1206 node = tree_cons (ss_string1_len, se.string_length, node);
1209 if (d->high == NULL)
1211 node = tree_cons (ss_string2, null_pointer_node, node);
1212 node = tree_cons (ss_string2_len, integer_zero_node, node);
1216 gfc_init_se (&se, NULL);
1217 gfc_conv_expr_reference (&se, d->high);
1219 node = tree_cons (ss_string2, se.expr, node);
1220 node = tree_cons (ss_string2_len, se.string_length, node);
1223 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1224 node = tree_cons (ss_target, tmp, node);
1226 tmp = build1 (CONSTRUCTOR, select_struct, nreverse (node));
1227 init = tree_cons (NULL_TREE, tmp, init);
1230 type = build_array_type (select_struct, build_index_type
1231 (build_int_cst (NULL_TREE, n - 1)));
1233 init = build1 (CONSTRUCTOR, type, nreverse(init));
1234 TREE_CONSTANT (init) = 1;
1235 TREE_INVARIANT (init) = 1;
1236 TREE_STATIC (init) = 1;
1237 /* Create a static variable to hold the jump table. */
1238 tmp = gfc_create_var (type, "jumptable");
1239 TREE_CONSTANT (tmp) = 1;
1240 TREE_INVARIANT (tmp) = 1;
1241 TREE_STATIC (tmp) = 1;
1242 DECL_INITIAL (tmp) = init;
1245 /* Build an argument list for the library call */
1246 init = gfc_build_addr_expr (pvoid_type_node, init);
1247 args = gfc_chainon_list (NULL_TREE, init);
1249 tmp = build_int_cst (NULL_TREE, n);
1250 args = gfc_chainon_list (args, tmp);
1252 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1253 args = gfc_chainon_list (args, tmp);
1255 gfc_init_se (&se, NULL);
1256 gfc_conv_expr_reference (&se, code->expr);
1258 args = gfc_chainon_list (args, se.expr);
1259 args = gfc_chainon_list (args, se.string_length);
1261 gfc_add_block_to_block (&block, &se.pre);
/* The library routine returns the label address to jump to; emit a
   computed goto on its result.  */
1263 tmp = gfc_build_function_call (gfor_fndecl_select_string, args);
1264 tmp = build1 (GOTO_EXPR, void_type_node, tmp);
1265 gfc_add_expr_to_block (&block, tmp);
1267 tmp = gfc_finish_block (&body);
1268 gfc_add_expr_to_block (&block, tmp);
1269 tmp = build1_v (LABEL_EXPR, end_label);
1270 gfc_add_expr_to_block (&block, tmp);
1275 return gfc_finish_block (&block);
1279 /* Translate the three variants of the SELECT CASE construct.
1281 SELECT CASEs with INTEGER case expressions can be translated to an
1282 equivalent GENERIC switch statement, and for LOGICAL case
1283 expressions we build one or two if-else compares.
1285 SELECT CASEs with CHARACTER case expressions are a whole different
1286 story, because they don't exist in GENERIC. So we sort them and
1287 do a binary search at runtime.
1289 Fortran has no BREAK statement, and it does not allow jumps from
1290 one case block to another. That makes things a lot easier for
1294 gfc_trans_select (gfc_code * code)
1296 gcc_assert (code && code->expr);
1298 /* Empty SELECT constructs are legal. */
1299 if (code->block == NULL)
1300 return build_empty_stmt ();
1302 /* Select the correct translation function. */
/* Dispatch on the type of the case expression.  */
1303 switch (code->expr->ts.type)
1305 case BT_LOGICAL: return gfc_trans_logical_select (code);
1306 case BT_INTEGER: return gfc_trans_integer_select (code);
1307 case BT_CHARACTER: return gfc_trans_character_select (code);
1309 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1315 /* Generate the loops for a FORALL block. The normal loop format:
1316 count = (end - start + step) / step
1329 gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_flag)
1337 tree var, start, end, step, mask, maskindex;
/* Wrap BODY in one counted loop per FORALL index, innermost first.  */
1340 iter = forall_tmp->this_loop;
1341 for (n = 0; n < nvar; n++)
1344 start = iter->start;
1348 exit_label = gfc_build_label_decl (NULL_TREE);
1349 TREE_USED (exit_label) = 1;
1351 /* The loop counter. */
1352 count = gfc_create_var (TREE_TYPE (var), "count");
1354 /* The body of the loop. */
1355 gfc_init_block (&block);
1357 /* The exit condition. */
/* Leave the loop once the remaining trip count reaches zero.  */
1358 cond = build2 (LE_EXPR, boolean_type_node, count, integer_zero_node);
1359 tmp = build1_v (GOTO_EXPR, exit_label);
1360 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt ());
1361 gfc_add_expr_to_block (&block, tmp);
1363 /* The main loop body. */
1364 gfc_add_expr_to_block (&block, body);
1366 /* Increment the loop variable. */
1367 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1368 gfc_add_modify_expr (&block, var, tmp);
1370 /* Advance to the next mask element. */
/* Only done when this forall level has a mask (mask_flag).  */
1373 mask = forall_tmp->mask;
1374 maskindex = forall_tmp->maskindex;
1377 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1378 maskindex, gfc_index_one_node);
1379 gfc_add_modify_expr (&block, maskindex, tmp);
1382 /* Decrement the loop counter. */
1383 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1384 gfc_add_modify_expr (&block, count, tmp);
1386 body = gfc_finish_block (&block);
1388 /* Loop var initialization. */
1389 gfc_init_block (&block);
1390 gfc_add_modify_expr (&block, var, start);
1392 /* Initialize the loop counter. */
/* count = (end - start + step) / step, as in the header comment.  */
1393 tmp = fold (build2 (MINUS_EXPR, TREE_TYPE (var), step, start));
1394 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp));
1395 tmp = fold (build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step));
1396 gfc_add_modify_expr (&block, count, tmp);
1398 /* The loop expression. */
1399 tmp = build1_v (LOOP_EXPR, body);
1400 gfc_add_expr_to_block (&block, tmp);
1402 /* The exit label. */
1403 tmp = build1_v (LABEL_EXPR, exit_label);
1404 gfc_add_expr_to_block (&block, tmp);
1406 body = gfc_finish_block (&block);
1413 /* Generate the body and loops according to MASK_FLAG and NEST_FLAG.
1414 if MASK_FLAG is nonzero, the body is controlled by masks in forall
1415 nest, otherwise, the body is not controlled by masks.
1416 if NEST_FLAG is nonzero, generate loops for nested forall, otherwise,
1417 only generate loops for the current forall level. */
1420 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1421 int mask_flag, int nest_flag)
1425 forall_info *forall_tmp;
1426 tree pmask, mask, maskindex;
1428 forall_tmp = nested_forall_info;
1429 /* Generate loops for nested forall. */
/* Walk to the innermost nesting level first; loops are then built from
   the inside out by following the ->outer links. */
1432 while (forall_tmp->next_nest != NULL)
1433 forall_tmp = forall_tmp->next_nest;
1434 while (forall_tmp != NULL)
1436 /* Generate body with masks' control. */
1439 pmask = forall_tmp->pmask;
1440 mask = forall_tmp->mask;
1441 maskindex = forall_tmp->maskindex;
1445 /* If a mask was specified make the assignment conditional:
   wrap BODY in a COND_EXPR keyed on the current mask element. */
1447 tmp = gfc_build_indirect_ref (mask);
1450 tmp = gfc_build_array_ref (tmp, maskindex);
1452 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1455 nvar = forall_tmp->nvar;
1456 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1457 forall_tmp = forall_tmp->outer;
/* Non-nested case: only the current forall level gets loops. */
1462 nvar = forall_tmp->nvar;
1463 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1470 /* Allocate data for holding a temporary array. Returns either a local
1471 temporary array or a pointer variable. */
/* BYTESIZE is the total size in bytes, SIZE the element count.  *PDATA
   receives the (void *) address to pass to internal_free later, or is
   left for the stack case; allocation statements are appended to PBLOCK.  */
1474 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1482 if (INTEGER_CST_P (size))
/* Constant size: build a fixed array type [0 .. size-1]. */
1484 tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type, size,
1485 gfc_index_one_node));
1490 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1491 type = build_array_type (elem_type, type);
1492 if (gfc_can_put_var_on_stack (bytesize))
1494 gcc_assert (INTEGER_CST_P (size));
1495 tmpvar = gfc_create_var (type, "temp");
/* Heap case: a pointer variable filled from the runtime allocator. */
1500 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1501 *pdata = convert (pvoid_type_node, tmpvar);
1503 args = gfc_chainon_list (NULL_TREE, bytesize);
/* Pick the allocator matching the index-integer width. */
1504 if (gfc_index_integer_kind == 4)
1505 tmp = gfor_fndecl_internal_malloc;
1506 else if (gfc_index_integer_kind == 8)
1507 tmp = gfor_fndecl_internal_malloc64;
1510 tmp = gfc_build_function_call (tmp, args);
1511 tmp = convert (TREE_TYPE (tmpvar), tmp);
1512 gfc_add_modify_expr (pblock, tmpvar, tmp);
1518 /* Generate codes to copy the temporary to the actual lhs. */
/* EXPR is the lhs, TMP1 the temporary array holding the evaluated rhs.
   COUNT1 indexes the temporary across forall iterations (advanced by
   SIZE); COUNT2 indexes within one scalarized inner copy; COUNT3 indexes
   the WHERE mask chain WHEREMASK (may be NULL_TREE).  Returns the tree
   for one copy-back step. */
1521 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree size,
1522 tree count3, tree count1, tree count2, tree wheremask)
1526 stmtblock_t block, body;
1533 lss = gfc_walk_expr (expr);
1535 if (lss == gfc_ss_terminator)
/* Scalar lhs: a single element assignment from the temporary. */
1537 gfc_start_block (&block);
1539 gfc_init_se (&lse, NULL);
1541 /* Translate the expression. */
1542 gfc_conv_expr (&lse, expr);
1544 /* Form the expression for the temporary. */
1545 tmp = gfc_build_array_ref (tmp1, count1);
1547 /* Use the scalar assignment as is. */
1548 gfc_add_block_to_block (&block, &lse.pre);
1549 gfc_add_modify_expr (&block, lse.expr, tmp);
1550 gfc_add_block_to_block (&block, &lse.post);
1552 /* Increment the count1. */
1553 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size));
1554 gfc_add_modify_expr (&block, count1, tmp);
1555 tmp = gfc_finish_block (&block);
/* Array lhs: scalarize the assignment temp[count1+count2] -> lhs. */
1559 gfc_start_block (&block);
1561 gfc_init_loopinfo (&loop1);
1562 gfc_init_se (&rse, NULL);
1563 gfc_init_se (&lse, NULL);
1565 /* Associate the lss with the loop. */
1566 gfc_add_ss_to_loop (&loop1, lss);
1568 /* Calculate the bounds of the scalarization. */
1569 gfc_conv_ss_startstride (&loop1);
1570 /* Setup the scalarizing loops. */
1571 gfc_conv_loop_setup (&loop1);
1573 gfc_mark_ss_chain_used (lss, 1);
1574 /* Initialize count2. */
1575 gfc_add_modify_expr (&block, count2, gfc_index_zero_node);
1577 /* Start the scalarized loop body. */
1578 gfc_start_scalarized_body (&loop1, &body);
1580 /* Setup the gfc_se structures. */
1581 gfc_copy_loopinfo_to_se (&lse, &loop1);
1584 /* Form the expression of the temporary. */
1585 if (lss != gfc_ss_terminator)
1587 index = fold (build2 (PLUS_EXPR, gfc_array_index_type,
1589 rse.expr = gfc_build_array_ref (tmp1, index);
1591 /* Translate expr. */
1592 gfc_conv_expr (&lse, expr);
1594 /* Use the scalar assignment. */
1595 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts.type);
1597 /* Form the mask expression according to the mask tree list:
   AND together all mask levels in the WHEREMASK chain. */
1600 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1601 tmp2 = TREE_CHAIN (wheremask);
/* NOTE(review): the parameter TMP1 is reused as scratch here, clobbering
   the temporary-array handle for the rest of this branch — confirm no
   later use of the original value is elided from view. */
1604 tmp1 = gfc_build_array_ref (tmp2, count3);
1605 wheremaskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
1606 wheremaskexpr, tmp1);
1607 tmp2 = TREE_CHAIN (tmp2);
1609 tmp = build3_v (COND_EXPR, wheremaskexpr, tmp, build_empty_stmt ());
1612 gfc_add_expr_to_block (&body, tmp);
1614 /* Increment count2. */
1615 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
1616 count2, gfc_index_one_node));
1617 gfc_add_modify_expr (&body, count2, tmp);
1619 /* Increment count3. */
1622 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
1623 count3, gfc_index_one_node));
1624 gfc_add_modify_expr (&body, count3, tmp);
1627 /* Generate the copying loops. */
1628 gfc_trans_scalarizing_loops (&loop1, &body);
1629 gfc_add_block_to_block (&block, &loop1.pre);
1630 gfc_add_block_to_block (&block, &loop1.post);
1631 gfc_cleanup_loop (&loop1);
1633 /* Increment count1. */
1634 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size));
1635 gfc_add_modify_expr (&block, count1, tmp);
1636 tmp = gfc_finish_block (&block);
1642 /* Generate codes to copy rhs to the temporary. TMP1 is the address of temporary
1643 LSS and RSS are formed in function compute_inner_temp_size(), and should
/* Mirror of generate_loop_for_temp_to_lhs: evaluates EXPR2 (the rhs) into
   the temporary TMP1.  COUNT1/COUNT2/COUNT3 play the same roles as there;
   WHEREMASK (a chained list of mask temporaries) may be NULL_TREE. */
1647 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree size,
1648 tree count3, tree count1, tree count2,
1649 gfc_ss *lss, gfc_ss *rss, tree wheremask)
1651 stmtblock_t block, body1;
1655 tree tmp, tmp2, index;
1658 gfc_start_block (&block);
1660 gfc_init_se (&rse, NULL);
1661 gfc_init_se (&lse, NULL);
1663 if (lss == gfc_ss_terminator)
/* Scalar assignment: store a single rhs value at temp[count1]. */
1665 gfc_init_block (&body1);
1666 gfc_conv_expr (&rse, expr2);
1667 lse.expr = gfc_build_array_ref (tmp1, count1);
/* Array assignment: scalarize rhs into temp[count1+count2]. */
1671 /* Initialize count2. */
1672 gfc_add_modify_expr (&block, count2, gfc_index_zero_node);
1674 /* Initialize the loop. */
1675 gfc_init_loopinfo (&loop);
1677 /* We may need LSS to determine the shape of the expression. */
1678 gfc_add_ss_to_loop (&loop, lss);
1679 gfc_add_ss_to_loop (&loop, rss);
1681 gfc_conv_ss_startstride (&loop);
1682 gfc_conv_loop_setup (&loop);
1684 gfc_mark_ss_chain_used (rss, 1);
1685 /* Start the loop body. */
1686 gfc_start_scalarized_body (&loop, &body1);
1688 /* Translate the expression. */
1689 gfc_copy_loopinfo_to_se (&rse, &loop);
1691 gfc_conv_expr (&rse, expr2);
1693 /* Form the expression of the temporary. */
1694 index = fold (build2 (PLUS_EXPR, gfc_array_index_type, count1, count2));
1695 lse.expr = gfc_build_array_ref (tmp1, index);
1698 /* Use the scalar assignment. */
1699 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts.type);
1701 /* Form the mask expression according to the mask tree list:
   AND together all mask levels in the WHEREMASK chain. */
1704 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1705 tmp2 = TREE_CHAIN (wheremask);
/* NOTE(review): as in generate_loop_for_temp_to_lhs, the parameter TMP1
   is clobbered here — verify the original value is not needed afterwards. */
1708 tmp1 = gfc_build_array_ref (tmp2, count3);
1709 wheremaskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
1710 wheremaskexpr, tmp1);
1711 tmp2 = TREE_CHAIN (tmp2);
1713 tmp = build3_v (COND_EXPR, wheremaskexpr, tmp, build_empty_stmt ());
1716 gfc_add_expr_to_block (&body1, tmp);
1718 if (lss == gfc_ss_terminator)
1720 gfc_add_block_to_block (&block, &body1);
1724 /* Increment count2. */
1725 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
1726 count2, gfc_index_one_node));
1727 gfc_add_modify_expr (&body1, count2, tmp);
1729 /* Increment count3. */
1732 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
1733 count3, gfc_index_one_node));
1734 gfc_add_modify_expr (&body1, count3, tmp);
1737 /* Generate the copying loops. */
1738 gfc_trans_scalarizing_loops (&loop, &body1);
1740 gfc_add_block_to_block (&block, &loop.pre);
1741 gfc_add_block_to_block (&block, &loop.post);
1743 gfc_cleanup_loop (&loop);
1744 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1745 as tree nodes in SS may not be valid in different scope. */
1747 /* Increment count1. */
1748 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (count1), count1, size));
1749 gfc_add_modify_expr (&block, count1, tmp);
1751 tmp = gfc_finish_block (&block);
1756 /* Calculate the size of temporary needed in the assignment inside forall.
1757 LSS and RSS are filled in this function. */
/* Returns a tree holding the element count of EXPR1's shape (1 for a
   scalar lhs).  The walked SS chains are returned through LSS/RSS so the
   caller can reuse them; any setup code goes into PBLOCK.  */
1760 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1761 stmtblock_t * pblock,
1762 gfc_ss **lss, gfc_ss **rss)
1769 *lss = gfc_walk_expr (expr1);
/* Default: scalar assignment needs a single element. */
1772 size = gfc_index_one_node;
1773 if (*lss != gfc_ss_terminator)
1775 gfc_init_loopinfo (&loop);
1777 /* Walk the RHS of the expression. */
1778 *rss = gfc_walk_expr (expr2);
1779 if (*rss == gfc_ss_terminator)
1781 /* The rhs is scalar. Add a ss for the expression. */
1782 *rss = gfc_get_ss ();
1783 (*rss)->next = gfc_ss_terminator;
1784 (*rss)->type = GFC_SS_SCALAR;
1785 (*rss)->expr = expr2;
1788 /* Associate the SS with the loop. */
1789 gfc_add_ss_to_loop (&loop, *lss);
1790 /* We don't actually need to add the rhs at this point, but it might
1791 make guessing the loop bounds a bit easier. */
1792 gfc_add_ss_to_loop (&loop, *rss);
1794 /* We only want the shape of the expression, not rest of the junk
1795 generated by the scalarizer. */
1796 loop.array_parameter = 1;
1798 /* Calculate the bounds of the scalarization. */
1799 gfc_conv_ss_startstride (&loop);
1800 gfc_conv_loop_setup (&loop);
1802 /* Figure out how many elements we need: the product over all
   dimensions of (to - from + 1). */
1803 for (i = 0; i < loop.dimen; i++)
1805 tmp = fold (build2 (MINUS_EXPR, gfc_array_index_type,
1806 gfc_index_one_node, loop.from[i]));
1807 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
1809 size = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
1811 gfc_add_block_to_block (pblock, &loop.pre);
/* Force evaluation now so the size is a stable variable, not a
   re-evaluated expression. */
1812 size = gfc_evaluate_now (size, pblock);
1813 gfc_add_block_to_block (pblock, &loop.post);
1815 /* TODO: write a function that cleans up a loopinfo without freeing
1816 the SS chains. Currently a NOP. */
1823 /* Calculate the overall iterator number of the nested forall construct. */
/* Accumulates INNER_SIZE once per iteration of the whole forall nest by
   running an accumulation statement through gfc_trans_nested_forall_loop;
   the resulting total lands in a fresh "num" variable which is returned.  */
1826 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
1832 /* TODO: optimizing the computing process. */
1833 number = gfc_create_var (gfc_array_index_type, "num");
1834 gfc_add_modify_expr (block, number, gfc_index_zero_node);
1836 gfc_start_block (&body);
1837 if (nested_forall_info)
/* num += inner_size, executed once per forall iteration. */
1838 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
1842 gfc_add_modify_expr (&body, number, tmp);
1843 tmp = gfc_finish_block (&body);
1845 /* Generate loops. */
1846 if (nested_forall_info != NULL)
1847 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 0, 1);
1849 gfc_add_expr_to_block (block, tmp);
1855 /* Allocate temporary for forall construct according to the information in
1856 nested_forall_info. INNER_SIZE is the size of temporary needed in the
1857 assignment inside forall. PTEMP1 is returned for space free. */
/* Total element count = (iterations of the whole nest) * INNER_SIZE;
   the temporary holds elements of TYPE.  Returns a reference to the
   allocated storage; *PTEMP1 gets the pointer to pass to internal_free.  */
1860 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
1861 tree inner_size, stmtblock_t * block,
1867 tree bytesize, size;
1869 /* Calculate the total size of temporary needed in forall construct. */
1870 size = compute_overall_iter_number (nested_forall_info, inner_size, block);
1872 unit = TYPE_SIZE_UNIT (type);
1873 bytesize = fold (build2 (MULT_EXPR, gfc_array_index_type, size, unit));
1876 temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
/* Dereference the heap pointer so callers can index it like an array. */
1879 tmp = gfc_build_indirect_ref (temp1);
1887 /* Handle assignments inside forall which need temporary. */
/* Two-pass scheme: first evaluate EXPR2 for every forall iteration into
   a temporary (rhs->temp), then rerun the forall loops copying the
   temporary into EXPR1 (temp->lhs).  WHEREMASK, when non-NULL, gates
   each element copy.  Generated code is appended to BLOCK.  */
1889 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, tree wheremask,
1890 forall_info * nested_forall_info,
1891 stmtblock_t * block)
1896 tree count, count1, count2;
1899 tree mask, maskindex;
1900 forall_info *forall_tmp;
1902 /* Create vars. count1 is the current iterator number of the nested forall.
1903 count2 is the current iterator number of the inner loops needed in the
1905 count1 = gfc_create_var (gfc_array_index_type, "count1");
1906 count2 = gfc_create_var (gfc_array_index_type, "count2");
1908 /* Count is the wheremask index. */
1911 count = gfc_create_var (gfc_array_index_type, "count");
1912 gfc_add_modify_expr (block, count, gfc_index_zero_node);
1917 /* Initialize count1. */
1918 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
1920 /* Calculate the size of temporary needed in the assignment. Return loop, lss
1921 and rss which are used in function generate_loop_for_rhs_to_temp(). */
1922 inner_size = compute_inner_temp_size (expr1, expr2, block, &lss, &rss);
1924 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
1925 type = gfc_typenode_for_spec (&expr1->ts);
1927 /* Allocate temporary for nested forall construct according to the
1928 information in nested_forall_info and inner_size. */
1929 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
1930 inner_size, block, &ptemp1);
1932 /* Initialize the maskindexes. */
1933 forall_tmp = nested_forall_info;
1934 while (forall_tmp != NULL)
1936 mask = forall_tmp->mask;
1937 maskindex = forall_tmp->maskindex;
1939 gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
1940 forall_tmp = forall_tmp->next_nest;
1943 /* Generate codes to copy rhs to the temporary . */
1944 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, inner_size, count,
1945 count1, count2, lss, rss, wheremask);
1947 /* Generate body and loops according to the information in
1948 nested_forall_info. */
1949 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
1950 gfc_add_expr_to_block (block, tmp);
/* Rewind count1 before the second (temp->lhs) pass. */
1953 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
1955 /* Reset maskindexes. */
1956 forall_tmp = nested_forall_info;
1957 while (forall_tmp != NULL)
1959 mask = forall_tmp->mask;
1960 maskindex = forall_tmp->maskindex;
1962 gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
1963 forall_tmp = forall_tmp->next_nest;
1968 gfc_add_modify_expr (block, count, gfc_index_zero_node);
1970 /* Generate codes to copy the temporary to lhs. */
1971 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, inner_size, count,
1972 count1, count2, wheremask);
1974 /* Generate body and loops according to the information in
1975 nested_forall_info. */
1976 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
1977 gfc_add_expr_to_block (block, tmp);
1981 /* Free the temporary. */
1982 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
1983 tmp = gfc_build_function_call (gfor_fndecl_internal_free, tmp);
1984 gfc_add_expr_to_block (block, tmp);
1989 /* Translate pointer assignment inside FORALL which needs a temporary. */
/* Same two-pass scheme as gfc_trans_assign_need_temp, but for pointer
   assignment: pass 1 stores the rhs pointer (scalar case) or a full
   array descriptor (array case) per iteration; pass 2 replays the loops
   assigning those saved values to EXPR1.  Output goes into BLOCK.  */
1992 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
1993 forall_info * nested_forall_info,
1994 stmtblock_t * block)
2008 tree tmp, tmp1, ptemp1;
2009 tree mask, maskindex;
2010 forall_info *forall_tmp;
2012 count = gfc_create_var (gfc_array_index_type, "count");
2013 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2015 inner_size = integer_one_node;
2016 lss = gfc_walk_expr (expr1);
2017 rss = gfc_walk_expr (expr2);
2018 if (lss == gfc_ss_terminator)
/* Scalar pointer: the temporary is an array of pointers to TYPE. */
2020 type = gfc_typenode_for_spec (&expr1->ts);
2021 type = build_pointer_type (type);
2023 /* Allocate temporary for nested forall construct according to the
2024 information in nested_forall_info and inner_size. */
2025 tmp1 = allocate_temp_for_forall_nest (nested_forall_info,
2026 type, inner_size, block, &ptemp1);
2027 gfc_start_block (&body);
2028 gfc_init_se (&lse, NULL);
2029 lse.expr = gfc_build_array_ref (tmp1, count);
2030 gfc_init_se (&rse, NULL);
2031 rse.want_pointer = 1;
2032 gfc_conv_expr (&rse, expr2);
2033 gfc_add_block_to_block (&body, &rse.pre);
2034 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2035 gfc_add_block_to_block (&body, &rse.post);
2037 /* Increment count. */
2038 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2039 count, gfc_index_one_node));
2040 gfc_add_modify_expr (&body, count, tmp);
2042 tmp = gfc_finish_block (&body);
2044 /* Initialize the maskindexes. */
2045 forall_tmp = nested_forall_info;
2046 while (forall_tmp != NULL)
2048 mask = forall_tmp->mask;
2049 maskindex = forall_tmp->maskindex;
2051 gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
2052 forall_tmp = forall_tmp->next_nest;
2055 /* Generate body and loops according to the information in
2056 nested_forall_info. */
2057 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2058 gfc_add_expr_to_block (block, tmp);
/* Rewind count for the second (assign-to-lhs) pass. */
2061 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2063 /* Reset maskindexes. */
2064 forall_tmp = nested_forall_info;
2065 while (forall_tmp != NULL)
2067 mask = forall_tmp->mask;
2068 maskindex = forall_tmp->maskindex;
2070 gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
2071 forall_tmp = forall_tmp->next_nest;
2073 gfc_start_block (&body);
2074 gfc_init_se (&lse, NULL);
2075 gfc_init_se (&rse, NULL);
2076 rse.expr = gfc_build_array_ref (tmp1, count);
2077 lse.want_pointer = 1;
2078 gfc_conv_expr (&lse, expr1);
2079 gfc_add_block_to_block (&body, &lse.pre);
2080 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2081 gfc_add_block_to_block (&body, &lse.post);
2082 /* Increment count. */
2083 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2084 count, gfc_index_one_node));
2085 gfc_add_modify_expr (&body, count, tmp);
2086 tmp = gfc_finish_block (&body);
2088 /* Generate body and loops according to the information in
2089 nested_forall_info. */
2090 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2091 gfc_add_expr_to_block (block, tmp);
/* Array pointer: store a full descriptor per iteration instead. */
2095 gfc_init_loopinfo (&loop);
2097 /* Associate the SS with the loop. */
2098 gfc_add_ss_to_loop (&loop, rss);
2100 /* Setup the scalarizing loops and bounds. */
2101 gfc_conv_ss_startstride (&loop);
2103 gfc_conv_loop_setup (&loop);
2105 info = &rss->data.info;
2106 desc = info->descriptor;
2108 /* Make a new descriptor. */
2109 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2110 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2111 loop.from, loop.to, 1);
2113 /* Allocate temporary for nested forall construct. */
2114 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2115 inner_size, block, &ptemp1);
2116 gfc_start_block (&body);
2117 gfc_init_se (&lse, NULL);
2118 lse.expr = gfc_build_array_ref (tmp1, count);
2119 lse.direct_byref = 1;
/* Re-walk expr2: the earlier SS chain was consumed by the loop setup. */
2120 rss = gfc_walk_expr (expr2);
2121 gfc_conv_expr_descriptor (&lse, expr2, rss);
2123 gfc_add_block_to_block (&body, &lse.pre);
2124 gfc_add_block_to_block (&body, &lse.post);
2126 /* Increment count. */
2127 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2128 count, gfc_index_one_node));
2129 gfc_add_modify_expr (&body, count, tmp);
2131 tmp = gfc_finish_block (&body);
2133 /* Initialize the maskindexes. */
2134 forall_tmp = nested_forall_info;
2135 while (forall_tmp != NULL)
2137 mask = forall_tmp->mask;
2138 maskindex = forall_tmp->maskindex;
2140 gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
2141 forall_tmp = forall_tmp->next_nest;
2144 /* Generate body and loops according to the information in
2145 nested_forall_info. */
2146 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2147 gfc_add_expr_to_block (block, tmp);
2150 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2152 /* Reset maskindexes. */
2153 forall_tmp = nested_forall_info;
2154 while (forall_tmp != NULL)
2156 mask = forall_tmp->mask;
2157 maskindex = forall_tmp->maskindex;
2159 gfc_add_modify_expr (block, maskindex, gfc_index_zero_node);
2160 forall_tmp = forall_tmp->next_nest;
/* Second pass: assign each saved descriptor to the lhs pointer. */
2162 parm = gfc_build_array_ref (tmp1, count);
2163 lss = gfc_walk_expr (expr1);
2164 gfc_init_se (&lse, NULL);
2165 gfc_conv_expr_descriptor (&lse, expr1, lss);
2166 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2167 gfc_start_block (&body);
2168 gfc_add_block_to_block (&body, &lse.pre);
2169 gfc_add_block_to_block (&body, &lse.post);
2171 /* Increment count. */
2172 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2173 count, gfc_index_one_node));
2174 gfc_add_modify_expr (&body, count, tmp);
2176 tmp = gfc_finish_block (&body);
2178 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2179 gfc_add_expr_to_block (block, tmp);
2181 /* Free the temporary. */
2184 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2185 tmp = gfc_build_function_call (gfor_fndecl_internal_free, tmp);
2186 gfc_add_expr_to_block (block, tmp);
2191 /* FORALL and WHERE statements are really nasty, especially when you nest
2192 them. All the rhs of a forall assignment must be evaluated before the
2193 actual assignments are performed. Presumably this also applies to all the
2194 assignments in an inner where statement. */
2196 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2197 linear array, relying on the fact that we process in the same order in all
2200 forall (i=start:end:stride; maskexpr)
2204 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2206 count = ((end + 1 - start) / stride)
2207 masktmp(:) = maskexpr(:)
2210 for (i = start; i <= end; i += stride)
2212 if (masktmp[maskindex++])
2216 for (i = start; i <= end; i += stride)
2218 if (masktmp[maskindex++])
2222 Note that this code only works when there are no dependencies.
2223 Forall loop with array assignments and data dependencies are a real pain,
2224 because the size of the temporary cannot always be determined before the
2225 loop is executed. This problem is compounded by the presence of nested
/* Recursive worker for gfc_trans_forall: translates one FORALL level,
   links its forall_info into NESTED_FORALL_INFO, evaluates its mask into
   a temporary, then dispatches each statement in the body (assignment,
   pointer assignment, nested FORALL, WHERE).  */
2230 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2252 gfc_forall_iterator *fa;
2255 gfc_saved_var *saved_vars;
2256 iter_info *this_forall, *iter_tmp;
2257 forall_info *info, *forall_tmp;
2258 temporary_list *temp;
2260 gfc_start_block (&block);
2263 /* Count the FORALL index number. */
2264 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2268 /* Allocate the space for var, start, end, step, varexpr. */
2269 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2270 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2271 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2272 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2273 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2274 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2276 /* Allocate the space for info. */
2277 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2279 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2281 gfc_symbol *sym = fa->var->symtree->n.sym;
2283 /* allocate space for this_forall. */
2284 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2286 /* Create a temporary variable for the FORALL index. */
2287 tmp = gfc_typenode_for_spec (&sym->ts);
2288 var[n] = gfc_create_var (tmp, sym->name);
2289 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2291 /* Record it in this_forall. */
2292 this_forall->var = var[n];
2294 /* Replace the index symbol's backend_decl with the temporary decl. */
2295 sym->backend_decl = var[n];
2297 /* Work out the start, end and stride for the loop. */
2298 gfc_init_se (&se, NULL);
2299 gfc_conv_expr_val (&se, fa->start);
2300 /* Record it in this_forall. */
2301 this_forall->start = se.expr;
2302 gfc_add_block_to_block (&block, &se.pre);
2305 gfc_init_se (&se, NULL);
2306 gfc_conv_expr_val (&se, fa->end);
2307 /* Record it in this_forall. */
2308 this_forall->end = se.expr;
2309 gfc_make_safe_expr (&se);
2310 gfc_add_block_to_block (&block, &se.pre);
2313 gfc_init_se (&se, NULL);
2314 gfc_conv_expr_val (&se, fa->stride);
2315 /* Record it in this_forall. */
2316 this_forall->step = se.expr;
2317 gfc_make_safe_expr (&se);
2318 gfc_add_block_to_block (&block, &se.pre);
2321 /* Set the NEXT field of this_forall to NULL. */
2322 this_forall->next = NULL;
2323 /* Link this_forall to the info construct. */
2324 if (info->this_loop == NULL)
2325 info->this_loop = this_forall;
/* Otherwise append at the tail of the iterator chain. */
2328 iter_tmp = info->this_loop;
2329 while (iter_tmp->next != NULL)
2330 iter_tmp = iter_tmp->next;
2331 iter_tmp->next = this_forall;
2338 /* Work out the number of elements in the mask array. */
2341 size = gfc_index_one_node;
2342 sizevar = NULL_TREE;
2344 for (n = 0; n < nvar; n++)
2346 if (lenvar && TREE_TYPE (lenvar) != TREE_TYPE (start[n]))
2349 /* size = (end + step - start) / step. */
2350 tmp = fold (build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2351 step[n], start[n]));
2352 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp));
2354 tmp = fold (build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]));
2355 tmp = convert (gfc_array_index_type, tmp);
2357 size = fold (build2 (MULT_EXPR, gfc_array_index_type, size, tmp));
2360 /* Record the nvar and size of current forall level. */
2364 /* Link the current forall level to nested_forall_info. */
2365 forall_tmp = nested_forall_info;
2366 if (forall_tmp == NULL)
2367 nested_forall_info = info;
/* Append INFO at the innermost nesting position. */
2370 while (forall_tmp->next_nest != NULL)
2371 forall_tmp = forall_tmp->next_nest;
2372 info->outer = forall_tmp;
2373 forall_tmp->next_nest = info;
2376 /* Copy the mask into a temporary variable if required.
2377 For now we assume a mask temporary is needed. */
2380 /* Allocate the mask temporary. */
2381 bytesize = fold (build2 (MULT_EXPR, gfc_array_index_type, size,
2382 TYPE_SIZE_UNIT (boolean_type_node)));
2384 mask = gfc_do_allocate (bytesize, size, &pmask, &block, boolean_type_node);
2386 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2387 /* Record them in the info structure. */
2388 info->pmask = pmask;
2390 info->maskindex = maskindex;
2392 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2394 /* Start of mask assignment loop body. */
2395 gfc_start_block (&body);
2397 /* Evaluate the mask expression. */
2398 gfc_init_se (&se, NULL);
2399 gfc_conv_expr_val (&se, code->expr);
2400 gfc_add_block_to_block (&body, &se.pre);
2402 /* Store the mask. */
2403 se.expr = convert (boolean_type_node, se.expr);
2406 tmp = gfc_build_indirect_ref (mask);
2409 tmp = gfc_build_array_ref (tmp, maskindex);
2410 gfc_add_modify_expr (&body, tmp, se.expr);
2412 /* Advance to the next mask element. */
2413 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2414 maskindex, gfc_index_one_node);
2415 gfc_add_modify_expr (&body, maskindex, tmp);
2417 /* Generate the loops. */
2418 tmp = gfc_finish_block (&body);
2419 tmp = gfc_trans_nested_forall_loop (info, tmp, 0, 0);
2420 gfc_add_expr_to_block (&block, tmp);
2424 /* No mask was specified. */
2425 maskindex = NULL_TREE;
2426 mask = pmask = NULL_TREE;
2429 c = code->block->next;
2431 /* TODO: loop merging in FORALL statements. */
2432 /* Now that we've got a copy of the mask, generate the assignment loops. */
2438 /* A scalar or array assignment. */
2439 need_temp = gfc_check_dependency (c->expr, c->expr2, varexpr, nvar);
2440 /* Temporaries due to array assignment data dependencies introduce
2441 no end of problems. */
2443 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL,
2444 nested_forall_info, &block);
2447 /* Use the normal assignment copying routines. */
2448 assign = gfc_trans_assignment (c->expr, c->expr2);
2450 /* Reset the mask index. */
2452 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2454 /* Generate body and loops. */
2455 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2456 gfc_add_expr_to_block (&block, tmp);
2463 /* Translate WHERE or WHERE construct nested in FORALL. */
2465 gfc_trans_where_2 (c, NULL, NULL, nested_forall_info, &block, &temp);
2472 /* Free the temporary. */
2473 args = gfc_chainon_list (NULL_TREE, temp->temporary);
2474 tmp = gfc_build_function_call (gfor_fndecl_internal_free, args);
2475 gfc_add_expr_to_block (&block, tmp);
2484 /* Pointer assignment inside FORALL. */
2485 case EXEC_POINTER_ASSIGN:
2486 need_temp = gfc_check_dependency (c->expr, c->expr2, varexpr, nvar);
2488 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2489 nested_forall_info, &block);
2492 /* Use the normal assignment copying routines. */
2493 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2495 /* Reset the mask index. */
2497 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2499 /* Generate body and loops. */
2500 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign,
2502 gfc_add_expr_to_block (&block, tmp);
/* Nested FORALL: recurse with the accumulated nesting info. */
2507 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2508 gfc_add_expr_to_block (&block, tmp);
2518 /* Restore the original index variables. */
2519 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2520 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2522 /* Free the space for var, start, end, step, varexpr. */
2528 gfc_free (saved_vars);
2532 /* Free the temporary for the mask. */
2533 tmp = gfc_chainon_list (NULL_TREE, pmask);
2534 tmp = gfc_build_function_call (gfor_fndecl_internal_free, tmp);
2535 gfc_add_expr_to_block (&block, tmp);
2538 pushdecl (maskindex);
2540 return gfc_finish_block (&block);
2544 /* Translate the FORALL statement or construct. */
/* Entry point: delegates to gfc_trans_forall_1 with no enclosing nest. */
2546 tree gfc_trans_forall (gfc_code * code)
2548 return gfc_trans_forall_1 (code, NULL);
2552 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2553 If the WHERE construct is nested in FORALL, compute the overall temporary
2554 needed by the WHERE mask expression multiplied by the iterator number of
2556 ME is the WHERE mask expression.
2557 MASK is the temporary whose value is mask's value.
2558 NMASK is another temporary whose value is !mask.
2559 TEMP records the temporary's address allocated in this function in order to
2560 free them outside this function.
2561 MASK, NMASK and TEMP are all OUT arguments. */
2564 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2565 tree * mask, tree * nmask, temporary_list ** temp,
2566 stmtblock_t * block)
2571 tree ptemp1, ntmp, ptemp2;
2573 stmtblock_t body, body1;
2578 gfc_init_loopinfo (&loop);
2580 /* Calculate the size of temporary needed by the mask-expr. */
2581 inner_size = compute_inner_temp_size (me, me, block, &lss, &rss);
2583 /* Allocate temporary for where mask. */
2584 tmp = allocate_temp_for_forall_nest (nested_forall_info, boolean_type_node,
2585 inner_size, block, &ptemp1);
2586 /* Record the temporary address in order to free it later: push it onto
2587 the caller's TEMP list. */
2589 temporary_list *tempo;
2590 tempo = (temporary_list *) gfc_getmem (sizeof (temporary_list));
2591 tempo->temporary = ptemp1;
2592 tempo->next = *temp;
2596 /* Allocate temporary for !mask. */
2597 ntmp = allocate_temp_for_forall_nest (nested_forall_info, boolean_type_node,
2598 inner_size, block, &ptemp2);
2599 /* Record the temporary in order to free it later. */
2602 temporary_list *tempo;
2603 tempo = (temporary_list *) gfc_getmem (sizeof (temporary_list));
2604 tempo->temporary = ptemp2;
2605 tempo->next = *temp;
2609 /* Variable to index the temporary. */
2610 count = gfc_create_var (gfc_array_index_type, "count");
2611 /* Initialize count. */
2612 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2614 gfc_start_block (&body);
2616 gfc_init_se (&rse, NULL);
2617 gfc_init_se (&lse, NULL);
2619 if (lss == gfc_ss_terminator)
/* Scalar mask expression: no scalarizer loop needed. */
2621 gfc_init_block (&body1);
2625 /* Initialize the loop. */
2626 gfc_init_loopinfo (&loop);
2628 /* We may need LSS to determine the shape of the expression. */
2629 gfc_add_ss_to_loop (&loop, lss);
2630 gfc_add_ss_to_loop (&loop, rss);
2632 gfc_conv_ss_startstride (&loop);
2633 gfc_conv_loop_setup (&loop);
2635 gfc_mark_ss_chain_used (rss, 1);
2636 /* Start the loop body. */
2637 gfc_start_scalarized_body (&loop, &body1);
2639 /* Translate the expression. */
2640 gfc_copy_loopinfo_to_se (&rse, &loop);
2642 gfc_conv_expr (&rse, me);
2644 /* Form the expression of the temporary. */
2645 lse.expr = gfc_build_array_ref (tmp, count);
2646 tmpexpr = gfc_build_array_ref (ntmp, count);
2648 /* Use the scalar assignment to fill temporary TMP. */
2649 tmp1 = gfc_trans_scalar_assign (&lse, &rse, me->ts.type);
2650 gfc_add_expr_to_block (&body1, tmp1);
2652 /* Fill temporary NTMP with the logical negation of the mask value. */
2653 tmp1 = build1 (TRUTH_NOT_EXPR, TREE_TYPE (lse.expr), lse.expr);
2654 gfc_add_modify_expr (&body1, tmpexpr, tmp1);
2656 if (lss == gfc_ss_terminator)
2658 gfc_add_block_to_block (&body, &body1);
2662 /* Increment count. */
2663 tmp1 = fold (build2 (PLUS_EXPR, gfc_array_index_type, count,
2664 gfc_index_one_node));
2665 gfc_add_modify_expr (&body1, count, tmp1);
2667 /* Generate the copying loops. */
2668 gfc_trans_scalarizing_loops (&loop, &body1);
2670 gfc_add_block_to_block (&body, &loop.pre);
2671 gfc_add_block_to_block (&body, &loop.post);
2673 gfc_cleanup_loop (&loop);
2674 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2675 as tree nodes in SS may not be valid in different scope. */
2678 tmp1 = gfc_finish_block (&body);
2679 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2680 if (nested_forall_info != NULL)
2681 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
2684 gfc_add_expr_to_block (block, tmp1);
2693 /* Translate an assignment statement in a WHERE statement or construct
2694 statement. The MASK expression is used to control which elements
2695 of EXPR1 shall be assigned. */
/* NOTE(review): embedded line numbers jump in this extract, so the return
   type, braces, several declarations (e.g. the initialization of
   LSS_SECTION and INDEX) and some statements are elided.  COUNT1/COUNT2
   appear to index into the flattened mask temporaries built by
   gfc_evaluate_where_mask -- TODO confirm against the original file.  */
2698 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, tree mask,
2699 tree count1, tree count2)
2704 gfc_ss *lss_section;
2711 tree index, maskexpr, tmp1;
2714 /* TODO: handle this special case.
2715 Special case a single function returning an array. */
2716 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2718 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2724 /* Assignment of the form lhs = rhs. */
2725 gfc_start_block (&block);
2727 gfc_init_se (&lse, NULL);
2728 gfc_init_se (&rse, NULL);
2731 lss = gfc_walk_expr (expr1);
2734 /* In each where-assign-stmt, the mask-expr and the variable being
2735 defined shall be arrays of the same shape. */
2736 gcc_assert (lss != gfc_ss_terminator);
2738 /* The assignment needs scalarization. */
2741 /* Find a non-scalar SS from the lhs. */
2742 while (lss_section != gfc_ss_terminator
2743 && lss_section->type != GFC_SS_SECTION)
2744 lss_section = lss_section->next;
2746 gcc_assert (lss_section != gfc_ss_terminator);
2748 /* Initialize the scalarizer. */
2749 gfc_init_loopinfo (&loop);
2752 rss = gfc_walk_expr (expr2);
2753 if (rss == gfc_ss_terminator)
2755 /* The rhs is scalar. Add a ss for the expression. */
2756 rss = gfc_get_ss ();
2757 rss->next = gfc_ss_terminator;
2758 rss->type = GFC_SS_SCALAR;
2762 /* Associate the SS with the loop. */
2763 gfc_add_ss_to_loop (&loop, lss);
2764 gfc_add_ss_to_loop (&loop, rss);
2766 /* Calculate the bounds of the scalarization. */
2767 gfc_conv_ss_startstride (&loop);
2769 /* Resolve any data dependencies in the statement. */
2770 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2772 /* Setup the scalarizing loops. */
2773 gfc_conv_loop_setup (&loop);
2775 /* Setup the gfc_se structures. */
2776 gfc_copy_loopinfo_to_se (&lse, &loop);
2777 gfc_copy_loopinfo_to_se (&rse, &loop);
2780 gfc_mark_ss_chain_used (rss, 1);
2781 if (loop.temp_ss == NULL)
2784 gfc_mark_ss_chain_used (lss, 1);
/* A dependency was detected: assign through a temporary (two passes).  */
2788 lse.ss = loop.temp_ss;
2789 gfc_mark_ss_chain_used (lss, 3);
2790 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2793 /* Start the scalarized loop body. */
2794 gfc_start_scalarized_body (&loop, &body)
2796 /* Translate the expression. */
2797 gfc_conv_expr (&rse, expr2);
2798 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2800 gfc_conv_tmp_array_ref (&lse);
2801 gfc_advance_se_ss_chain (&lse);
2804 gfc_conv_expr (&lse, expr1);
2806 /* Form the mask expression according to the mask tree list.  Each
2807 further entry in the TREE_CHAIN is ANDed in, so a nested WHERE only
2808 assigns where every enclosing mask is true. */
2810 maskexpr = gfc_build_array_ref (tmp, index);
2814 tmp = TREE_CHAIN (tmp);
2817 tmp1 = gfc_build_array_ref (tmp, index);
2818 maskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1), maskexpr, tmp1);
2819 tmp = TREE_CHAIN (tmp);
2821 /* Use the scalar assignment as is, guarded by the combined mask. */
2822 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
2823 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2825 gfc_add_expr_to_block (&body, tmp);
2827 if (lss == gfc_ss_terminator)
2829 /* Increment count1. */
2830 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2831 count1, gfc_index_one_node));
2832 gfc_add_modify_expr (&body, count1, tmp);
2834 /* Use the scalar assignment as is. */
2835 gfc_add_block_to_block (&block, &body);
2839 gcc_assert (lse.ss == gfc_ss_terminator
2840 && rse.ss == gfc_ss_terminator);
2842 if (loop.temp_ss != NULL)
2844 /* Increment count1 before finish the main body of a scalarized
2846 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2847 count1, gfc_index_one_node));
2848 gfc_add_modify_expr (&body, count1, tmp);
2849 gfc_trans_scalarized_loop_boundary (&loop, &body);
2851 /* We need to copy the temporary to the actual lhs (second pass). */
2852 gfc_init_se (&lse, NULL);
2853 gfc_init_se (&rse, NULL);
2854 gfc_copy_loopinfo_to_se (&lse, &loop);
2855 gfc_copy_loopinfo_to_se (&rse, &loop);
2857 rse.ss = loop.temp_ss;
2860 gfc_conv_tmp_array_ref (&rse);
2861 gfc_advance_se_ss_chain (&rse);
2862 gfc_conv_expr (&lse, expr1);
2864 gcc_assert (lse.ss == gfc_ss_terminator
2865 && rse.ss == gfc_ss_terminator);
2867 /* Form the mask expression according to the mask tree list, exactly
2868 as in the first pass. */
2871 maskexpr = gfc_build_array_ref (tmp, index);
2875 tmp = TREE_CHAIN (tmp);
2878 tmp1 = gfc_build_array_ref (tmp, index);
2879 maskexpr = build2 (TRUTH_AND_EXPR, TREE_TYPE (tmp1),
2881 tmp = TREE_CHAIN (tmp);
2883 /* Use the scalar assignment as is. */
2884 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts.type);
2885 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2886 gfc_add_expr_to_block (&body, tmp);
2888 /* Increment count2 (index for the copy-back pass). */
2889 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2890 count2, gfc_index_one_node));
2891 gfc_add_modify_expr (&body, count2, tmp);
2895 /* Increment count1. */
2896 tmp = fold (build2 (PLUS_EXPR, gfc_array_index_type,
2897 count1, gfc_index_one_node));
2898 gfc_add_modify_expr (&body, count1, tmp);
2901 /* Generate the copying loops. */
2902 gfc_trans_scalarizing_loops (&loop, &body);
2904 /* Wrap the whole thing up. */
2905 gfc_add_block_to_block (&block, &loop.pre);
2906 gfc_add_block_to_block (&block, &loop.post);
2907 gfc_cleanup_loop (&loop);
2910 return gfc_finish_block (&block);
2914 /* Translate the WHERE construct or statement.
2915 This fuction can be called iteratively to translate the nested WHERE
2916 construct or statement.
2917 MASK is the control mask, and PMASK is the pending control mask.
2918 TEMP records the temporary address which must be freed later. */
/* NOTE(review): several lines are elided in this extract (embedded line
   numbers jump), including the return type, braces, some declarations and
   the bodies of some branches.  The masks are kept as TREE_CHAIN lists so
   nested WHEREs AND their masks together in gfc_trans_where_assign.  */
2921 gfc_trans_where_2 (gfc_code * code, tree mask, tree pmask,
2922 forall_info * nested_forall_info, stmtblock_t * block,
2923 temporary_list ** temp)
2929 tree tmp, tmp1, tmp2;
2930 tree count1, count2;
2934 /* the WHERE statement or the WHERE construct statement. */
2935 cblock = code->block;
2938 /* Has mask-expr. */
2941 /* Ensure that the WHERE mask be evaluated only once.  TMP receives the
2942 mask temporary, TMP1 its negation (used as the pending mask). */
2942 tmp2 = gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
2943 &tmp, &tmp1, temp, block);
2945 /* Set the control mask and the pending control mask. */
2946 /* It's a where-stmt. */
2952 /* It's a nested where-stmt: extend both mask chains. */
2953 else if (mask && pmask == NULL)
2956 /* Use the TREE_CHAIN to list the masks. */
2957 tmp2 = copy_list (mask);
2958 pmask = chainon (mask, tmp1);
2959 mask = chainon (tmp2, tmp);
2961 /* It's a masked-elsewhere-stmt. */
2962 else if (mask && cblock->expr)
2965 tmp2 = copy_list (pmask);
2968 tmp2 = chainon (tmp2, tmp);
2969 pmask = chainon (mask, tmp1);
2973 /* It's a elsewhere-stmt. No mask-expr is present. */
2977 /* Get the assignment statement of a WHERE statement, or the first
2978 statement in where-body-construct of a WHERE construct. */
2979 cnext = cblock->next;
2984 /* WHERE assignment statement. */
2986 expr1 = cnext->expr;
2987 expr2 = cnext->expr2;
2988 if (nested_forall_info != NULL)
/* Inside FORALL: check for dependencies between lhs and rhs over the
   FORALL index variables; use a temporary when one exists.  */
2993 nvar = nested_forall_info->nvar;
2994 varexpr = (gfc_expr **)
2995 gfc_getmem (nvar * sizeof (gfc_expr *));
2996 need_temp = gfc_check_dependency (expr1, expr2, varexpr,
2999 gfc_trans_assign_need_temp (expr1, expr2, mask,
3000 nested_forall_info, block);
3003 /* Variables to control maskexpr. */
3004 count1 = gfc_create_var (gfc_array_index_type, "count1");
3005 count2 = gfc_create_var (gfc_array_index_type, "count2");
3006 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3007 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3009 tmp = gfc_trans_where_assign (expr1, expr2, mask, count1,
3011 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3013 gfc_add_expr_to_block (block, tmp);
/* Not inside FORALL: emit the masked assignment directly.  */
3018 /* Variables to control maskexpr. */
3019 count1 = gfc_create_var (gfc_array_index_type, "count1");
3020 count2 = gfc_create_var (gfc_array_index_type, "count2");
3021 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3022 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3024 tmp = gfc_trans_where_assign (expr1, expr2, mask, count1,
3026 gfc_add_expr_to_block (block, tmp);
3031 /* WHERE or WHERE construct is part of a where-body-construct:
3032 translate it recursively. */
3033 /* Ensure that MASK is not modified by next gfc_trans_where_2. */
3034 mask_copy = copy_list (mask);
3035 gfc_trans_where_2 (cnext, mask_copy, NULL, nested_forall_info,
3043 /* The next statement within the same where-body-construct. */
3044 cnext = cnext->next;
3046 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3047 cblock = cblock->block;
3052 /* As the WHERE or WHERE construct statement can be nested, we call
3053 gfc_trans_where_2 to do the translation, and pass the initial
3054 NULL values for both the control mask and the pending control mask. */
/* NOTE(review): the return type, braces, TEMP initialization and the loop
   header that walks the TEMP list (advancing via P and freeing each node)
   appear on lines elided from this extract -- confirm against the
   original file.  */
3057 gfc_trans_where (gfc_code * code)
3060 temporary_list *temp, *p;
3064 gfc_start_block (&block);
3067 gfc_trans_where_2 (code, NULL, NULL, NULL, &block, &temp);
3069 /* Add calls to free temporaries which were dynamically allocated. */
3072 args = gfc_chainon_list (NULL_TREE, temp->temporary);
3073 tmp = gfc_build_function_call (gfor_fndecl_internal_free, args);
3074 gfc_add_expr_to_block (&block, tmp);
3080 return gfc_finish_block (&block);
3084 /* CYCLE a DO loop. The label decl has already been created by
3085 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3086 node at the head of the loop. We must mark the label as used. */
3089 gfc_trans_cycle (gfc_code * code)
/* Fetch the cycle label stored by gfc_trans_do() and emit a goto to it.  */
3093 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3094 TREE_USED (cycle_label) = 1;
3095 return build1_v (GOTO_EXPR, cycle_label);
3099 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3100 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3104 gfc_trans_exit (gfc_code * code)
/* Fetch the exit label stored by gfc_trans_do() and emit a goto to it.  */
3108 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3109 TREE_USED (exit_label) = 1;
3110 return build1_v (GOTO_EXPR, exit_label);
3114 /* Translate the ALLOCATE statement.  Walks code->ext.alloc_list and
3115 generates an allocation for each object; with a STAT= variable,
3116 failures branch to a shared error label and the status is copied out. */
/* NOTE(review): embedded line numbers jump, so the return type, braces,
   declarations and some conditionals (e.g. the `if (code->expr)` guards
   around the STAT handling) are elided from this extract.  */
3117 gfc_trans_allocate (gfc_code * code)
3130 if (!code->ext.alloc_list)
3133 gfc_start_block (&block);
/* STAT= present: allocate a status variable and an error label so a
   failed allocation can jump past the remaining allocations.  */
3137 tree gfc_int4_type_node = gfc_get_int_type (4);
3139 stat = gfc_create_var (gfc_int4_type_node, "stat");
3140 pstat = gfc_build_addr_expr (NULL, stat);
3142 error_label = gfc_build_label_decl (NULL_TREE);
3143 TREE_USED (error_label) = 1;
/* No STAT=: pass a null status pointer to the runtime.  */
3147 pstat = integer_zero_node;
3148 stat = error_label = NULL_TREE;
3152 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3156 gfc_init_se (&se, NULL);
3157 gfc_start_block (&se.pre);
3159 se.want_pointer = 1;
3160 se.descriptor_only = 1;
3161 gfc_conv_expr (&se, expr);
3165 /* Find the last reference in the chain. */
3166 while (ref && ref->next != NULL)
3168 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
3172 if (ref != NULL && ref->type == REF_ARRAY)
3175 gfc_array_allocate (&se, ref, pstat);
3179 /* A scalar or derived type: call the library allocator with the
3180 pointer's address and the pointed-to size. */
3182 val = gfc_create_var (ppvoid_type_node, "ptr");
3183 tmp = gfc_build_addr_expr (ppvoid_type_node, se.expr);
3184 gfc_add_modify_expr (&se.pre, val, tmp);
3186 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3187 parm = gfc_chainon_list (NULL_TREE, val);
3188 parm = gfc_chainon_list (parm, tmp);
3189 parm = gfc_chainon_list (parm, pstat);
3190 tmp = gfc_build_function_call (gfor_fndecl_allocate, parm);
3191 gfc_add_expr_to_block (&se.pre, tmp);
/* With STAT=: if the runtime reported failure, skip to the error label.  */
3195 tmp = build1_v (GOTO_EXPR, error_label);
3197 build2 (NE_EXPR, boolean_type_node, stat, integer_zero_node);
3198 tmp = build3_v (COND_EXPR, parm, tmp, build_empty_stmt ());
3199 gfc_add_expr_to_block (&se.pre, tmp);
3203 tmp = gfc_finish_block (&se.pre);
3204 gfc_add_expr_to_block (&block, tmp);
3207 /* Assign the value to the status variable. */
3210 tmp = build1_v (LABEL_EXPR, error_label);
3211 gfc_add_expr_to_block (&block, tmp);
3213 gfc_init_se (&se, NULL);
3214 gfc_conv_expr_lhs (&se, code->expr);
3215 tmp = convert (TREE_TYPE (se.expr), stat);
3216 gfc_add_modify_expr (&block, se.expr, tmp);
3219 return gfc_finish_block (&block);
/* Translate the DEALLOCATE statement: free each object on
   code->ext.alloc_list, using the array deallocator for dimensioned
   symbols and the scalar library deallocator otherwise.
   NOTE(review): the return type, braces and declarations are on lines
   elided from this extract.  */
3224 gfc_trans_deallocate (gfc_code * code)
3234 gfc_start_block (&block);
3236 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3239 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3241 gfc_init_se (&se, NULL);
3242 gfc_start_block (&se.pre);
3244 se.want_pointer = 1;
3245 se.descriptor_only = 1;
3246 gfc_conv_expr (&se, expr);
3248 if (expr->symtree->n.sym->attr.dimension)
3250 tmp = gfc_array_deallocate (se.expr);
3251 gfc_add_expr_to_block (&se.pre, tmp);
/* Scalar or derived type: pass the pointer's address to the runtime
   deallocator (second argument presumably a null STAT pointer --
   confirm against gfor_fndecl_deallocate's signature).  */
3255 type = build_pointer_type (TREE_TYPE (se.expr));
3256 var = gfc_create_var (type, "ptr");
3257 tmp = gfc_build_addr_expr (type, se.expr);
3258 gfc_add_modify_expr (&se.pre, var, tmp);
3260 tmp = gfc_chainon_list (NULL_TREE, var);
3261 tmp = gfc_chainon_list (tmp, integer_zero_node);
3262 tmp = gfc_build_function_call (gfor_fndecl_deallocate, tmp);
3263 gfc_add_expr_to_block (&se.pre, tmp);
3265 tmp = gfc_finish_block (&se.pre);
3266 gfc_add_expr_to_block (&block, tmp);
3269 return gfc_finish_block (&block);