1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* trans-array.c-- Various array related code, including scalarization,
25 allocation, initialization and other support routines. */
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
39 If the expression is an assignment, we must then resolve any dependencies.
40 In fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this gfc_make_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
67 For assignment expressions requiring a temporary two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
82 #include "coretypes.h"
85 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
88 #include "constructor.h"
90 #include "trans-stmt.h"
91 #include "trans-types.h"
92 #include "trans-array.h"
93 #include "trans-const.h"
94 #include "dependency.h"
96 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
/* The contents of this structure aren't actually used, just the address.
   gfc_ss_terminator is a sentinel ending every SS chain: walkers compare
   pointers against it (ss != gfc_ss_terminator) and never dereference it.  */
static gfc_ss gfc_ss_terminator_var;
gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
104 gfc_array_dataptr_type (tree desc)
106 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
110 /* Build expressions to access the members of an array descriptor.
111 It's surprisingly easy to mess up here, so never access
112 an array descriptor by "brute force", always use these
113 functions. This also avoids problems if we change the format
114 of an array descriptor.
116 To understand these magic numbers, look at the comments
117 before gfc_build_array_type() in trans-types.c.
119 The code within these defines should be the only code which knows the format
120 of an array descriptor.
   Any code just needing to obtain the bounds of an array should use
   gfc_conv_array_* rather than the following functions as these will return
   known constant values, and work with arrays which do not have descriptors.
126 Don't forget to #undef these! */
/* Indices of the fields within the array descriptor record type; see the
   comments before gfc_build_array_type() in trans-types.c.  DATA_FIELD is
   defined on a line not shown in this excerpt and is asserted == 0 below.  */
#define OFFSET_FIELD 1
#define DTYPE_FIELD 2
#define DIMENSION_FIELD 3
#define CAF_TOKEN_FIELD 4

/* Indices of the sub-fields within each element of the per-dimension
   (DIMENSION_FIELD) array of records.  */
#define STRIDE_SUBFIELD 0
#define LBOUND_SUBFIELD 1
#define UBOUND_SUBFIELD 2
138 /* This provides READ-ONLY access to the data field. The field itself
139 doesn't have the proper type. */
142 gfc_conv_descriptor_data_get (tree desc)
146 type = TREE_TYPE (desc);
147 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
149 field = TYPE_FIELDS (type);
150 gcc_assert (DATA_FIELD == 0);
152 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
154 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
159 /* This provides WRITE access to the data field.
161 TUPLES_P is true if we are generating tuples.
163 This function gets called through the following macros:
164 gfc_conv_descriptor_data_set
165 gfc_conv_descriptor_data_set. */
168 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
172 type = TREE_TYPE (desc);
173 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
175 field = TYPE_FIELDS (type);
176 gcc_assert (DATA_FIELD == 0);
178 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
180 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
184 /* This provides address access to the data field. This should only be
185 used by array allocation, passing this on to the runtime. */
188 gfc_conv_descriptor_data_addr (tree desc)
192 type = TREE_TYPE (desc);
193 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
195 field = TYPE_FIELDS (type);
196 gcc_assert (DATA_FIELD == 0);
198 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
200 return gfc_build_addr_expr (NULL_TREE, t);
204 gfc_conv_descriptor_offset (tree desc)
209 type = TREE_TYPE (desc);
210 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
212 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
213 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
215 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
216 desc, field, NULL_TREE);
220 gfc_conv_descriptor_offset_get (tree desc)
222 return gfc_conv_descriptor_offset (desc);
226 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
229 tree t = gfc_conv_descriptor_offset (desc);
230 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
235 gfc_conv_descriptor_dtype (tree desc)
240 type = TREE_TYPE (desc);
241 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
243 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
244 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
246 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
247 desc, field, NULL_TREE);
251 gfc_conv_descriptor_dimension (tree desc, tree dim)
257 type = TREE_TYPE (desc);
258 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
260 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
261 gcc_assert (field != NULL_TREE
262 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
263 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
265 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
266 desc, field, NULL_TREE);
267 tmp = gfc_build_array_ref (tmp, dim, NULL);
273 gfc_conv_descriptor_token (tree desc)
278 type = TREE_TYPE (desc);
279 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
280 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
281 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
282 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
283 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == prvoid_type_node);
285 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
286 desc, field, NULL_TREE);
291 gfc_conv_descriptor_stride (tree desc, tree dim)
296 tmp = gfc_conv_descriptor_dimension (desc, dim);
297 field = TYPE_FIELDS (TREE_TYPE (tmp));
298 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
299 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
301 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
302 tmp, field, NULL_TREE);
307 gfc_conv_descriptor_stride_get (tree desc, tree dim)
309 tree type = TREE_TYPE (desc);
310 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
311 if (integer_zerop (dim)
312 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
313 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
314 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
315 return gfc_index_one_node;
317 return gfc_conv_descriptor_stride (desc, dim);
321 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
322 tree dim, tree value)
324 tree t = gfc_conv_descriptor_stride (desc, dim);
325 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
329 gfc_conv_descriptor_lbound (tree desc, tree dim)
334 tmp = gfc_conv_descriptor_dimension (desc, dim);
335 field = TYPE_FIELDS (TREE_TYPE (tmp));
336 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
337 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
339 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
340 tmp, field, NULL_TREE);
345 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
347 return gfc_conv_descriptor_lbound (desc, dim);
351 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
352 tree dim, tree value)
354 tree t = gfc_conv_descriptor_lbound (desc, dim);
355 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
359 gfc_conv_descriptor_ubound (tree desc, tree dim)
364 tmp = gfc_conv_descriptor_dimension (desc, dim);
365 field = TYPE_FIELDS (TREE_TYPE (tmp));
366 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
367 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
369 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
370 tmp, field, NULL_TREE);
375 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
377 return gfc_conv_descriptor_ubound (desc, dim);
381 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
382 tree dim, tree value)
384 tree t = gfc_conv_descriptor_ubound (desc, dim);
385 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
388 /* Build a null array descriptor constructor. */
391 gfc_build_null_descriptor (tree type)
396 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
397 gcc_assert (DATA_FIELD == 0);
398 field = TYPE_FIELDS (type);
400 /* Set a NULL data pointer. */
401 tmp = build_constructor_single (type, field, null_pointer_node);
402 TREE_CONSTANT (tmp) = 1;
403 /* All other fields are ignored. */
409 /* Modify a descriptor such that the lbound of a given dimension is the value
410 specified. This also updates ubound and offset accordingly. */
413 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
414 int dim, tree new_lbound)
416 tree offs, ubound, lbound, stride;
417 tree diff, offs_diff;
419 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
421 offs = gfc_conv_descriptor_offset_get (desc);
422 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
423 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
424 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
426 /* Get difference (new - old) by which to shift stuff. */
427 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
430 /* Shift ubound and offset accordingly. This has to be done before
431 updating the lbound, as they depend on the lbound expression! */
432 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
434 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
435 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
437 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
439 gfc_conv_descriptor_offset_set (block, desc, offs);
441 /* Finally set lbound to value we want. */
442 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
/* Cleanup those #defines.  Undefine every descriptor field index so the
   macros cannot leak into the rest of the file.  */

#undef DATA_FIELD
#undef OFFSET_FIELD
#undef DTYPE_FIELD
#undef DIMENSION_FIELD
#undef CAF_TOKEN_FIELD
#undef STRIDE_SUBFIELD
#undef LBOUND_SUBFIELD
#undef UBOUND_SUBFIELD
458 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
459 flags & 1 = Main loop body.
460 flags & 2 = temp copy loop. */
463 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
465 for (; ss != gfc_ss_terminator; ss = ss->next)
466 ss->useflags = flags;
469 static void gfc_free_ss (gfc_ss *);
472 /* Free a gfc_ss chain. */
475 gfc_free_ss_chain (gfc_ss * ss)
479 while (ss != gfc_ss_terminator)
481 gcc_assert (ss != NULL);
492 gfc_free_ss (gfc_ss * ss)
499 for (n = 0; n < ss->dimen; n++)
501 if (ss->data.info.subscript[ss->dim[n]])
502 gfc_free_ss_chain (ss->data.info.subscript[ss->dim[n]]);
514 /* Creates and initializes an array type gfc_ss struct. */
517 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
527 for (i = 0; i < ss->dimen; i++)
534 /* Creates and initializes a temporary type gfc_ss struct. */
537 gfc_get_temp_ss (tree type, tree string_length, int dimen)
543 ss->next = gfc_ss_terminator;
544 ss->type = GFC_SS_TEMP;
545 ss->string_length = string_length;
546 ss->data.temp.type = type;
548 for (i = 0; i < ss->dimen; i++)
555 /* Creates and initializes a scalar type gfc_ss struct. */
558 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
564 ss->type = GFC_SS_SCALAR;
571 /* Free all the SS associated with a loop. */
574 gfc_cleanup_loop (gfc_loopinfo * loop)
580 while (ss != gfc_ss_terminator)
582 gcc_assert (ss != NULL);
583 next = ss->loop_chain;
590 /* Associate a SS chain with a loop. */
593 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
597 if (head == gfc_ss_terminator)
601 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
603 if (ss->next == gfc_ss_terminator)
604 ss->loop_chain = loop->ss;
606 ss->loop_chain = ss->next;
608 gcc_assert (ss == gfc_ss_terminator);
613 /* Generate an initializer for a static pointer or allocatable array. */
616 gfc_trans_static_array_pointer (gfc_symbol * sym)
620 gcc_assert (TREE_STATIC (sym->backend_decl));
621 /* Just zero the data member. */
622 type = TREE_TYPE (sym->backend_decl);
623 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
627 /* If the bounds of SE's loop have not yet been set, see if they can be
628 determined from array spec AS, which is the array spec of a called
629 function. MAPPING maps the callee's dummy arguments to the values
630 that the caller is passing. Add any initialization and finalization
634 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
635 gfc_se * se, gfc_array_spec * as)
643 if (as && as->type == AS_EXPLICIT)
644 for (n = 0; n < se->loop->dimen; n++)
646 dim = se->ss->dim[n];
647 gcc_assert (dim < as->rank);
648 gcc_assert (se->loop->dimen == as->rank);
649 if (se->loop->to[n] == NULL_TREE)
651 /* Evaluate the lower bound. */
652 gfc_init_se (&tmpse, NULL);
653 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
654 gfc_add_block_to_block (&se->pre, &tmpse.pre);
655 gfc_add_block_to_block (&se->post, &tmpse.post);
656 lower = fold_convert (gfc_array_index_type, tmpse.expr);
658 /* ...and the upper bound. */
659 gfc_init_se (&tmpse, NULL);
660 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
661 gfc_add_block_to_block (&se->pre, &tmpse.pre);
662 gfc_add_block_to_block (&se->post, &tmpse.post);
663 upper = fold_convert (gfc_array_index_type, tmpse.expr);
665 /* Set the upper bound of the loop to UPPER - LOWER. */
666 tmp = fold_build2_loc (input_location, MINUS_EXPR,
667 gfc_array_index_type, upper, lower);
668 tmp = gfc_evaluate_now (tmp, &se->pre);
669 se->loop->to[n] = tmp;
675 /* Generate code to allocate an array temporary, or create a variable to
676 hold the data. If size is NULL, zero the descriptor so that the
677 callee will allocate the array. If DEALLOC is true, also generate code to
678 free the array afterwards.
680 If INITIAL is not NULL, it is packed using internal_pack and the result used
681 as data instead of allocating a fresh, unitialized area of memory.
683 Initialization code is added to PRE and finalization code to POST.
684 DYNAMIC is true if the caller may want to extend the array later
685 using realloc. This prevents us from putting the array on the stack. */
688 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
689 gfc_array_info * info, tree size, tree nelem,
690 tree initial, bool dynamic, bool dealloc)
696 desc = info->descriptor;
697 info->offset = gfc_index_zero_node;
698 if (size == NULL_TREE || integer_zerop (size))
700 /* A callee allocated array. */
701 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
706 /* Allocate the temporary. */
707 onstack = !dynamic && initial == NULL_TREE
708 && (gfc_option.flag_stack_arrays
709 || gfc_can_put_var_on_stack (size));
713 /* Make a temporary variable to hold the data. */
714 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
715 nelem, gfc_index_one_node);
716 tmp = gfc_evaluate_now (tmp, pre);
717 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
719 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
721 tmp = gfc_create_var (tmp, "A");
722 /* If we're here only because of -fstack-arrays we have to
723 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
724 if (!gfc_can_put_var_on_stack (size))
725 gfc_add_expr_to_block (pre,
726 fold_build1_loc (input_location,
727 DECL_EXPR, TREE_TYPE (tmp),
729 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
730 gfc_conv_descriptor_data_set (pre, desc, tmp);
734 /* Allocate memory to hold the data or call internal_pack. */
735 if (initial == NULL_TREE)
737 tmp = gfc_call_malloc (pre, NULL, size);
738 tmp = gfc_evaluate_now (tmp, pre);
745 stmtblock_t do_copying;
747 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
748 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
749 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
750 tmp = gfc_get_element_type (tmp);
751 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
752 packed = gfc_create_var (build_pointer_type (tmp), "data");
754 tmp = build_call_expr_loc (input_location,
755 gfor_fndecl_in_pack, 1, initial);
756 tmp = fold_convert (TREE_TYPE (packed), tmp);
757 gfc_add_modify (pre, packed, tmp);
759 tmp = build_fold_indirect_ref_loc (input_location,
761 source_data = gfc_conv_descriptor_data_get (tmp);
763 /* internal_pack may return source->data without any allocation
764 or copying if it is already packed. If that's the case, we
765 need to allocate and copy manually. */
767 gfc_start_block (&do_copying);
768 tmp = gfc_call_malloc (&do_copying, NULL, size);
769 tmp = fold_convert (TREE_TYPE (packed), tmp);
770 gfc_add_modify (&do_copying, packed, tmp);
771 tmp = gfc_build_memcpy_call (packed, source_data, size);
772 gfc_add_expr_to_block (&do_copying, tmp);
774 was_packed = fold_build2_loc (input_location, EQ_EXPR,
775 boolean_type_node, packed,
777 tmp = gfc_finish_block (&do_copying);
778 tmp = build3_v (COND_EXPR, was_packed, tmp,
779 build_empty_stmt (input_location));
780 gfc_add_expr_to_block (pre, tmp);
782 tmp = fold_convert (pvoid_type_node, packed);
785 gfc_conv_descriptor_data_set (pre, desc, tmp);
788 info->data = gfc_conv_descriptor_data_get (desc);
790 /* The offset is zero because we create temporaries with a zero
792 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
794 if (dealloc && !onstack)
796 /* Free the temporary. */
797 tmp = gfc_conv_descriptor_data_get (desc);
798 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
799 gfc_add_expr_to_block (post, tmp);
804 /* Get the array reference dimension corresponding to the given loop dimension.
805 It is different from the true array dimension given by the dim array in
806 the case of a partial array reference
807 It is different from the loop dimension in the case of a transposed array.
811 get_array_ref_dim (gfc_ss *ss, int loop_dim)
813 int n, array_dim, array_ref_dim;
816 array_dim = ss->dim[loop_dim];
818 for (n = 0; n < ss->dimen; n++)
819 if (ss->dim[n] < array_dim)
822 return array_ref_dim;
826 /* Generate code to create and initialize the descriptor for a temporary
827 array. This is used for both temporaries needed by the scalarizer, and
828 functions returning arrays. Adjusts the loop variables to be
829 zero-based, and calculates the loop bounds for callee allocated arrays.
830 Allocate the array unless it's callee allocated (we have a callee
831 allocated array if 'callee_alloc' is true, or if loop->to[n] is
832 NULL_TREE for any n). Also fills in the descriptor, data and offset
833 fields of info if known. Returns the size of the array, or NULL for a
834 callee allocated array.
836 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
837 gfc_trans_allocate_array_storage.
841 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
842 gfc_loopinfo * loop, gfc_ss * ss,
843 tree eltype, tree initial, bool dynamic,
844 bool dealloc, bool callee_alloc, locus * where)
846 gfc_array_info *info;
847 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
857 memset (from, 0, sizeof (from));
858 memset (to, 0, sizeof (to));
860 info = &ss->data.info;
862 gcc_assert (ss->dimen > 0);
863 gcc_assert (loop->dimen == ss->dimen);
865 if (gfc_option.warn_array_temp && where)
866 gfc_warning ("Creating array temporary at %L", where);
868 /* Set the lower bound to zero. */
869 for (n = 0; n < loop->dimen; n++)
873 /* Callee allocated arrays may not have a known bound yet. */
875 loop->to[n] = gfc_evaluate_now (
876 fold_build2_loc (input_location, MINUS_EXPR,
877 gfc_array_index_type,
878 loop->to[n], loop->from[n]),
880 loop->from[n] = gfc_index_zero_node;
882 /* We are constructing the temporary's descriptor based on the loop
883 dimensions. As the dimensions may be accessed in arbitrary order
884 (think of transpose) the size taken from the n'th loop may not map
885 to the n'th dimension of the array. We need to reconstruct loop infos
886 in the right order before using it to set the descriptor
888 tmp_dim = get_array_ref_dim (ss, n);
889 from[tmp_dim] = loop->from[n];
890 to[tmp_dim] = loop->to[n];
892 info->delta[dim] = gfc_index_zero_node;
893 info->start[dim] = gfc_index_zero_node;
894 info->end[dim] = gfc_index_zero_node;
895 info->stride[dim] = gfc_index_one_node;
898 /* Initialize the descriptor. */
900 gfc_get_array_type_bounds (eltype, ss->dimen, 0, from, to, 1,
901 GFC_ARRAY_UNKNOWN, true);
902 desc = gfc_create_var (type, "atmp");
903 GFC_DECL_PACKED_ARRAY (desc) = 1;
905 info->descriptor = desc;
906 size = gfc_index_one_node;
908 /* Fill in the array dtype. */
909 tmp = gfc_conv_descriptor_dtype (desc);
910 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
913 Fill in the bounds and stride. This is a packed array, so:
916 for (n = 0; n < rank; n++)
919 delta = ubound[n] + 1 - lbound[n];
922 size = size * sizeof(element);
927 /* If there is at least one null loop->to[n], it is a callee allocated
929 for (n = 0; n < loop->dimen; n++)
930 if (loop->to[n] == NULL_TREE)
936 for (n = 0; n < loop->dimen; n++)
940 if (size == NULL_TREE)
942 /* For a callee allocated array express the loop bounds in terms
943 of the descriptor fields. */
944 tmp = fold_build2_loc (input_location,
945 MINUS_EXPR, gfc_array_index_type,
946 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
947 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
952 /* Store the stride and bound components in the descriptor. */
953 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
955 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
956 gfc_index_zero_node);
958 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n],
961 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
962 to[n], gfc_index_one_node);
964 /* Check whether the size for this dimension is negative. */
965 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, tmp,
966 gfc_index_zero_node);
967 cond = gfc_evaluate_now (cond, pre);
972 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
973 boolean_type_node, or_expr, cond);
975 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
977 size = gfc_evaluate_now (size, pre);
980 /* Get the size of the array. */
982 if (size && !callee_alloc)
984 /* If or_expr is true, then the extent in at least one
985 dimension is zero and the size is set to zero. */
986 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
987 or_expr, gfc_index_zero_node, size);
990 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
992 fold_convert (gfc_array_index_type,
993 TYPE_SIZE_UNIT (gfc_get_element_type (type))));
1001 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1004 if (ss->dimen > loop->temp_dim)
1005 loop->temp_dim = ss->dimen;
1011 /* Return the number of iterations in a loop that starts at START,
1012 ends at END, and has step STEP. */
1015 gfc_get_iteration_count (tree start, tree end, tree step)
1020 type = TREE_TYPE (step);
1021 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1022 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1023 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1024 build_int_cst (type, 1));
1025 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1026 build_int_cst (type, 0));
1027 return fold_convert (gfc_array_index_type, tmp);
1031 /* Extend the data in array DESC by EXTRA elements. */
1034 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1041 if (integer_zerop (extra))
1044 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1046 /* Add EXTRA to the upper bound. */
1047 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1049 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1051 /* Get the value of the current data pointer. */
1052 arg0 = gfc_conv_descriptor_data_get (desc);
1054 /* Calculate the new array size. */
1055 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1056 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1057 ubound, gfc_index_one_node);
1058 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1059 fold_convert (size_type_node, tmp),
1060 fold_convert (size_type_node, size));
1062 /* Call the realloc() function. */
1063 tmp = gfc_call_realloc (pblock, arg0, arg1);
1064 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1068 /* Return true if the bounds of iterator I can only be determined
1072 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1074 return (i->start->expr_type != EXPR_CONSTANT
1075 || i->end->expr_type != EXPR_CONSTANT
1076 || i->step->expr_type != EXPR_CONSTANT);
1080 /* Split the size of constructor element EXPR into the sum of two terms,
1081 one of which can be determined at compile time and one of which must
1082 be calculated at run time. Set *SIZE to the former and return true
1083 if the latter might be nonzero. */
1086 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1088 if (expr->expr_type == EXPR_ARRAY)
1089 return gfc_get_array_constructor_size (size, expr->value.constructor);
1090 else if (expr->rank > 0)
1092 /* Calculate everything at run time. */
1093 mpz_set_ui (*size, 0);
1098 /* A single element. */
1099 mpz_set_ui (*size, 1);
1105 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1106 of array constructor C. */
1109 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1117 mpz_set_ui (*size, 0);
1122 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1125 if (i && gfc_iterator_has_dynamic_bounds (i))
1129 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1132 /* Multiply the static part of the element size by the
1133 number of iterations. */
1134 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1135 mpz_fdiv_q (val, val, i->step->value.integer);
1136 mpz_add_ui (val, val, 1);
1137 if (mpz_sgn (val) > 0)
1138 mpz_mul (len, len, val);
1140 mpz_set_ui (len, 0);
1142 mpz_add (*size, *size, len);
1151 /* Make sure offset is a variable. */
1154 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1157 /* We should have already created the offset variable. We cannot
1158 create it here because we may be in an inner scope. */
1159 gcc_assert (*offsetvar != NULL_TREE);
1160 gfc_add_modify (pblock, *offsetvar, *poffset);
1161 *poffset = *offsetvar;
1162 TREE_USED (*offsetvar) = 1;
/* Variables needed for bounds-checking.  */
/* first_len/first_len_val record the string length of the first CHARACTER
   constructor element so later elements can be checked against it (see
   gfc_trans_array_ctor_element below); typespec_chararray_ctor suppresses
   that check when the constructor has an explicit typespec.  */
static bool first_len;
static tree first_len_val;
static bool typespec_chararray_ctor;
1172 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1173 tree offset, gfc_se * se, gfc_expr * expr)
/* Assign the value of constructor element EXPR to element OFFSET of the
   array described by descriptor DESC, appending the generated code to
   PBLOCK.  SE carries the pre/post blocks for the element expression.  */
1177 gfc_conv_expr (se, expr);
1179 /* Store the value. */
1180 tmp = build_fold_indirect_ref_loc (input_location,
1181 gfc_conv_descriptor_data_get (desc));
1182 tmp = gfc_build_array_ref (tmp, offset, NULL);
1184 if (expr->ts.type == BT_CHARACTER)
1186 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
/* Element length in characters: element size in bytes divided by the
   byte width of one character of this kind.  */
1189 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1190 esize = fold_convert (gfc_charlen_type_node, esize);
1191 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1192 gfc_charlen_type_node, esize,
1193 build_int_cst (gfc_charlen_type_node,
1194 gfc_character_kinds[i].bit_size / 8));
1196 gfc_conv_string_parameter (se);
1197 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1199 /* The temporary is an array of pointers. */
1200 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1201 gfc_add_modify (&se->pre, tmp, se->expr);
1205 /* The temporary is an array of string values. */
1206 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1207 /* We know the temporary and the value will be the same length,
1208 so can use memcpy. */
1209 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1210 se->string_length, se->expr, expr->ts.kind);
/* With -fcheck=bounds and no typespec on the constructor, record the
   length of the first element and compare later elements against it.  */
1212 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1216 gfc_add_modify (&se->pre, first_len_val,
1222 /* Verify that all constructor elements are of the same
1224 tree cond = fold_build2_loc (input_location, NE_EXPR,
1225 boolean_type_node, first_len_val,
1227 gfc_trans_runtime_check
1228 (true, false, cond, &se->pre, &expr->where,
1229 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1230 fold_convert (long_integer_type_node, first_len_val),
1231 fold_convert (long_integer_type_node, se->string_length));
1237 /* TODO: Should the frontend already have done this conversion? */
1238 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1239 gfc_add_modify (&se->pre, tmp, se->expr);
1242 gfc_add_block_to_block (pblock, &se->pre);
1243 gfc_add_block_to_block (pblock, &se->post);
1247 /* Add the contents of an array to the constructor. DYNAMIC is as for
1248 gfc_trans_array_constructor_value. */
1251 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1252 tree type ATTRIBUTE_UNUSED,
1253 tree desc, gfc_expr * expr,
1254 tree * poffset, tree * offsetvar,
1265 /* We need this to be a variable so we can increment it. */
1266 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1268 gfc_init_se (&se, NULL);
1270 /* Walk the array expression. */
1271 ss = gfc_walk_expr (expr);
1272 gcc_assert (ss != gfc_ss_terminator);
1274 /* Initialize the scalarizer. */
1275 gfc_init_loopinfo (&loop);
1276 gfc_add_ss_to_loop (&loop, ss);
1278 /* Initialize the loop. */
1279 gfc_conv_ss_startstride (&loop);
1280 gfc_conv_loop_setup (&loop, &expr->where);
1282 /* Make sure the constructed array has room for the new data. */
1285 /* Set SIZE to the total number of elements in the subarray. */
1286 size = gfc_index_one_node;
1287 for (n = 0; n < loop.dimen; n++)
1289 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1290 gfc_index_one_node);
1291 size = fold_build2_loc (input_location, MULT_EXPR,
1292 gfc_array_index_type, size, tmp);
1295 /* Grow the constructed array by SIZE elements. */
1296 gfc_grow_array (&loop.pre, desc, size);
1299 /* Make the loop body. */
1300 gfc_mark_ss_chain_used (ss, 1);
1301 gfc_start_scalarized_body (&loop, &body);
1302 gfc_copy_loopinfo_to_se (&se, &loop);
/* Store the current element at *POFFSET, then advance the offset.  */
1305 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1306 gcc_assert (se.ss == gfc_ss_terminator);
1308 /* Increment the offset. */
1309 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1310 *poffset, gfc_index_one_node);
1311 gfc_add_modify (&body, *poffset, tmp);
1313 /* Finish the loop. */
1314 gfc_trans_scalarizing_loops (&loop, &body);
1315 gfc_add_block_to_block (&loop.pre, &loop.post);
1316 tmp = gfc_finish_block (&loop.pre);
1317 gfc_add_expr_to_block (pblock, tmp);
1319 gfc_cleanup_loop (&loop);
1323 /* Assign the values to the elements of an array constructor. DYNAMIC
1324 is true if descriptor DESC only contains enough data for the static
1325 size calculated by gfc_get_array_constructor_size. When true, memory
1326 for the dynamic parts must be allocated using realloc. */
1329 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1330 tree desc, gfc_constructor_base base,
1331 tree * poffset, tree * offsetvar,
1340 tree shadow_loopvar = NULL_TREE;
1341 gfc_saved_var saved_loopvar;
1344 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1346 /* If this is an iterator or an array, the offset must be a variable. */
1347 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1348 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1350 /* Shadowing the iterator avoids changing its value and saves us from
1351 keeping track of it. Further, it makes sure that there's always a
1352 backend-decl for the symbol, even if there wasn't one before,
1353 e.g. in the case of an iterator that appears in a specification
1354 expression in an interface mapping. */
1357 gfc_symbol *sym = c->iterator->var->symtree->n.sym;
1358 tree type = gfc_typenode_for_spec (&sym->ts);
1360 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1361 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1364 gfc_start_block (&body);
1366 if (c->expr->expr_type == EXPR_ARRAY)
1368 /* Array constructors can be nested. */
1369 gfc_trans_array_constructor_value (&body, type, desc,
1370 c->expr->value.constructor,
1371 poffset, offsetvar, dynamic);
1373 else if (c->expr->rank > 0)
1375 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1376 poffset, offsetvar, dynamic);
1380 /* This code really upsets the gimplifier so don't bother for now. */
/* Look ahead over any run of consecutive scalar constants; those are
   handled in bulk via a static initializer below.  */
1387 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1389 p = gfc_constructor_next (p);
1394 /* Scalar values. */
1395 gfc_init_se (&se, NULL);
1396 gfc_trans_array_ctor_element (&body, desc, *poffset,
1399 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1400 gfc_array_index_type,
1401 *poffset, gfc_index_one_node);
1405 /* Collect multiple scalar constants into a constructor. */
1406 VEC(constructor_elt,gc) *v = NULL;
1410 HOST_WIDE_INT idx = 0;
1413 /* Count the number of consecutive scalar constants. */
1414 while (p && !(p->iterator
1415 || p->expr->expr_type != EXPR_CONSTANT))
1417 gfc_init_se (&se, NULL);
1418 gfc_conv_constant (&se, p->expr);
1420 if (c->expr->ts.type != BT_CHARACTER)
1421 se.expr = fold_convert (type, se.expr);
1422 /* For constant character array constructors we build
1423 an array of pointers. */
1424 else if (POINTER_TYPE_P (type))
1425 se.expr = gfc_build_addr_expr
1426 (gfc_get_pchar_type (p->expr->ts.kind),
1429 CONSTRUCTOR_APPEND_ELT (v,
1430 build_int_cst (gfc_array_index_type,
1434 p = gfc_constructor_next (p);
1437 bound = size_int (n - 1);
1438 /* Create an array type to hold them. */
1439 tmptype = build_range_type (gfc_array_index_type,
1440 gfc_index_zero_node, bound);
1441 tmptype = build_array_type (type, tmptype);
1443 init = build_constructor (tmptype, v);
1444 TREE_CONSTANT (init) = 1;
1445 TREE_STATIC (init) = 1;
1446 /* Create a static variable to hold the data. */
1447 tmp = gfc_create_var (tmptype, "data");
1448 TREE_STATIC (tmp) = 1;
1449 TREE_CONSTANT (tmp) = 1;
1450 TREE_READONLY (tmp) = 1;
1451 DECL_INITIAL (tmp) = init;
1454 /* Use BUILTIN_MEMCPY to assign the values. */
1455 tmp = gfc_conv_descriptor_data_get (desc);
1456 tmp = build_fold_indirect_ref_loc (input_location,
1458 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1459 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1460 init = gfc_build_addr_expr (NULL_TREE, init);
1462 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1463 bound = build_int_cst (size_type_node, n * size);
1464 tmp = build_call_expr_loc (input_location,
1465 builtin_decl_explicit (BUILT_IN_MEMCPY),
1466 3, tmp, init, bound);
1467 gfc_add_expr_to_block (&body, tmp);
1469 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1470 gfc_array_index_type, *poffset,
1471 build_int_cst (gfc_array_index_type, n));
1473 if (!INTEGER_CST_P (*poffset))
1475 gfc_add_modify (&body, *offsetvar, *poffset);
1476 *poffset = *offsetvar;
1480 /* The frontend should already have done any expansions
1484 /* Pass the code as is. */
1485 tmp = gfc_finish_block (&body);
1486 gfc_add_expr_to_block (pblock, tmp);
1490 /* Build the implied do-loop. */
1491 stmtblock_t implied_do_block;
1499 loopbody = gfc_finish_block (&body);
1501 /* Create a new block that holds the implied-do loop. A temporary
1502 loop-variable is used. */
1503 gfc_start_block(&implied_do_block);
1505 /* Initialize the loop. */
1506 gfc_init_se (&se, NULL);
1507 gfc_conv_expr_val (&se, c->iterator->start);
1508 gfc_add_block_to_block (&implied_do_block, &se.pre);
1509 gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
1511 gfc_init_se (&se, NULL);
1512 gfc_conv_expr_val (&se, c->iterator->end);
1513 gfc_add_block_to_block (&implied_do_block, &se.pre);
1514 end = gfc_evaluate_now (se.expr, &implied_do_block);
1516 gfc_init_se (&se, NULL);
1517 gfc_conv_expr_val (&se, c->iterator->step);
1518 gfc_add_block_to_block (&implied_do_block, &se.pre);
1519 step = gfc_evaluate_now (se.expr, &implied_do_block);
1521 /* If this array expands dynamically, and the number of iterations
1522 is not constant, we won't have allocated space for the static
1523 part of C->EXPR's size. Do that now. */
1524 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1526 /* Get the number of iterations. */
1527 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1529 /* Get the static part of C->EXPR's size. */
1530 gfc_get_array_constructor_element_size (&size, c->expr);
1531 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1533 /* Grow the array by TMP * TMP2 elements. */
1534 tmp = fold_build2_loc (input_location, MULT_EXPR,
1535 gfc_array_index_type, tmp, tmp2);
1536 gfc_grow_array (&implied_do_block, desc, tmp);
1539 /* Generate the loop body. */
1540 exit_label = gfc_build_label_decl (NULL_TREE);
1541 gfc_start_block (&body);
1543 /* Generate the exit condition. Depending on the sign of
1544 the step variable we have to generate the correct
1546 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1547 step, build_int_cst (TREE_TYPE (step), 0));
1548 cond = fold_build3_loc (input_location, COND_EXPR,
1549 boolean_type_node, tmp,
1550 fold_build2_loc (input_location, GT_EXPR,
1551 boolean_type_node, shadow_loopvar, end),
1552 fold_build2_loc (input_location, LT_EXPR,
1553 boolean_type_node, shadow_loopvar, end));
1554 tmp = build1_v (GOTO_EXPR, exit_label);
1555 TREE_USED (exit_label) = 1;
1556 tmp = build3_v (COND_EXPR, cond, tmp,
1557 build_empty_stmt (input_location));
1558 gfc_add_expr_to_block (&body, tmp);
1560 /* The main loop body. */
1561 gfc_add_expr_to_block (&body, loopbody);
1563 /* Increase loop variable by step. */
1564 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1565 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1567 gfc_add_modify (&body, shadow_loopvar, tmp);
1569 /* Finish the loop. */
1570 tmp = gfc_finish_block (&body);
1571 tmp = build1_v (LOOP_EXPR, tmp);
1572 gfc_add_expr_to_block (&implied_do_block, tmp);
1574 /* Add the exit label. */
1575 tmp = build1_v (LABEL_EXPR, exit_label);
1576 gfc_add_expr_to_block (&implied_do_block, tmp);
1578 /* Finish the implied-do loop. */
1579 tmp = gfc_finish_block(&implied_do_block);
1580 gfc_add_expr_to_block(pblock, tmp);
/* Undo the iterator shadowing set up above.  */
1582 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1589 /* A catch-all to obtain the string length for anything that is not
1590 a substring of non-constant length, a constant, array or variable. */
1593 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1598 /* Don't bother if we already know the length is a constant. */
1599 if (*len && INTEGER_CST_P (*len))
/* If the typespec carries a constant length, use it directly.  */
1602 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1603 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1606 gfc_conv_const_charlen (e->ts.u.cl);
1607 *len = e->ts.u.cl->backend_decl;
1611 /* Otherwise, be brutal even if inefficient. */
1612 ss = gfc_walk_expr (e);
1613 gfc_init_se (&se, NULL);
1615 /* No function call, in case of side effects. */
1616 se.no_function_call = 1;
1617 if (ss == gfc_ss_terminator)
1618 gfc_conv_expr (&se, e);
1620 gfc_conv_expr_descriptor (&se, e, ss);
1622 /* Fix the value. */
1623 *len = gfc_evaluate_now (se.string_length, &se.pre);
1625 gfc_add_block_to_block (block, &se.pre);
1626 gfc_add_block_to_block (block, &se.post);
/* Cache the length on the charlen so it is not recomputed.  */
1628 e->ts.u.cl->backend_decl = *len;
1633 /* Figure out the string length of a variable reference expression.
1634 Used by get_array_ctor_strlen. */
1637 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1643 /* Don't bother if we already know the length is a constant. */
1644 if (*len && INTEGER_CST_P (*len))
1647 ts = &expr->symtree->n.sym->ts;
1648 for (ref = expr->ref; ref; ref = ref->next)
1653 /* Array references don't change the string length. */
1657 /* Use the length of the component. */
1658 ts = &ref->u.c.component->ts;
/* Substring reference: fall back to the general routine unless
   both bounds are constants.  */
1662 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1663 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1665 /* Note that this might evaluate expr. */
1666 get_array_ctor_all_strlen (block, expr, len);
/* Constant substring: length = end - start + 1, via GMP.  */
1669 mpz_init_set_ui (char_len, 1);
1670 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1671 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1672 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1673 *len = convert (gfc_charlen_type_node, *len);
1674 mpz_clear (char_len);
1682 *len = ts->u.cl->backend_decl;
1686 /* Figure out the string length of a character array constructor.
1687 If len is NULL, don't calculate the length; this happens for recursive calls
1688 when a sub-array-constructor is an element but not at the first position,
1689 so when we're not interested in the length.
1690 Returns TRUE if all elements are character constants. */
1693 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
/* An empty constructor yields length zero.  */
1700 if (gfc_constructor_first (base) == NULL)
1703 *len = build_int_cstu (gfc_charlen_type_node, 0);
1707 /* Loop over all constructor elements to find out is_const, but in len we
1708 want to store the length of the first, not the last, element. We can
1709 of course exit the loop as soon as is_const is found to be false. */
1710 for (c = gfc_constructor_first (base);
1711 c && is_const; c = gfc_constructor_next (c))
1713 switch (c->expr->expr_type)
1716 if (len && !(*len && INTEGER_CST_P (*len)))
1717 *len = build_int_cstu (gfc_charlen_type_node,
1718 c->expr->value.character.length);
/* Nested constructor: recurse to inspect its elements.  */
1722 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1729 get_array_ctor_var_strlen (block, c->expr, len);
1735 get_array_ctor_all_strlen (block, c->expr, len);
1739 /* After the first iteration, we don't want the length modified. */
1746 /* Check whether the array constructor C consists entirely of constant
1747 elements, and if so returns the number of those elements, otherwise
1748 return zero. Note, an empty or NULL array constructor returns zero. */
1750 unsigned HOST_WIDE_INT
1751 gfc_constant_array_constructor_p (gfc_constructor_base base)
1753 unsigned HOST_WIDE_INT nelem = 0;
1755 gfc_constructor *c = gfc_constructor_first (base);
/* Any iterator, nonzero rank, or non-constant element disqualifies
   the constructor.  */
1759 || c->expr->rank > 0
1760 || c->expr->expr_type != EXPR_CONSTANT)
1762 c = gfc_constructor_next (c);
1769 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1770 and the tree type of its elements, TYPE, return a static constant
1771 variable that is compile-time initialized. */
1774 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1776 tree tmptype, init, tmp;
1777 HOST_WIDE_INT nelem;
1782 VEC(constructor_elt,gc) *v = NULL;
1784 /* First traverse the constructor list, converting the constants
1785 to tree to build an initializer. */
1787 c = gfc_constructor_first (expr->value.constructor);
1790 gfc_init_se (&se, NULL);
1791 gfc_conv_constant (&se, c->expr);
1792 if (c->expr->ts.type != BT_CHARACTER)
1793 se.expr = fold_convert (type, se.expr);
1794 else if (POINTER_TYPE_P (type))
1795 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
1797 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
1799 c = gfc_constructor_next (c);
1803 /* Next determine the tree type for the array. We use the gfortran
1804 front-end's gfc_get_nodesc_array_type in order to create a suitable
1805 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
1807 memset (&as, 0, sizeof (gfc_array_spec));
1809 as.rank = expr->rank;
1810 as.type = AS_EXPLICIT;
1813 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1814 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
/* Multidimensional case: bounds come from the constructor's shape,
   with each dimension made zero-based.  */
1818 for (i = 0; i < expr->rank; i++)
1820 int tmp = (int) mpz_get_si (expr->shape[i]);
1821 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1822 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
1826 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
1828 /* as is not needed anymore. */
1829 for (i = 0; i < as.rank + as.corank; i++)
1831 gfc_free_expr (as.lower[i]);
1832 gfc_free_expr (as.upper[i]);
/* Build the initializer and a static, read-only variable holding it.  */
1835 init = build_constructor (tmptype, v);
1837 TREE_CONSTANT (init) = 1;
1838 TREE_STATIC (init) = 1;
1840 tmp = gfc_create_var (tmptype, "A");
1841 TREE_STATIC (tmp) = 1;
1842 TREE_CONSTANT (tmp) = 1;
1843 TREE_READONLY (tmp) = 1;
1844 DECL_INITIAL (tmp) = init;
1850 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
1851 This mostly initializes the scalarizer state info structure with the
1852 appropriate values to directly use the array created by the function
1853 gfc_build_constant_array_constructor. */
1856 trans_constant_array_constructor (gfc_ss * ss, tree type)
1858 gfc_array_info *info;
1862 tmp = gfc_build_constant_array_constructor (ss->expr, type);
1864 info = &ss->data.info;
1866 info->descriptor = tmp;
1867 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
1868 info->offset = gfc_index_zero_node;
/* The static array is packed and zero-based (see
   gfc_build_constant_array_constructor), so deltas and starts are zero
   and every stride is one.  */
1870 for (i = 0; i < ss->dimen; i++)
1872 info->delta[i] = gfc_index_zero_node;
1873 info->start[i] = gfc_index_zero_node;
1874 info->end[i] = gfc_index_zero_node;
1875 info->stride[i] = gfc_index_one_node;
1879 /* Helper routine of gfc_trans_array_constructor to determine if the
1880 bounds of the loop specified by LOOP are constant and simple enough
1881 to use with trans_constant_array_constructor. Returns the
1882 iteration count of the loop if suitable, and NULL_TREE otherwise. */
1885 constant_array_constructor_loop_size (gfc_loopinfo * loop)
1887 tree size = gfc_index_one_node;
1891 for (i = 0; i < loop->dimen; i++)
1893 /* If the bounds aren't constant, return NULL_TREE. */
1894 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
1896 if (!integer_zerop (loop->from[i]))
1898 /* Only allow nonzero "from" in one-dimensional arrays. */
1899 if (loop->dimen != 1)
1901 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1902 gfc_array_index_type,
1903 loop->to[i], loop->from[i]);
/* Accumulate size *= (to - from + 1) over the dimensions.  */
1907 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1908 tmp, gfc_index_one_node);
1909 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1917 /* Array constructors are handled by constructing a temporary, then using that
1918 within the scalarization loop. This is not optimal, but seems by far the
1922 gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
1924 gfc_constructor_base c;
1931 bool old_first_len, old_typespec_chararray_ctor;
1932 tree old_first_len_val;
1934 /* Save the old values for nested checking. */
1935 old_first_len = first_len;
1936 old_first_len_val = first_len_val;
1937 old_typespec_chararray_ctor = typespec_chararray_ctor;
1939 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
1940 typespec was given for the array constructor. */
1941 typespec_chararray_ctor = (ss->expr->ts.u.cl
1942 && ss->expr->ts.u.cl->length_from_typespec);
/* With -fcheck=bounds, create the variable that records the length of
   the first CHARACTER element for later comparisons.  */
1944 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
1945 && ss->expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
1947 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
1951 gcc_assert (ss->dimen == loop->dimen);
1953 c = ss->expr->value.constructor;
1954 if (ss->expr->ts.type == BT_CHARACTER)
1958 /* get_array_ctor_strlen walks the elements of the constructor, if a
1959 typespec was given, we already know the string length and want the one
1961 if (typespec_chararray_ctor && ss->expr->ts.u.cl->length
1962 && ss->expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
1966 const_string = false;
1967 gfc_init_se (&length_se, NULL);
1968 gfc_conv_expr_type (&length_se, ss->expr->ts.u.cl->length,
1969 gfc_charlen_type_node);
1970 ss->string_length = length_se.expr;
1971 gfc_add_block_to_block (&loop->pre, &length_se.pre);
1972 gfc_add_block_to_block (&loop->post, &length_se.post);
1975 const_string = get_array_ctor_strlen (&loop->pre, c,
1976 &ss->string_length);
1978 /* Complex character array constructors should have been taken care of
1979 and not end up here. */
1980 gcc_assert (ss->string_length);
1982 ss->expr->ts.u.cl->backend_decl = ss->string_length;
1984 type = gfc_get_character_type_len (ss->expr->ts.kind, ss->string_length);
1986 type = build_pointer_type (type);
1989 type = gfc_typenode_for_spec (&ss->expr->ts);
1991 /* See if the constructor determines the loop bounds. */
1994 if (ss->expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
1996 /* We have a multidimensional parameter. */
1998 for (n = 0; n < ss->expr->rank; n++)
2000 loop->from[n] = gfc_index_zero_node;
2001 loop->to[n] = gfc_conv_mpz_to_tree (ss->expr->shape [n],
2002 gfc_index_integer_kind);
2003 loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2004 gfc_array_index_type,
2005 loop->to[n], gfc_index_one_node);
2009 if (loop->to[0] == NULL_TREE)
2013 /* We should have a 1-dimensional, zero-based loop. */
2014 gcc_assert (loop->dimen == 1);
2015 gcc_assert (integer_zerop (loop->from[0]));
2017 /* Split the constructor size into a static part and a dynamic part.
2018 Allocate the static size up-front and record whether the dynamic
2019 size might be nonzero. */
2021 dynamic = gfc_get_array_constructor_size (&size, c);
2022 mpz_sub_ui (size, size, 1);
2023 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2027 /* Special case constant array constructors. */
2030 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2033 tree size = constant_array_constructor_loop_size (loop);
/* Only take the fast path when the loop iteration count matches the
   element count exactly.  */
2034 if (size && compare_tree_int (size, nelem) == 0)
2036 trans_constant_array_constructor (ss, type);
2042 if (TREE_CODE (loop->to[0]) == VAR_DECL)
2045 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, ss,
2046 type, NULL_TREE, dynamic, true, false, where);
2048 desc = ss->data.info.descriptor;
2049 offset = gfc_index_zero_node;
2050 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2051 TREE_NO_WARNING (offsetvar) = 1;
2052 TREE_USED (offsetvar) = 0;
2053 gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
2054 &offset, &offsetvar, dynamic);
2056 /* If the array grows dynamically, the upper bound of the loop variable
2057 is determined by the array's final upper bound. */
2060 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2061 gfc_array_index_type,
2062 offsetvar, gfc_index_one_node);
2063 tmp = gfc_evaluate_now (tmp, &loop->pre);
2064 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2065 if (loop->to[0] && TREE_CODE (loop->to[0]) == VAR_DECL)
2066 gfc_add_modify (&loop->pre, loop->to[0], tmp);
2071 if (TREE_USED (offsetvar))
2072 pushdecl (offsetvar);
2074 gcc_assert (INTEGER_CST_P (offset));
2077 /* Disable bound checking for now because it's probably broken. */
2078 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2085 /* Restore old values of globals. */
2086 first_len = old_first_len;
2087 first_len_val = old_first_len_val;
2088 typespec_chararray_ctor = old_typespec_chararray_ctor;
2092 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2093 called after evaluating all of INFO's vector dimensions. Go through
2094 each such vector dimension and see if we can now fill in any missing
2098 set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss * ss)
2100 gfc_array_info *info;
2108 info = &ss->data.info;
2110 for (n = 0; n < loop->dimen; n++)
2113 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
2114 && loop->to[n] == NULL)
2116 /* Loop variable N indexes vector dimension DIM, and we don't
2117 yet know the upper bound of loop variable N. Set it to the
2118 difference between the vector's upper and lower bounds. */
2119 gcc_assert (loop->from[n] == gfc_index_zero_node);
2120 gcc_assert (info->subscript[dim]
2121 && info->subscript[dim]->type == GFC_SS_VECTOR);
/* ubound - lbound of the index vector gives the zero-based
   extent of the loop.  */
2123 gfc_init_se (&se, NULL);
2124 desc = info->subscript[dim]->data.info.descriptor;
2125 zero = gfc_rank_cst[0];
2126 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2127 gfc_array_index_type,
2128 gfc_conv_descriptor_ubound_get (desc, zero),
2129 gfc_conv_descriptor_lbound_get (desc, zero));
2130 tmp = gfc_evaluate_now (tmp, &loop->pre);
2137 /* Add the pre and post chains for all the scalar expressions in a SS chain
2138 to loop. This is called after the loop parameters have been calculated,
2139 but before the actual scalarizing loops. */
2142 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2148 /* TODO: This can generate bad code if there are ordering dependencies,
2149 e.g., a callee allocated function and an unknown size constructor. */
2150 gcc_assert (ss != NULL);
2152 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2159 /* Scalar expression. Evaluate this now. This includes elemental
2160 dimension indices, but not array section bounds. */
2161 gfc_init_se (&se, NULL);
2162 gfc_conv_expr (&se, ss->expr);
2163 gfc_add_block_to_block (&loop->pre, &se.pre);
2165 if (ss->expr->ts.type != BT_CHARACTER)
2167 /* Move the evaluation of scalar expressions outside the
2168 scalarization loop, except for WHERE assignments. */
2170 se.expr = convert(gfc_array_index_type, se.expr);
2172 se.expr = gfc_evaluate_now (se.expr, &loop->pre);
2173 gfc_add_block_to_block (&loop->pre, &se.post);
2176 gfc_add_block_to_block (&loop->post, &se.post);
2178 ss->data.scalar.expr = se.expr;
2179 ss->string_length = se.string_length;
2182 case GFC_SS_REFERENCE:
2183 /* Scalar argument to elemental procedure. Evaluate this
2185 gfc_init_se (&se, NULL);
2186 gfc_conv_expr (&se, ss->expr);
2187 gfc_add_block_to_block (&loop->pre, &se.pre);
2188 gfc_add_block_to_block (&loop->post, &se.post);
2190 ss->data.scalar.expr = gfc_evaluate_now (se.expr, &loop->pre);
2191 ss->string_length = se.string_length;
2194 case GFC_SS_SECTION:
2195 /* Add the expressions for scalar and vector subscripts. */
2196 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2197 if (ss->data.info.subscript[n])
2198 gfc_add_loop_ss_code (loop, ss->data.info.subscript[n], true,
2201 set_vector_loop_bounds (loop, ss);
2205 /* Get the vector's descriptor and store it in SS. */
2206 gfc_init_se (&se, NULL);
2207 gfc_conv_expr_descriptor (&se, ss->expr, gfc_walk_expr (ss->expr));
2208 gfc_add_block_to_block (&loop->pre, &se.pre);
2209 gfc_add_block_to_block (&loop->post, &se.post);
2210 ss->data.info.descriptor = se.expr;
2213 case GFC_SS_INTRINSIC:
2214 gfc_add_intrinsic_ss_code (loop, ss);
2217 case GFC_SS_FUNCTION:
2218 /* Array function return value. We call the function and save its
2219 result in a temporary for use inside the loop. */
2220 gfc_init_se (&se, NULL);
2223 gfc_conv_expr (&se, ss->expr);
2224 gfc_add_block_to_block (&loop->pre, &se.pre);
2225 gfc_add_block_to_block (&loop->post, &se.post);
2226 ss->string_length = se.string_length;
/* Array constructor: obtain the string length from the typespec if
   it is not yet known, then build the constructor temporary.  */
2229 case GFC_SS_CONSTRUCTOR:
2230 if (ss->expr->ts.type == BT_CHARACTER
2231 && ss->string_length == NULL
2232 && ss->expr->ts.u.cl
2233 && ss->expr->ts.u.cl->length)
2235 gfc_init_se (&se, NULL);
2236 gfc_conv_expr_type (&se, ss->expr->ts.u.cl->length,
2237 gfc_charlen_type_node);
2238 ss->string_length = se.expr;
2239 gfc_add_block_to_block (&loop->pre, &se.pre);
2240 gfc_add_block_to_block (&loop->post, &se.post);
2242 gfc_trans_array_constructor (loop, ss, where);
2246 case GFC_SS_COMPONENT:
2247 /* Do nothing. These are handled elsewhere. */
2257 /* Translate expressions for the descriptor and data pointer of a SS. */
2261 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2266 /* Get the descriptor for the array to be scalarized. */
2267 gcc_assert (ss->expr->expr_type == EXPR_VARIABLE);
2268 gfc_init_se (&se, NULL);
2269 se.descriptor_only = 1;
2270 gfc_conv_expr_lhs (&se, ss->expr);
2271 gfc_add_block_to_block (block, &se.pre);
2272 ss->data.info.descriptor = se.expr;
2273 ss->string_length = se.string_length;
/* NOTE(review): BASE presumably guards whether the data pointer and
   offset below are also set up — the conditional is not visible in this
   view; confirm against the full source.  */
2277 /* Also the data pointer. */
2278 tmp = gfc_conv_array_data (se.expr);
2279 /* If this is a variable or address of a variable we use it directly.
2280 Otherwise we must evaluate it now to avoid breaking dependency
2281 analysis by pulling the expressions for elemental array indices
2284 || (TREE_CODE (tmp) == ADDR_EXPR
2285 && DECL_P (TREE_OPERAND (tmp, 0)))))
2286 tmp = gfc_evaluate_now (tmp, block);
2287 ss->data.info.data = tmp;
2289 tmp = gfc_conv_array_offset (se.expr);
2290 ss->data.info.offset = gfc_evaluate_now (tmp, block);
2292 /* Make absolutely sure that the saved_offset is indeed saved
2293 so that the variable is still accessible after the loops
2295 ss->data.info.saved_offset = ss->data.info.offset;
2300 /* Initialize a gfc_loopinfo structure. */
2303 gfc_init_loopinfo (gfc_loopinfo * loop)
2307 memset (loop, 0, sizeof (gfc_loopinfo));
2308 gfc_init_block (&loop->pre);
2309 gfc_init_block (&loop->post);
2311 /* Initially scalarize in order and default to no loop reversal. */
2312 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2315 loop->reverse[n] = GFC_INHIBIT_REVERSE;
/* Start with an empty SS chain.  */
2318 loop->ss = gfc_ss_terminator;
2322 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2326 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2332 /* Return an expression for the data pointer of an array. */
2335 gfc_conv_array_data (tree descriptor)
2339 type = TREE_TYPE (descriptor);
2340 if (GFC_ARRAY_TYPE_P (type))
2342 if (TREE_CODE (type) == POINTER_TYPE)
2346 /* Descriptorless arrays. */
2347 return gfc_build_addr_expr (NULL_TREE, descriptor);
/* Full descriptor: read the data field.  */
2351 return gfc_conv_descriptor_data_get (descriptor);
2355 /* Return an expression for the base offset of an array. */
2358 gfc_conv_array_offset (tree descriptor)
2362 type = TREE_TYPE (descriptor);
/* Descriptorless arrays keep the offset in the type's lang-specific
   info; otherwise read it from the descriptor.  */
2363 if (GFC_ARRAY_TYPE_P (type))
2364 return GFC_TYPE_ARRAY_OFFSET (type);
2366 return gfc_conv_descriptor_offset_get (descriptor);
2370 /* Get an expression for the array stride. */
2373 gfc_conv_array_stride (tree descriptor, int dim)
2378 type = TREE_TYPE (descriptor);
2380 /* For descriptorless arrays use the array size. */
2381 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2382 if (tmp != NULL_TREE)
/* Otherwise read the stride from the descriptor.  */
2385 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2390 /* Like gfc_conv_array_stride, but for the lower bound. */
2393 gfc_conv_array_lbound (tree descriptor, int dim)
2398 type = TREE_TYPE (descriptor);
/* Use the compile-time bound recorded in the type if present; else
   read it from the descriptor.  */
2400 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2401 if (tmp != NULL_TREE)
2404 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2409 /* Like gfc_conv_array_stride, but for the upper bound. */
2412 gfc_conv_array_ubound (tree descriptor, int dim)
2417 type = TREE_TYPE (descriptor);
/* Use the compile-time bound recorded in the type if present.  */
2419 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2420 if (tmp != NULL_TREE)
2423 /* This should only ever happen when passing an assumed shape array
2424 as an actual parameter. The value will never be used. */
2425 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2426 return gfc_index_zero_node;
2428 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2433 /* Generate code to perform an array index bound check. */
2436 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2437 locus * where, bool check_upper)
2440 tree tmp_lo, tmp_up;
2443 const char * name = NULL;
/* Nothing to do unless -fcheck=bounds is active.  */
2445 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2448 descriptor = ss->data.info.descriptor;
/* Evaluate INDEX once so the same value is tested and reported.  */
2450 index = gfc_evaluate_now (index, &se->pre);
2452 /* We find a name for the error message. */
2453 name = ss->expr->symtree->n.sym->name;
2454 gcc_assert (name != NULL);
2456 if (TREE_CODE (descriptor) == VAR_DECL)
2457 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2459 /* If upper bound is present, include both bounds in the error message. */
2462 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2463 tmp_up = gfc_conv_array_ubound (descriptor, n);
2466 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2467 "outside of expected range (%%ld:%%ld)", n+1, name);
2469 asprintf (&msg, "Index '%%ld' of dimension %d "
2470 "outside of expected range (%%ld:%%ld)", n+1);
/* Emit two separate checks — index < lbound and index > ubound — so
   each report includes the full expected range.  */
2472 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2474 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2475 fold_convert (long_integer_type_node, index),
2476 fold_convert (long_integer_type_node, tmp_lo),
2477 fold_convert (long_integer_type_node, tmp_up));
2478 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2480 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2481 fold_convert (long_integer_type_node, index),
2482 fold_convert (long_integer_type_node, tmp_lo),
2483 fold_convert (long_integer_type_node, tmp_up));
2488 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2491 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2492 "below lower bound of %%ld", n+1, name);
2494 asprintf (&msg, "Index '%%ld' of dimension %d "
2495 "below lower bound of %%ld", n+1);
2497 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2499 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2500 fold_convert (long_integer_type_node, index),
2501 fold_convert (long_integer_type_node, tmp_lo));
2509 /* Return the offset for an index. Performs bound checking for elemental
2510 dimensions. Single element references are processed separately.
2511 DIM is the array dimension, I is the loop dimension. */
/* SE: scalarizer state (index helpers append to se->pre).
   SS: scalarization entry for the array; AR: the array reference,
   or NULL for a temporary (then the default case below is taken).
   STRIDE: stride to multiply the final index by.  */
2514 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2515 gfc_array_ref * ar, tree stride)
2517 gfc_array_info *info;
2522 info = &ss->data.info;
2524 /* Get the index into the array for this dimension. */
2527 gcc_assert (ar->type != AR_ELEMENT);
/* Dispatch on how this dimension is subscripted.  */
2528 switch (ar->dimen_type[dim])
2530 case DIMEN_THIS_IMAGE:
2534 /* Elemental dimension. */
2535 gcc_assert (info->subscript[dim]
2536 && info->subscript[dim]->type == GFC_SS_SCALAR);
2537 /* We've already translated this value outside the loop. */
2538 index = info->subscript[dim]->data.scalar.expr;
/* Upper-bound check is skipped for the last dimension of an
   assumed-size array.  */
2540 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2541 ar->as->type != AS_ASSUMED_SIZE
2542 || dim < ar->dimen - 1);
/* Vector subscript: the index comes from reading an element of the
   subscript vector.  */
2546 gcc_assert (info && se->loop);
2547 gcc_assert (info->subscript[dim]
2548 && info->subscript[dim]->type == GFC_SS_VECTOR);
2549 desc = info->subscript[dim]->data.info.descriptor;
2551 /* Get a zero-based index into the vector. */
2552 index = fold_build2_loc (input_location, MINUS_EXPR,
2553 gfc_array_index_type,
2554 se->loop->loopvar[i], se->loop->from[i]);
2556 /* Multiply the index by the stride. */
2557 index = fold_build2_loc (input_location, MULT_EXPR,
2558 gfc_array_index_type,
2559 index, gfc_conv_array_stride (desc, 0));
2561 /* Read the vector to get an index into info->descriptor. */
2562 data = build_fold_indirect_ref_loc (input_location,
2563 gfc_conv_array_data (desc));
2564 index = gfc_build_array_ref (data, index, NULL);
2565 index = gfc_evaluate_now (index, &se->pre);
2566 index = fold_convert (gfc_array_index_type, index);
2568 /* Do any bounds checking on the final info->descriptor index. */
2569 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2570 ar->as->type != AS_ASSUMED_SIZE
2571 || dim < ar->dimen - 1);
2575 /* Scalarized dimension. */
2576 gcc_assert (info && se->loop);
2578 /* Multiply the loop variable by the stride and delta. */
2579 index = se->loop->loopvar[i];
/* Skip the multiply/add when the stride is 1 or the delta is 0, to
   keep the generated trees small.  */
2580 if (!integer_onep (info->stride[dim]))
2581 index = fold_build2_loc (input_location, MULT_EXPR,
2582 gfc_array_index_type, index,
2584 if (!integer_zerop (info->delta[dim]))
2585 index = fold_build2_loc (input_location, PLUS_EXPR,
2586 gfc_array_index_type, index,
2596 /* Temporary array or derived type component. */
2597 gcc_assert (se->loop);
2598 index = se->loop->loopvar[se->loop->order[i]];
2600 /* Pointer functions can have stride[0] different from unity.
2601 Use the stride returned by the function call and stored in
2602 the descriptor for the temporary. */
2603 if (se->ss && se->ss->type == GFC_SS_FUNCTION
2605 && se->ss->expr->symtree
2606 && se->ss->expr->symtree->n.sym->result
2607 && se->ss->expr->symtree->n.sym->result->attr.pointer)
2608 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2611 if (!integer_zerop (info->delta[dim]))
2612 index = fold_build2_loc (input_location, PLUS_EXPR,
2613 gfc_array_index_type, index, info->delta[dim]);
2616 /* Multiply by the stride. */
2617 if (!integer_onep (stride))
2618 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2625 /* Build a scalarized reference to an array. */
/* SE: scalarizer state; on return se->expr holds the element
   reference.  AR: the array reference, or NULL for a temporary
   (see gfc_conv_tmp_array_ref).  */
2628 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
2630 gfc_array_info *info;
2631 tree decl = NULL_TREE;
2638 info = &ss->data.info;
/* The innermost loop dimension supplies the varying index.  */
2640 n = se->loop->order[0];
2644 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
2645 /* Add the offset for this dimension to the stored offset for all other
2647 if (!integer_zerop (info->offset))
2648 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2649 index, info->offset);
/* For subreference arrays pass the declaration along so the array
   reference can carry alias information.  */
2651 if (se->ss->expr && is_subref_array (se->ss->expr))
2652 decl = se->ss->expr->symtree->n.sym->backend_decl;
2654 tmp = build_fold_indirect_ref_loc (input_location,
2656 se->expr = gfc_build_array_ref (tmp, index, decl);
2660 /* Translate access of temporary array. */
/* Wrapper around gfc_conv_scalarized_array_ref for scalarizer
   temporaries: propagates the string length, builds the element
   reference (NULL array ref selects the temporary path), then
   advances the SS chain.  */
2663 gfc_conv_tmp_array_ref (gfc_se * se)
2665 se->string_length = se->ss->string_length;
2666 gfc_conv_scalarized_array_ref (se, NULL);
2667 gfc_advance_se_ss_chain (se);
2670 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
/* Compile-time constants are folded into *CST_OFFSET immediately;
   everything else is accumulated into the runtime expression *OFFSET.
   Keeping the constant part separate lets the caller emit a single
   final addition of loop-invariant terms.  */
2673 add_to_offset (tree *cst_offset, tree *offset, tree t)
2675 if (TREE_CODE (t) == INTEGER_CST)
2676 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
/* Avoid building "0 + t": replace a zero offset outright.  */
2679 if (!integer_zerop (*offset))
2680 *offset = fold_build2_loc (input_location, PLUS_EXPR,
2681 gfc_array_index_type, *offset, t);
2687 /* Build an array reference. se->expr already holds the array descriptor.
2688 This should be either a variable, indirect variable reference or component
2689 reference. For arrays which do not have a descriptor, se->expr will be
2691 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
/* SYM: symbol of the referenced array (used for bound-check messages
   and for picking up bounds of temporaries); WHERE: source location
   for runtime diagnostics.  */
2694 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
2698 tree offset, cst_offset;
/* Coarray handling: strip descriptor/pointer wrapping so se->expr
   refers to the actual data.  */
2706 gcc_assert (ar->codimen);
2708 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
2709 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
2712 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
2713 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
2714 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
2716 /* Use the actual tree type and not the wrapped coarray. */
2717 if (!se->want_pointer)
2718 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
2725 /* Handle scalarized references separately. */
2726 if (ar->type != AR_ELEMENT)
2728 gfc_conv_scalarized_array_ref (se, ar);
2729 gfc_advance_se_ss_chain (se);
/* Single-element (AR_ELEMENT) reference: build the flat offset.  */
2733 cst_offset = offset = gfc_index_zero_node;
2734 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
2736 /* Calculate the offsets from all the dimensions. Make sure to associate
2737 the final offset so that we form a chain of loop invariant summands. */
2738 for (n = ar->dimen - 1; n >= 0; n--)
2740 /* Calculate the index for this dimension. */
2741 gfc_init_se (&indexse, se);
2742 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
2743 gfc_add_block_to_block (&se->pre, &indexse.pre);
2745 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2747 /* Check array bounds. */
2751 /* Evaluate the indexse.expr only once. */
2752 indexse.expr = save_expr (indexse.expr);
/* Lower-bound check.  For temporaries the descriptor bounds are not
   meaningful, so re-evaluate the declared bound expression instead.  */
2755 tmp = gfc_conv_array_lbound (se->expr, n);
2756 if (sym->attr.temporary)
2758 gfc_init_se (&tmpse, se);
2759 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
2760 gfc_array_index_type);
2761 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2765 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2767 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2768 "below lower bound of %%ld", n+1, sym->name);
2769 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2770 fold_convert (long_integer_type_node,
2772 fold_convert (long_integer_type_node, tmp));
2775 /* Upper bound, but not for the last dimension of assumed-size
2777 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
2779 tmp = gfc_conv_array_ubound (se->expr, n);
2780 if (sym->attr.temporary)
2782 gfc_init_se (&tmpse, se);
2783 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
2784 gfc_array_index_type);
2785 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2789 cond = fold_build2_loc (input_location, GT_EXPR,
2790 boolean_type_node, indexse.expr, tmp);
2791 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2792 "above upper bound of %%ld", n+1, sym->name);
2793 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2794 fold_convert (long_integer_type_node,
2796 fold_convert (long_integer_type_node, tmp));
2801 /* Multiply the index by the stride. */
2802 stride = gfc_conv_array_stride (se->expr, n);
2803 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2804 indexse.expr, stride);
2806 /* And add it to the total. */
2807 add_to_offset (&cst_offset, &offset, tmp);
/* Fold the accumulated compile-time-constant part in last, so it
   stays a single loop-invariant summand.  */
2810 if (!integer_zerop (cst_offset))
2811 offset = fold_build2_loc (input_location, PLUS_EXPR,
2812 gfc_array_index_type, offset, cst_offset);
2814 /* Access the calculated element. */
2815 tmp = gfc_conv_array_data (se->expr);
2816 tmp = build_fold_indirect_ref (tmp);
2817 se->expr = gfc_build_array_ref (tmp, offset, sym->backend_decl);
2821 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
2822 LOOP_DIM dimension (if any) to array's offset. */
/* PBLOCK receives any setup code produced while computing the index;
   the updated offset is evaluated into a temporary so later uses do
   not re-run the computation.  */
2825 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
2826 gfc_array_ref *ar, int array_dim, int loop_dim)
2829 gfc_array_info *info;
2832 info = &ss->data.info;
/* A fresh gfc_se is used only to collect the pre-code of the index
   computation.  */
2834 gfc_init_se (&se, NULL);
2836 se.expr = info->descriptor;
2837 stride = gfc_conv_array_stride (info->descriptor, array_dim);
2838 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
2839 gfc_add_block_to_block (pblock, &se.pre);
2841 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2842 gfc_array_index_type,
2843 info->offset, index);
2844 info->offset = gfc_evaluate_now (info->offset, pblock);
2848 /* Generate the code to be executed immediately before entering a
2849 scalarization loop. */
/* DIM: loop dimension about to be entered; FLAG: useflags mask
   selecting which SS entries participate (1 = main body, 2 =
   secondary/copying loop); PBLOCK: where the setup code goes.  */
2852 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
2853 stmtblock_t * pblock)
2856 gfc_array_info *info;
2861 /* This code will be executed before entering the scalarization loop
2862 for this dimension. */
2863 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Skip terms not used in the loop selected by FLAG.  */
2865 if ((ss->useflags & flag) == 0)
/* Only array-like terms need per-loop setup.  */
2868 if (ss->type != GFC_SS_SECTION
2869 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
2870 && ss->type != GFC_SS_COMPONENT)
2873 info = &ss->data.info;
2875 gcc_assert (dim < ss->dimen)
2876 gcc_assert (ss->dimen == loop->dimen);
2879 ar = &info->ref->u.ar;
2883 if (dim == loop->dimen - 1)
2888 /* For the time being, there is no loop reordering. */
2889 gcc_assert (i == loop->order[i]);
2892 if (dim == loop->dimen - 1)
2894 stride = gfc_conv_array_stride (info->descriptor, ss->dim[i]);
2896 /* Calculate the stride of the innermost loop. Hopefully this will
2897 allow the backend optimizers to do their stuff more effectively.
2899 info->stride0 = gfc_evaluate_now (stride, pblock);
2901 /* For the outermost loop calculate the offset due to any
2902 elemental dimensions. It will have been initialized with the
2903 base offset of the array. */
2906 for (i = 0; i < ar->dimen; i++)
2908 if (ar->dimen_type[i] != DIMEN_ELEMENT)
/* The loop-dimension argument is unused for elemental dims.  */
2911 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
2916 /* Add the offset for the previous loop dimension. */
2917 add_array_offset (pblock, loop, ss, ar, ss->dim[i], i);
2919 /* Remember this offset for the second loop. */
2920 if (dim == loop->temp_dim - 1)
2921 info->saved_offset = info->offset;
2926 /* Start a scalarized expression. Creates a scope and declares loop
/* Opens one statement block per loop dimension (outermost first, per
   loop->order), creates the loop variables, and emits the pre-loop
   setup.  PBODY receives the innermost body block.  */
2930 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
2936 gcc_assert (!loop->array_parameter);
2938 for (dim = loop->dimen - 1; dim >= 0; dim--)
2940 n = loop->order[dim];
2942 gfc_start_block (&loop->code[n]);
2944 /* Create the loop variable. */
2945 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
/* Dimensions used by the temporary participate in the secondary
   (copying) loop as well; the flags reflect that.  */
2947 if (dim < loop->temp_dim)
2951 /* Calculate values that will be constant within this loop. */
2952 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
2954 gfc_start_block (pbody);
2958 /* Generates the actual loop code for a scalarization loop. */
/* N: loop dimension being closed; PBODY: the finished loop body.
   Two code paths: an OMP_FOR is built for the outermost loop inside
   an OpenMP WORKSHARE scalarizer region; otherwise a plain
   LOOP_EXPR with explicit exit test and goto label is emitted.  */
2961 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
2962 stmtblock_t * pbody)
2973 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
2974 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
2975 && n == loop->dimen - 1)
2977 /* We create an OMP_FOR construct for the outermost scalarized loop. */
2978 init = make_tree_vec (1);
2979 cond = make_tree_vec (1);
2980 incr = make_tree_vec (1);
2982 /* Cycle statement is implemented with a goto. Exit statement must not
2983 be present for this loop. */
2984 exit_label = gfc_build_label_decl (NULL_TREE);
2985 TREE_USED (exit_label) = 1;
2987 /* Label for cycle statements (if needed). */
2988 tmp = build1_v (LABEL_EXPR, exit_label);
2989 gfc_add_expr_to_block (pbody, tmp);
2991 stmt = make_node (OMP_FOR);
2993 TREE_TYPE (stmt) = void_type_node;
2994 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
/* static schedule; NOWAIT is added when the workshare context
   allows dropping the implicit barrier.  */
2996 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
2997 OMP_CLAUSE_SCHEDULE);
2998 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
2999 = OMP_CLAUSE_SCHEDULE_STATIC;
3000 if (ompws_flags & OMPWS_NOWAIT)
3001 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3002 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3004 /* Initialize the loopvar. */
3005 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3007 OMP_FOR_INIT (stmt) = init;
3008 /* The exit condition. */
3009 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3011 loop->loopvar[n], loop->to[n]);
3012 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3013 OMP_FOR_COND (stmt) = cond;
3014 /* Increment the loopvar. */
3015 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3016 loop->loopvar[n], gfc_index_one_node);
3017 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3018 void_type_node, loop->loopvar[n], tmp);
3019 OMP_FOR_INCR (stmt) = incr;
3021 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3022 gfc_add_expr_to_block (&loop->code[n], stmt);
/* Non-OpenMP path: build an ordinary loop.  The loop may run
   backwards when the dependency analysis requested reversal and no
   temporary is involved.  */
3026 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3027 && (loop->temp_ss == NULL);
3029 loopbody = gfc_finish_block (pbody);
/* For a reversed loop, swap the from/to bounds.  */
3033 tmp = loop->from[n];
3034 loop->from[n] = loop->to[n];
3038 /* Initialize the loopvar. */
3039 if (loop->loopvar[n] != loop->from[n])
3040 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3042 exit_label = gfc_build_label_decl (NULL_TREE);
3044 /* Generate the loop body. */
3045 gfc_init_block (&block);
3047 /* The exit condition. */
3048 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3049 boolean_type_node, loop->loopvar[n], loop->to[n]);
3050 tmp = build1_v (GOTO_EXPR, exit_label);
3051 TREE_USED (exit_label) = 1;
3052 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3053 gfc_add_expr_to_block (&block, tmp);
3055 /* The main body. */
3056 gfc_add_expr_to_block (&block, loopbody);
3058 /* Increment the loopvar. */
3059 tmp = fold_build2_loc (input_location,
3060 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3061 gfc_array_index_type, loop->loopvar[n],
3062 gfc_index_one_node);
3064 gfc_add_modify (&block, loop->loopvar[n], tmp);
3066 /* Build the loop. */
3067 tmp = gfc_finish_block (&block);
3068 tmp = build1_v (LOOP_EXPR, tmp);
3069 gfc_add_expr_to_block (&loop->code[n], tmp);
3071 /* Add the exit label. */
3072 tmp = build1_v (LABEL_EXPR, exit_label);
3073 gfc_add_expr_to_block (&loop->code[n], tmp);
3079 /* Finishes and generates the loops for a scalarized expression. */
/* Closes every loop dimension in order (innermost outwards per
   loop->order), appends the result to loop->pre, and clears the SS
   useflags so the chain can be reused.  */
3082 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3087 stmtblock_t *pblock;
3091 /* Generate the loops. */
3092 for (dim = 0; dim < loop->dimen; dim++)
3094 n = loop->order[dim];
3095 gfc_trans_scalarized_loop_end (loop, n, pblock);
/* Mark the loop variable closed; the outer block becomes the body
   of the next loop out.  */
3096 loop->loopvar[n] = NULL_TREE;
3097 pblock = &loop->code[n];
3100 tmp = gfc_finish_block (pblock);
3101 gfc_add_expr_to_block (&loop->pre, tmp);
3103 /* Clear all the used flags. */
3104 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3109 /* Finish the main body of a scalarized expression, and start the secondary
/* Used when a temporary splits the scalarization into two passes:
   close the inner loops of the first pass, restore the saved array
   offsets, then re-open the inner loops for the copying pass whose
   body starts in BODY.  */
3113 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3117 stmtblock_t *pblock;
3121 /* We finish as many loops as are used by the temporary. */
3122 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3124 n = loop->order[dim];
3125 gfc_trans_scalarized_loop_end (loop, n, pblock);
3126 loop->loopvar[n] = NULL_TREE;
3127 pblock = &loop->code[n];
3130 /* We don't want to finish the outermost loop entirely. */
3131 n = loop->order[loop->temp_dim - 1];
3132 gfc_trans_scalarized_loop_end (loop, n, pblock);
3134 /* Restore the initial offsets. */
3135 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* useflags bit 2 marks entries participating in the second loop.  */
3137 if ((ss->useflags & 2) == 0)
3140 if (ss->type != GFC_SS_SECTION
3141 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
3142 && ss->type != GFC_SS_COMPONENT)
3145 ss->data.info.offset = ss->data.info.saved_offset;
3148 /* Restart all the inner loops we just finished. */
3149 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3151 n = loop->order[dim];
3153 gfc_start_block (&loop->code[n]);
/* "Q" distinguishes second-pass loop variables from the first-pass
   "S" variables.  */
3155 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3157 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3160 /* Start a block for the secondary copying code. */
3161 gfc_start_block (body);
3165 /* Precalculate (either lower or upper) bound of an array section.
3166 BLOCK: Block in which the (pre)calculation code will go.
3167 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3168 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3169 DESC: Array descriptor from which the bound will be picked if unspecified
3170 (either lower or upper bound according to LBOUND). */
3173 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3174 tree desc, int dim, bool lbound)
3177 gfc_expr * input_val = values[dim];
3178 tree *output = &bounds[dim];
3183 /* Specified section bound. */
3184 gfc_init_se (&se, NULL);
3185 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3186 gfc_add_block_to_block (block, &se.pre);
3191 /* No specific bound specified so use the bound of the array. */
3192 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3193 gfc_conv_array_ubound (desc, dim);
/* Evaluate once into a temporary so later uses are cheap and free of
   side effects.  */
3195 *output = gfc_evaluate_now (*output, block);
3199 /* Calculate the lower bound of an array section. */
/* Fills info->start/end/stride for dimension DIM of a GFC_SS_SECTION
   entry; evaluation code goes into loop->pre.  Vector subscripts get
   a canonical zero-based start with unit stride.  */
3202 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim)
3204 gfc_expr *stride = NULL;
3207 gfc_array_info *info;
3210 gcc_assert (ss->type == GFC_SS_SECTION);
3212 info = &ss->data.info;
3213 ar = &info->ref->u.ar;
3215 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3217 /* We use a zero-based index to access the vector. */
3218 info->start[dim] = gfc_index_zero_node;
3219 info->end[dim] = NULL;
3220 info->stride[dim] = gfc_index_one_node;
3224 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3225 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3226 desc = info->descriptor;
3227 stride = ar->stride[dim];
3229 /* Calculate the start of the range. For vector subscripts this will
3230 be the range of the vector. */
3231 evaluate_bound (&loop->pre, info->start, ar->start, desc, dim, true);
3233 /* Similarly calculate the end. Although this is not used in the
3234 scalarizer, it is needed when checking bounds and where the end
3235 is an expression with side-effects. */
3236 evaluate_bound (&loop->pre, info->end, ar->end, desc, dim, false);
3238 /* Calculate the stride. */
/* An omitted stride means unit stride; otherwise evaluate it once.  */
3240 info->stride[dim] = gfc_index_one_node;
3243 gfc_init_se (&se, NULL);
3244 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3245 gfc_add_block_to_block (&loop->pre, &se.pre);
3246 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3251 /* Calculates the range start and stride for a SS chain. Also gets the
3252 descriptor and data pointer. The range of vector subscripts is the size
3253 of the vector. Array bounds are also checked. */
/* First pass determines the loop rank from the SS chain, second pass
   translates descriptors and per-dimension start/end/stride, and a
   final pass emits runtime bounds-check code when -fcheck=bounds is
   active.  All generated code goes into loop->pre.  */
3256 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3264 /* Determine the rank of the loop. */
3265 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3269 case GFC_SS_SECTION:
3270 case GFC_SS_CONSTRUCTOR:
3271 case GFC_SS_FUNCTION:
3272 case GFC_SS_COMPONENT:
3273 loop->dimen = ss->dimen;
3276 /* As usual, lbound and ubound are exceptions!. */
3277 case GFC_SS_INTRINSIC:
3278 switch (ss->expr->value.function.isym->id)
3280 case GFC_ISYM_LBOUND:
3281 case GFC_ISYM_UBOUND:
3282 case GFC_ISYM_LCOBOUND:
3283 case GFC_ISYM_UCOBOUND:
3284 case GFC_ISYM_THIS_IMAGE:
3285 loop->dimen = ss->dimen;
3297 /* We should have determined the rank of the expression by now. If
3298 not, that's bad news. */
3302 /* Loop over all the SS in the chain. */
3303 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3305 gfc_array_info *info;
3307 info = &ss->data.info;
/* Cache the frontend shape so later passes need not recompute it.  */
3309 if (ss->expr && ss->expr->shape && !info->shape)
3310 info->shape = ss->expr->shape;
3314 case GFC_SS_SECTION:
3315 /* Get the descriptor for the array. */
3316 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3318 for (n = 0; n < ss->dimen; n++)
3319 gfc_conv_section_startstride (loop, ss, ss->dim[n]);
3322 case GFC_SS_INTRINSIC:
3323 switch (ss->expr->value.function.isym->id)
3325 /* Fall through to supply start and stride. */
3326 case GFC_ISYM_LBOUND:
3327 case GFC_ISYM_UBOUND:
3328 case GFC_ISYM_LCOBOUND:
3329 case GFC_ISYM_UCOBOUND:
3330 case GFC_ISYM_THIS_IMAGE:
3337 case GFC_SS_CONSTRUCTOR:
3338 case GFC_SS_FUNCTION:
/* Constructors and function results use canonical zero-based
   unit-stride dimensions.  */
3339 for (n = 0; n < ss->dimen; n++)
3341 int dim = ss->dim[n];
3343 ss->data.info.start[dim] = gfc_index_zero_node;
3344 ss->data.info.end[dim] = gfc_index_zero_node;
3345 ss->data.info.stride[dim] = gfc_index_one_node;
3354 /* The rest is just runtime bound checking. */
3355 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3358 tree lbound, ubound;
3360 tree size[GFC_MAX_DIMENSIONS];
3361 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3362 gfc_array_info *info;
3366 gfc_start_block (&block);
3368 for (n = 0; n < loop->dimen; n++)
3369 size[n] = NULL_TREE;
3371 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Only array sections get range checks here.  */
3375 if (ss->type != GFC_SS_SECTION)
3378 /* Catch allocatable lhs in f2003. */
3379 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3382 gfc_start_block (&inner);
3384 /* TODO: range checking for mapped dimensions. */
3385 info = &ss->data.info;
3387 /* This code only checks ranges. Elemental and vector
3388 dimensions are checked later. */
3389 for (n = 0; n < loop->dimen; n++)
3394 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
/* The last dimension of an assumed-size array has no upper
   bound to check against.  */
3397 if (dim == info->ref->u.ar.dimen - 1
3398 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3399 check_upper = false;
3403 /* Zero stride is not allowed. */
3404 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3405 info->stride[dim], gfc_index_zero_node);
3406 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3407 "of array '%s'", dim + 1, ss->expr->symtree->name);
3408 gfc_trans_runtime_check (true, false, tmp, &inner,
3409 &ss->expr->where, msg);
3412 desc = ss->data.info.descriptor;
3414 /* This is the run-time equivalent of resolve.c's
3415 check_dimension(). The logical is more readable there
3416 than it is here, with all the trees. */
3417 lbound = gfc_conv_array_lbound (desc, dim);
3418 end = info->end[dim];
3420 ubound = gfc_conv_array_ubound (desc, dim);
3424 /* non_zerosized is true when the selected range is not
/* positive stride and start <= end ...  */
3426 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3427 boolean_type_node, info->stride[dim],
3428 gfc_index_zero_node);
3429 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3430 info->start[dim], end);
3431 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3432 boolean_type_node, stride_pos, tmp);
/* ... or negative stride and start >= end.  */
3434 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3436 info->stride[dim], gfc_index_zero_node);
3437 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3438 info->start[dim], end);
3439 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3442 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3444 stride_pos, stride_neg);
3446 /* Check the start of the range against the lower and upper
3447 bounds of the array, if the range is not empty.
3448 If upper bound is present, include both bounds in the
3452 tmp = fold_build2_loc (input_location, LT_EXPR,
3454 info->start[dim], lbound);
3455 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3457 non_zerosized, tmp);
3458 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3460 info->start[dim], ubound);
3461 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3463 non_zerosized, tmp2);
3464 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3465 "outside of expected range (%%ld:%%ld)",
3466 dim + 1, ss->expr->symtree->name);
3467 gfc_trans_runtime_check (true, false, tmp, &inner,
3468 &ss->expr->where, msg,
3469 fold_convert (long_integer_type_node, info->start[dim]),
3470 fold_convert (long_integer_type_node, lbound),
3471 fold_convert (long_integer_type_node, ubound));
3472 gfc_trans_runtime_check (true, false, tmp2, &inner,
3473 &ss->expr->where, msg,
3474 fold_convert (long_integer_type_node, info->start[dim]),
3475 fold_convert (long_integer_type_node, lbound),
3476 fold_convert (long_integer_type_node, ubound));
3481 tmp = fold_build2_loc (input_location, LT_EXPR,
3483 info->start[dim], lbound);
3484 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3485 boolean_type_node, non_zerosized, tmp);
3486 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3487 "below lower bound of %%ld",
3488 dim + 1, ss->expr->symtree->name);
3489 gfc_trans_runtime_check (true, false, tmp, &inner,
3490 &ss->expr->where, msg,
3491 fold_convert (long_integer_type_node, info->start[dim]),
3492 fold_convert (long_integer_type_node, lbound));
3496 /* Compute the last element of the range, which is not
3497 necessarily "end" (think 0:5:3, which doesn't contain 5)
3498 and check it against both lower and upper bounds. */
/* last = end - ((end - start) mod stride).  */
3500 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3501 gfc_array_index_type, end,
3503 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3504 gfc_array_index_type, tmp,
3506 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3507 gfc_array_index_type, end, tmp);
3508 tmp2 = fold_build2_loc (input_location, LT_EXPR,
3509 boolean_type_node, tmp, lbound);
3510 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3511 boolean_type_node, non_zerosized, tmp2);
3514 tmp3 = fold_build2_loc (input_location, GT_EXPR,
3515 boolean_type_node, tmp, ubound);
3516 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3517 boolean_type_node, non_zerosized, tmp3);
3518 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3519 "outside of expected range (%%ld:%%ld)",
3520 dim + 1, ss->expr->symtree->name);
/* NOTE(review): the bound arguments here are passed ubound then
   lbound, the reverse of the "(%ld:%ld)" order used by the start
   checks above — looks like a swapped-argument slip; confirm
   against the runtime message format before relying on it.  */
3521 gfc_trans_runtime_check (true, false, tmp2, &inner,
3522 &ss->expr->where, msg,
3523 fold_convert (long_integer_type_node, tmp),
3524 fold_convert (long_integer_type_node, ubound),
3525 fold_convert (long_integer_type_node, lbound));
3526 gfc_trans_runtime_check (true, false, tmp3, &inner,
3527 &ss->expr->where, msg,
3528 fold_convert (long_integer_type_node, tmp),
3529 fold_convert (long_integer_type_node, ubound),
3530 fold_convert (long_integer_type_node, lbound));
3535 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3536 "below lower bound of %%ld",
3537 dim + 1, ss->expr->symtree->name);
3538 gfc_trans_runtime_check (true, false, tmp2, &inner,
3539 &ss->expr->where, msg,
3540 fold_convert (long_integer_type_node, tmp),
3541 fold_convert (long_integer_type_node, lbound));
3545 /* Check the section sizes match. */
/* size = max ((end - start) / stride + 1, 0).  */
3546 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3547 gfc_array_index_type, end,
3549 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3550 gfc_array_index_type, tmp,
3552 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3553 gfc_array_index_type,
3554 gfc_index_one_node, tmp);
3555 tmp = fold_build2_loc (input_location, MAX_EXPR,
3556 gfc_array_index_type, tmp,
3557 build_int_cst (gfc_array_index_type, 0));
3558 /* We remember the size of the first section, and check all the
3559 others against this. */
3562 tmp3 = fold_build2_loc (input_location, NE_EXPR,
3563 boolean_type_node, tmp, size[n]);
3564 asprintf (&msg, "Array bound mismatch for dimension %d "
3565 "of array '%s' (%%ld/%%ld)",
3566 dim + 1, ss->expr->symtree->name);
3568 gfc_trans_runtime_check (true, false, tmp3, &inner,
3569 &ss->expr->where, msg,
3570 fold_convert (long_integer_type_node, tmp),
3571 fold_convert (long_integer_type_node, size[n]));
3576 size[n] = gfc_evaluate_now (tmp, &inner);
3579 tmp = gfc_finish_block (&inner);
3581 /* For optional arguments, only check bounds if the argument is
3583 if (ss->expr->symtree->n.sym->attr.optional
3584 || ss->expr->symtree->n.sym->attr.not_always_present)
3585 tmp = build3_v (COND_EXPR,
3586 gfc_conv_expr_present (ss->expr->symtree->n.sym),
3587 tmp, build_empty_stmt (input_location));
3589 gfc_add_expr_to_block (&block, tmp);
3593 tmp = gfc_finish_block (&block);
3594 gfc_add_expr_to_block (&loop->pre, tmp);
3598 /* Return true if both symbols could refer to the same data object. Does
3599 not take account of aliasing due to equivalence statements. */
/* The *_POINTER/*_TARGET flags are passed in (rather than read from
   the symbols) so callers can accumulate attributes along a chain of
   component references.  */
3602 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
3603 bool lsym_target, bool rsym_pointer, bool rsym_target)
3605 /* Aliasing isn't possible if the symbols have different base types. */
3606 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
3609 /* Pointers can point to other pointers and target objects. */
3611 if ((lsym_pointer && (rsym_pointer || rsym_target))
3612 || (rsym_pointer && (lsym_pointer || lsym_target)))
3615 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
3616 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
/* Non-contiguous dummy arguments with the TARGET attribute can be
   argument-associated with another target.  */
3618 if (lsym_target && rsym_target
3619 && ((lsym->attr.dummy && !lsym->attr.contiguous
3620 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
3621 || (rsym->attr.dummy && !rsym->attr.contiguous
3622 && (!rsym->attr.dimension
3623 || rsym->as->type == AS_ASSUMED_SHAPE))))
3630 /* Return true if the two SS could be aliased, i.e. both point to the same data
3632 /* TODO: resolve aliases based on frontend expressions. */
/* Checks the base symbols first, then walks the component-reference
   chains on both sides, accumulating pointer/target attributes as it
   descends.  */
3635 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
3641 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
3643 lsym = lss->expr->symtree->n.sym;
3644 rsym = rss->expr->symtree->n.sym;
3646 lsym_pointer = lsym->attr.pointer;
3647 lsym_target = lsym->attr.target;
3648 rsym_pointer = rsym->attr.pointer;
3649 rsym_target = rsym->attr.target;
3651 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
3652 rsym_pointer, rsym_target))
3655 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
3656 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
3659 /* For derived types we must check all the component types. We can ignore
3660 array references as these will have the same base type as the previous
3662 for (lref = lss->expr->ref; lref != lss->data.info.ref; lref = lref->next)
3664 if (lref->type != REF_COMPONENT)
3667 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
3668 lsym_target = lsym_target || lref->u.c.sym->attr.target;
3670 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
3671 rsym_pointer, rsym_target))
3674 if ((lsym_pointer && (rsym_pointer || rsym_target))
3675 || (rsym_pointer && (lsym_pointer || lsym_target)))
3677 if (gfc_compare_types (&lref->u.c.component->ts,
/* Inner walk: compare each left component against every right
   component.  */
3682 for (rref = rss->expr->ref; rref != rss->data.info.ref;
3685 if (rref->type != REF_COMPONENT)
3688 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
/* NOTE(review): "lsym_target ||" below looks like a copy-paste
   slip — presumably "rsym_target ||" was intended, mirroring the
   rsym_pointer line above; confirm before changing.  */
3689 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3691 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
3692 lsym_pointer, lsym_target,
3693 rsym_pointer, rsym_target))
3696 if ((lsym_pointer && (rsym_pointer || rsym_target))
3697 || (rsym_pointer && (lsym_pointer || lsym_target)))
3699 if (gfc_compare_types (&lref->u.c.component->ts,
3700 &rref->u.c.sym->ts))
3702 if (gfc_compare_types (&lref->u.c.sym->ts,
3703 &rref->u.c.component->ts))
3705 if (gfc_compare_types (&lref->u.c.component->ts,
3706 &rref->u.c.component->ts))
/* NOTE(review): the second pair of assignments below repeats the
   lsym_* reset — it likely should reset rsym_pointer/rsym_target
   from rsym instead; verify against the intent of the following
   right-hand walk.  */
3712 lsym_pointer = lsym->attr.pointer;
3713 lsym_target = lsym->attr.target;
3714 lsym_pointer = lsym->attr.pointer;
3715 lsym_target = lsym->attr.target;
3717 for (rref = rss->expr->ref; rref != rss->data.info.ref; rref = rref->next)
3719 if (rref->type != REF_COMPONENT)
3722 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
/* NOTE(review): same suspected "lsym_target" copy-paste as in the
   nested loop above.  */
3723 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3725 if (symbols_could_alias (rref->u.c.sym, lsym,
3726 lsym_pointer, lsym_target,
3727 rsym_pointer, rsym_target))
3730 if ((lsym_pointer && (rsym_pointer || rsym_target))
3731 || (rsym_pointer && (lsym_pointer || lsym_target)))
3733 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
3742 /* Resolve array data dependencies. Creates a temporary if required. */
3743 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
3747 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
/* Assume no temporary is needed until a dependency is discovered.  */
3756 loop->temp_ss = NULL;
/* Walk every rhs scalarization term and test it against the destination.  */
3758 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
3760 if (ss->type != GFC_SS_SECTION)
3763 if (dest->expr->symtree->n.sym != ss->expr->symtree->n.sym)
/* Different base symbols may still overlap if they can alias or are
   EQUIVALENCEd.  */
3765 if (gfc_could_be_alias (dest, ss)
3766 || gfc_are_equivalenced_arrays (dest->expr, ss->expr))
/* Same base symbol on both sides: ask the dependency resolver.  */
3774 lref = dest->expr->ref;
3775 rref = ss->expr->ref;
3777 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
3782 for (i = 0; i < dest->dimen; i++)
3783 for (j = 0; j < ss->dimen; j++)
3785 && dest->dim[i] == ss->dim[j])
3787 /* If we don't access array elements in the same order,
3788 there is a dependency. */
3793 /* TODO : loop shifting. */
3796 /* Mark the dimensions for LOOP SHIFTING */
3797 for (n = 0; n < loop->dimen; n++)
3799 int dim = dest->data.info.dim[n];
3801 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3803 else if (! gfc_is_same_range (&lref->u.ar,
3804 &rref->u.ar, dim, 0))
3808 /* Put all the dimensions with dependencies in the
3811 for (n = 0; n < loop->dimen; n++)
3813 gcc_assert (loop->order[n] == n);
3815 loop->order[dim++] = n;
3817 for (n = 0; n < loop->dimen; n++)
3820 loop->order[dim++] = n;
3823 gcc_assert (dim == loop->dimen);
/* A dependency was found: create a GFC_SS_TEMP entry with the
   destination's element type and hook it into the loop chain.  */
3834 tree base_type = gfc_typenode_for_spec (&dest->expr->ts);
3835 if (GFC_ARRAY_TYPE_P (base_type)
3836 || GFC_DESCRIPTOR_TYPE_P (base_type))
3837 base_type = gfc_get_element_type (base_type);
3838 loop->temp_ss = gfc_get_temp_ss (base_type, dest->string_length,
3840 gfc_add_ss_to_loop (loop, loop->temp_ss);
/* No dependency found: no temporary required.  */
3843 loop->temp_ss = NULL;
3847 /* Initialize the scalarization loop. Creates the loop variables. Determines
3848 the range of the loop variables. Creates a temporary if required.
3849 Calculates how to transform from loop variables to array indices for each
3850 expression. Also generates code for scalar expressions which have been
3851 moved outside the loop. */
3854 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
3856 int n, dim, spec_dim;
3857 gfc_array_info *info;
3858 gfc_array_info *specinfo;
3859 gfc_ss *ss, *tmp_ss;
/* Per-dimension: the SS chosen to supply the loop bounds ("specifier"),
   and whether that dimension's extent is only known dynamically.  */
3861 gfc_ss *loopspec[GFC_MAX_DIMENSIONS];
3862 bool dynamic[GFC_MAX_DIMENSIONS];
/* Pass 1: pick a loop specifier for each dimension.  */
3867 for (n = 0; n < loop->dimen; n++)
3871 /* We use one SS term, and use that to determine the bounds of the
3872 loop for this dimension. We try to pick the simplest term. */
3873 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3875 gfc_ss_type ss_type;
/* Scalar-like terms carry no loop bounds; skip them.  */
3878 if (ss_type == GFC_SS_SCALAR
3879 || ss_type == GFC_SS_TEMP
3880 || ss_type == GFC_SS_REFERENCE)
3883 info = &ss->data.info;
3886 if (loopspec[n] != NULL)
3888 specinfo = &loopspec[n]->data.info;
3889 spec_dim = loopspec[n]->dim[n];
3893 /* Silence uninitialized warnings. */
3900 gcc_assert (info->shape[dim]);
3901 /* The frontend has worked out the size for us. */
3904 || !integer_zerop (specinfo->start[spec_dim]))
3905 /* Prefer zero-based descriptors if possible. */
3910 if (ss->type == GFC_SS_CONSTRUCTOR)
3912 gfc_constructor_base base;
3913 /* An unknown size constructor will always be rank one.
3914 Higher rank constructors will either have known shape,
3915 or still be wrapped in a call to reshape. */
3916 gcc_assert (loop->dimen == 1);
3918 /* Always prefer to use the constructor bounds if the size
3919 can be determined at compile time. Prefer not to otherwise,
3920 since the general case involves realloc, and it's better to
3921 avoid that overhead if possible. */
3922 base = ss->expr->value.constructor;
3923 dynamic[n] = gfc_get_array_constructor_size (&i, base);
3924 if (!dynamic[n] || !loopspec[n])
3929 /* TODO: Pick the best bound if we have a choice between a
3930 function and something else. */
3931 if (ss->type == GFC_SS_FUNCTION)
3937 /* Avoid using an allocatable lhs in an assignment, since
3938 there might be a reallocation coming. */
3939 if (loopspec[n] && ss->is_alloc_lhs)
3942 if (ss->type != GFC_SS_SECTION)
3947 /* Criteria for choosing a loop specifier (most important first):
3948 doesn't need realloc
3954 else if ((loopspec[n]->type == GFC_SS_CONSTRUCTOR && dynamic[n])
3955 || n >= loop->dimen)
3957 else if (integer_onep (info->stride[dim])
3958 && !integer_onep (specinfo->stride[spec_dim]))
3960 else if (INTEGER_CST_P (info->stride[dim])
3961 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
3963 else if (INTEGER_CST_P (info->start[dim])
3964 && !INTEGER_CST_P (specinfo->start[spec_dim]))
3966 /* We don't work out the upper bound.
3967 else if (INTEGER_CST_P (info->finish[n])
3968 && ! INTEGER_CST_P (specinfo->finish[n]))
3969 loopspec[n] = ss; */
3972 /* We should have found the scalarization loop specifier. If not,
3974 gcc_assert (loopspec[n]);
3976 info = &loopspec[n]->data.info;
3977 dim = loopspec[n]->dim[n];
3979 /* Set the extents of this range. */
3980 cshape = info->shape;
/* With a compile-time shape and constant start/stride, compute the upper
   bound directly from the shape.  */
3981 if (cshape && INTEGER_CST_P (info->start[dim])
3982 && INTEGER_CST_P (info->stride[dim]))
3984 loop->from[n] = info->start[dim];
3985 mpz_set (i, cshape[get_array_ref_dim (loopspec[n], n)]);
3986 mpz_sub_ui (i, i, 1);
3987 /* To = from + (size - 1) * stride. */
3988 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
3989 if (!integer_onep (info->stride[dim]))
3990 tmp = fold_build2_loc (input_location, MULT_EXPR,
3991 gfc_array_index_type, tmp,
3993 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
3994 gfc_array_index_type,
3995 loop->from[n], tmp);
/* Otherwise the upper bound depends on the kind of specifier.  */
3999 loop->from[n] = info->start[dim];
4000 switch (loopspec[n]->type)
4002 case GFC_SS_CONSTRUCTOR:
4003 /* The upper bound is calculated when we expand the
4005 gcc_assert (loop->to[n] == NULL_TREE);
4008 case GFC_SS_SECTION:
4009 /* Use the end expression if it exists and is not constant,
4010 so that it is only evaluated once. */
4011 loop->to[n] = info->end[dim];
4014 case GFC_SS_FUNCTION:
4015 /* The loop bound will be set when we generate the call. */
4016 gcc_assert (loop->to[n] == NULL_TREE);
4024 /* Transform everything so we have a simple incrementing variable. */
4025 if (n < loop->dimen && integer_onep (info->stride[dim]))
4026 info->delta[dim] = gfc_index_zero_node;
4027 else if (n < loop->dimen)
4029 /* Set the delta for this section. */
4030 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
4031 /* Number of iterations is (end - start + step) / step.
4032 with start = 0, this simplifies to
4034 for (i = 0; i<=last; i++){...}; */
4035 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4036 gfc_array_index_type, loop->to[n],
4038 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4039 gfc_array_index_type, tmp, info->stride[dim]);
/* Clamp to -1 so an empty range yields a zero-trip loop.  */
4040 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4041 tmp, build_int_cst (gfc_array_index_type, -1));
4042 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
4043 /* Make the loop variable start at 0. */
4044 loop->from[n] = gfc_index_zero_node;
4048 /* Add all the scalar code that can be taken out of the loops.
4049 This may include calculating the loop bounds, so do it before
4050 allocating the temporary. */
4051 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4053 tmp_ss = loop->temp_ss;
4054 /* If we want a temporary then create it. */
4057 gcc_assert (loop->temp_ss->type == GFC_SS_TEMP);
4059 /* Make absolutely sure that this is a complete type. */
4060 if (loop->temp_ss->string_length)
4061 loop->temp_ss->data.temp.type
4062 = gfc_get_character_type_len_for_eltype
4063 (TREE_TYPE (loop->temp_ss->data.temp.type),
4064 loop->temp_ss->string_length);
/* Re-purpose the temp SS as a section over the freshly created array.  */
4066 tmp = loop->temp_ss->data.temp.type;
4067 memset (&loop->temp_ss->data.info, 0, sizeof (gfc_array_info));
4068 loop->temp_ss->type = GFC_SS_SECTION;
4070 gcc_assert (tmp_ss->dimen != 0);
4072 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
4073 tmp_ss, tmp, NULL_TREE,
4074 false, true, false, where);
4077 for (n = 0; n < loop->temp_dim; n++)
4078 loopspec[loop->order[n]] = NULL;
4082 /* For array parameters we don't have loop variables, so don't calculate the
4084 if (loop->array_parameter)
4087 /* Calculate the translation from loop variables to array indices. */
4088 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4090 if (ss->type != GFC_SS_SECTION && ss->type != GFC_SS_COMPONENT
4091 && ss->type != GFC_SS_CONSTRUCTOR)
4095 info = &ss->data.info;
4097 for (n = 0; n < ss->dimen; n++)
4099 /* If we are specifying the range the delta is already set. */
4100 if (loopspec[n] != ss)
4104 /* Calculate the offset relative to the loop variable.
4105 First multiply by the stride. */
4106 tmp = loop->from[n];
4107 if (!integer_onep (info->stride[dim]))
4108 tmp = fold_build2_loc (input_location, MULT_EXPR,
4109 gfc_array_index_type,
4110 tmp, info->stride[dim]);
4112 /* Then subtract this from our starting value. */
4113 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4114 gfc_array_index_type,
4115 info->start[dim], tmp);
4117 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4124 /* Calculate the size of a given array dimension from the bounds. This
4125 is simply (ubound - lbound + 1) if this expression is positive
4126 or 0 if it is negative (pick either one if it is zero). Optionally
4127 (if or_expr is present) OR the (expression != 0) condition to it. */
4130 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4135 /* Calculate (ubound - lbound + 1). */
4136 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4138 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4139 gfc_index_one_node);
4141 /* Check whether the size for this dimension is negative. */
/* LE_EXPR: a zero extent also selects the zero branch; harmless, since
   both arms are then equal (matches "pick either one if it is zero").  */
4142 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4143 gfc_index_zero_node);
4144 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4145 gfc_index_zero_node, res);
4147 /* Build OR expression. */
/* Accumulate "this dimension is empty" into *or_expr for the caller.  */
4149 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4150 boolean_type_node, *or_expr, cond);
4156 /* For an array descriptor, get the total number of elements. This is just
4157 the product of the extents along from_dim to to_dim. */
4160 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
/* Start from 1 and multiply in each dimension's (clamped) extent.  */
4165 res = gfc_index_one_node;
4167 for (dim = from_dim; dim < to_dim; ++dim)
4173 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4174 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
/* NULL or_expr: only the extent is needed here, no emptiness flag.  */
4176 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4177 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4185 /* Full size of an array. */
4188 gfc_conv_descriptor_size (tree desc, int rank)
/* Product of the extents of dimensions 0 .. rank-1.  */
4190 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4194 /* Size of a coarray for all dimensions but the last. */
4197 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
/* Codimensions occupy descriptor dimensions rank .. rank+corank-1; the
   final codimension is deliberately excluded from the product.  */
4199 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4203 /* Fills in an array descriptor, and returns the size of the array.
4204 The size will be a simple_val, ie a variable or a constant. Also
4205 calculates the offset of the base. The pointer argument overflow,
4206 which should be of integer type, will increase in value if overflow
4207 occurs during the size calculation. Returns the size of the array.
4211 for (n = 0; n < rank; n++)
4213 a.lbound[n] = specified_lower_bound;
4214 offset = offset + a.lbound[n] * stride;
4216 a.ubound[n] = specified_upper_bound;
4217 a.stride[n] = stride;
4218 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4219 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4220 stride = stride * size;
4222 for (n = rank; n < rank+corank; n++)
4223 (Set lcobound/ucobound as above.)
4224 element_size = sizeof (array element);
4227 stride = (size_t) stride;
4228 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4229 stride = stride * element_size;
4235 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4236 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4237 stmtblock_t * descriptor_block, tree * overflow)
4250 stmtblock_t thenblock;
4251 stmtblock_t elseblock;
4256 type = TREE_TYPE (descriptor);
/* Stride starts at one element; offset accumulates -lbound*stride terms.  */
4258 stride = gfc_index_one_node;
4259 offset = gfc_index_zero_node;
4261 /* Set the dtype. */
4262 tmp = gfc_conv_descriptor_dtype (descriptor);
4263 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
/* or_expr becomes true if any dimension has non-positive extent.  */
4265 or_expr = boolean_false_node;
4267 for (n = 0; n < rank; n++)
4272 /* We have 3 possibilities for determining the size of the array:
4273 lower == NULL => lbound = 1, ubound = upper[n]
4274 upper[n] = NULL => lbound = 1, ubound = lower[n]
4275 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4278 /* Set lower bound. */
4279 gfc_init_se (&se, NULL);
4281 se.expr = gfc_index_one_node;
4284 gcc_assert (lower[n]);
4287 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4288 gfc_add_block_to_block (pblock, &se.pre);
4292 se.expr = gfc_index_one_node;
4296 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4297 gfc_rank_cst[n], se.expr);
4298 conv_lbound = se.expr;
4300 /* Work out the offset for this component. */
4301 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4303 offset = fold_build2_loc (input_location, MINUS_EXPR,
4304 gfc_array_index_type, offset, tmp);
4306 /* Set upper bound. */
4307 gfc_init_se (&se, NULL);
4308 gcc_assert (ubound);
4309 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4310 gfc_add_block_to_block (pblock, &se.pre);
4312 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4313 gfc_rank_cst[n], se.expr);
4314 conv_ubound = se.expr;
4316 /* Store the stride. */
4317 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4318 gfc_rank_cst[n], stride);
4320 /* Calculate size and check whether extent is negative. */
4321 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4322 size = gfc_evaluate_now (size, pblock);
4324 /* Check whether multiplying the stride by the number of
4325 elements in this dimension would overflow. We must also check
4326 whether the current dimension has zero size in order to avoid
4329 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4330 gfc_array_index_type,
4331 fold_convert (gfc_array_index_type,
4332 TYPE_MAX_VALUE (gfc_array_index_type)),
4334 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4335 boolean_type_node, tmp, stride));
4336 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4337 integer_one_node, integer_zero_node);
/* A zero-sized dimension contributes no overflow (guards the division).  */
4338 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4339 boolean_type_node, size,
4340 gfc_index_zero_node));
4341 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4342 integer_zero_node, tmp);
4343 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4345 *overflow = gfc_evaluate_now (tmp, pblock);
4347 /* Multiply the stride by the number of elements in this dimension. */
4348 stride = fold_build2_loc (input_location, MULT_EXPR,
4349 gfc_array_index_type, stride, size);
4350 stride = gfc_evaluate_now (stride, pblock);
/* Now the codimensions: bounds only, they do not contribute to size.  */
4353 for (n = rank; n < rank + corank; n++)
4357 /* Set lower bound. */
4358 gfc_init_se (&se, NULL);
4359 if (lower == NULL || lower[n] == NULL)
4361 gcc_assert (n == rank + corank - 1);
4362 se.expr = gfc_index_one_node;
4366 if (ubound || n == rank + corank - 1)
4368 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4369 gfc_add_block_to_block (pblock, &se.pre);
4373 se.expr = gfc_index_one_node;
4377 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4378 gfc_rank_cst[n], se.expr);
/* The last codimension has no upper cobound.  */
4380 if (n < rank + corank - 1)
4382 gfc_init_se (&se, NULL);
4383 gcc_assert (ubound);
4384 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4385 gfc_add_block_to_block (pblock, &se.pre);
4386 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4387 gfc_rank_cst[n], se.expr);
4391 /* The stride is the number of elements in the array, so multiply by the
4392 size of an element to get the total size. */
4393 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4394 /* Convert to size_t. */
4395 element_size = fold_convert (size_type_node, tmp);
4398 return element_size;
4400 stride = fold_convert (size_type_node, stride);
4402 /* First check for overflow. Since an array of type character can
4403 have zero element_size, we must check for that before
4405 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4407 TYPE_MAX_VALUE (size_type_node), element_size);
4408 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4409 boolean_type_node, tmp, stride));
4410 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4411 integer_one_node, integer_zero_node);
4412 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4413 boolean_type_node, element_size,
4414 build_int_cst (size_type_node, 0)));
4415 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4416 integer_zero_node, tmp);
4417 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4419 *overflow = gfc_evaluate_now (tmp, pblock);
/* Total allocation size in bytes.  */
4421 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
4422 stride, element_size);
4424 if (poffset != NULL)
4426 offset = gfc_evaluate_now (offset, pblock);
/* If emptiness is known at compile time, return the size directly.  */
4430 if (integer_zerop (or_expr))
4432 if (integer_onep (or_expr))
4433 return build_int_cst (size_type_node, 0);
/* Otherwise select between 0 (some empty dimension) and the computed
   size at run time.  */
4435 var = gfc_create_var (TREE_TYPE (size), "size");
4436 gfc_start_block (&thenblock);
4437 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
4438 thencase = gfc_finish_block (&thenblock);
4440 gfc_start_block (&elseblock);
4441 gfc_add_modify (&elseblock, var, size);
4442 elsecase = gfc_finish_block (&elseblock);
4444 tmp = gfc_evaluate_now (or_expr, pblock);
4445 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
4446 gfc_add_expr_to_block (pblock, tmp);
4452 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
4453 the work for an ALLOCATE statement. */
4457 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
4462 tree offset = NULL_TREE;
4463 tree token = NULL_TREE;
4466 tree error = NULL_TREE;
4467 tree overflow; /* Boolean storing whether size calculation overflows. */
4468 tree var_overflow = NULL_TREE;
4470 tree set_descriptor;
4471 stmtblock_t set_descriptor_block;
4472 stmtblock_t elseblock;
4475 gfc_ref *ref, *prev_ref = NULL;
4476 bool allocatable, coarray, dimension;
4480 /* Find the last reference in the chain. */
4481 while (ref && ref->next != NULL)
4483 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
4484 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
4489 if (ref == NULL || ref->type != REF_ARRAY)
/* Attributes come from the base symbol, or from the component when the
   allocation target is a derived-type component (prev_ref set).  */
4494 allocatable = expr->symtree->n.sym->attr.allocatable;
4495 coarray = expr->symtree->n.sym->attr.codimension;
4496 dimension = expr->symtree->n.sym->attr.dimension;
4500 allocatable = prev_ref->u.c.component->attr.allocatable;
4501 coarray = prev_ref->u.c.component->attr.codimension;
4502 dimension = prev_ref->u.c.component->attr.dimension;
4506 gcc_assert (coarray);
4508 /* Figure out the size of the array. */
/* Pick lower/upper bound expressions according to how the ALLOCATE
   reference was written.  */
4509 switch (ref->u.ar.type)
4515 upper = ref->u.ar.start;
4521 lower = ref->u.ar.start;
4522 upper = ref->u.ar.end;
4526 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
4528 lower = ref->u.ar.as->lower;
4529 upper = ref->u.ar.as->upper;
4537 overflow = integer_zero_node;
4539 gfc_init_block (&set_descriptor_block);
4540 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
4541 ref->u.ar.as->corank, &offset, lower, upper,
4542 &se->pre, &set_descriptor_block, &overflow);
4547 var_overflow = gfc_create_var (integer_type_node, "overflow");
4548 gfc_add_modify (&se->pre, var_overflow, overflow);
4550 /* Generate the block of code handling overflow. */
4551 msg = gfc_build_addr_expr (pchar_type_node,
4552 gfc_build_localized_cstring_const
4553 ("Integer overflow when calculating the amount of "
4554 "memory to allocate"));
4555 error = build_call_expr_loc (input_location, gfor_fndecl_runtime_error,
/* With STAT= present, report failure through the status variable instead
   of calling the runtime error routine.  */
4559 if (status != NULL_TREE)
4561 tree status_type = TREE_TYPE (status);
4562 stmtblock_t set_status_block;
4564 gfc_start_block (&set_status_block);
4565 gfc_add_modify (&set_status_block, status,
4566 build_int_cst (status_type, LIBERROR_ALLOCATION));
4567 error = gfc_finish_block (&set_status_block);
4570 gfc_start_block (&elseblock);
4572 /* Allocate memory to store the data. */
4573 pointer = gfc_conv_descriptor_data_get (se->expr);
4574 STRIP_NOPS (pointer);
/* Library coarrays need the descriptor's token for the runtime.  */
4576 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
4577 token = gfc_build_addr_expr (NULL_TREE,
4578 gfc_conv_descriptor_token (se->expr));
4580 /* The allocatable variant takes the old pointer as first argument. */
4582 gfc_allocate_allocatable (&elseblock, pointer, size, token,
4583 status, errmsg, errlen, expr);
4585 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
/* Only allocate if the size computation did not overflow.  */
4589 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
4590 boolean_type_node, var_overflow, integer_zero_node));
4591 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
4592 error, gfc_finish_block (&elseblock));
4595 tmp = gfc_finish_block (&elseblock);
4597 gfc_add_expr_to_block (&se->pre, tmp);
4599 /* Update the array descriptors. */
4601 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
4603 set_descriptor = gfc_finish_block (&set_descriptor_block);
/* With STAT= the descriptor is only updated on successful allocation.  */
4604 if (status != NULL_TREE)
4606 cond = fold_build2_loc (input_location, EQ_EXPR,
4607 boolean_type_node, status,
4608 build_int_cst (TREE_TYPE (status), 0));
4609 gfc_add_expr_to_block (&se->pre,
4610 fold_build3_loc (input_location, COND_EXPR, void_type_node,
4611 gfc_likely (cond), set_descriptor,
4612 build_empty_stmt (input_location)));
4615 gfc_add_expr_to_block (&se->pre, set_descriptor);
/* Derived types with allocatable components need those components
   nullified after allocation.  */
4617 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
4618 && expr->ts.u.derived->attr.alloc_comp)
4620 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
4621 ref->u.ar.as->rank);
4622 gfc_add_expr_to_block (&se->pre, tmp);
4629 /* Deallocate an array variable. Also used when an allocated variable goes
4634 gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
4640 gfc_start_block (&block);
4641 /* Get a pointer to the data. */
4642 var = gfc_conv_descriptor_data_get (descriptor);
4645 /* Parameter is the address of the data component. */
4646 tmp = gfc_deallocate_with_status (var, pstat, false, expr);
4647 gfc_add_expr_to_block (&block, tmp);
4649 /* Zero the data pointer. */
/* A null data pointer marks the variable as deallocated again.  */
4650 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4651 var, build_int_cst (TREE_TYPE (var), 0));
4652 gfc_add_expr_to_block (&block, tmp);
4654 return gfc_finish_block (&block);
4658 /* Create an array constructor from an initialization expression.
4659 We assume the frontend already did any expansions and conversions. */
4662 gfc_conv_array_initializer (tree type, gfc_expr * expr)
4668 unsigned HOST_WIDE_INT lo;
4670 VEC(constructor_elt,gc) *v = NULL;
4672 switch (expr->expr_type)
4675 case EXPR_STRUCTURE:
4676 /* A single scalar or derived type value. Create an array with all
4677 elements equal to that value. */
4678 gfc_init_se (&se, NULL);
4680 if (expr->expr_type == EXPR_CONSTANT)
4681 gfc_conv_constant (&se, expr);
4683 gfc_conv_structure (&se, expr, 1);
/* Element count is derived from the array type's index domain.  */
4685 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4686 gcc_assert (tmp && INTEGER_CST_P (tmp));
4687 hi = TREE_INT_CST_HIGH (tmp);
4688 lo = TREE_INT_CST_LOW (tmp);
4692 /* This will probably eat buckets of memory for large arrays. */
/* hi:lo form a double-word counter over the remaining elements.  */
4693 while (hi != 0 || lo != 0)
4695 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
4703 /* Create a vector of all the elements. */
4704 for (c = gfc_constructor_first (expr->value.constructor);
4705 c; c = gfc_constructor_next (c))
4709 /* Problems occur when we get something like
4710 integer :: a(lots) = (/(i, i=1, lots)/) */
4711 gfc_fatal_error ("The number of elements in the array constructor "
4712 "at %L requires an increase of the allowed %d "
4713 "upper limit. See -fmax-array-constructor "
4714 "option", &expr->where,
4715 gfc_option.flag_max_array_constructor);
4718 if (mpz_cmp_si (c->offset, 0) != 0)
4719 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
/* A repeat count > 1 becomes a RANGE_EXPR index [offset, offset+repeat-1]
   so the value is stored once for the whole run.  */
4723 if (mpz_cmp_si (c->repeat, 1) > 0)
4729 mpz_add (maxval, c->offset, c->repeat);
4730 mpz_sub_ui (maxval, maxval, 1);
4731 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4732 if (mpz_cmp_si (c->offset, 0) != 0)
4734 mpz_add_ui (maxval, c->offset, 1);
4735 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4738 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4740 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
4746 gfc_init_se (&se, NULL);
4747 switch (c->expr->expr_type)
4750 gfc_conv_constant (&se, c->expr);
4753 case EXPR_STRUCTURE:
4754 gfc_conv_structure (&se, c->expr, 1);
4758 /* Catch those occasional beasts that do not simplify
4759 for one reason or another, assuming that if they are
4760 standard defying the frontend will catch them. */
4761 gfc_conv_expr (&se, c->expr);
4765 if (range == NULL_TREE)
4766 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4769 if (index != NULL_TREE)
4770 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4771 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
/* NULL initializer: emit a null descriptor.  */
4777 return gfc_build_null_descriptor (type);
4783 /* Create a constructor from the list of elements. */
4784 tmp = build_constructor (type, v);
4785 TREE_CONSTANT (tmp) = 1;
4790 /* Generate code to evaluate non-constant coarray cobounds. */
4793 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
4794 const gfc_symbol *sym)
/* Cobounds live in type slots as->rank .. as->rank+as->corank-1.  */
4804 for (dim = as->rank; dim < as->rank + as->corank; dim++)
4806 /* Evaluate non-constant array bound expressions. */
/* Constant bounds (INTEGER_CST) are already folded into the type;
   only runtime bounds need code emitted here.  */
4807 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4808 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4810 gfc_init_se (&se, NULL);
4811 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4812 gfc_add_block_to_block (pblock, &se.pre);
4813 gfc_add_modify (pblock, lbound, se.expr);
4815 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4816 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4818 gfc_init_se (&se, NULL);
4819 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4820 gfc_add_block_to_block (pblock, &se.pre);
4821 gfc_add_modify (pblock, ubound, se.expr);
4827 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
4828 returns the size (in elements) of the array. */
4831 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
4832 stmtblock_t * pblock)
/* Running element count and base offset, built up dimension by dimension.  */
4847 size = gfc_index_one_node;
4848 offset = gfc_index_zero_node;
4849 for (dim = 0; dim < as->rank; dim++)
4851 /* Evaluate non-constant array bound expressions. */
4852 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4853 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4855 gfc_init_se (&se, NULL);
4856 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4857 gfc_add_block_to_block (pblock, &se.pre);
4858 gfc_add_modify (pblock, lbound, se.expr);
4860 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4861 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4863 gfc_init_se (&se, NULL);
4864 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4865 gfc_add_block_to_block (pblock, &se.pre);
4866 gfc_add_modify (pblock, ubound, se.expr);
4868 /* The offset of this dimension. offset = offset - lbound * stride. */
4869 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4871 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4874 /* The size of this dimension, and the stride of the next. */
/* The stride of dimension dim+1 equals the cumulative size through dim;
   for the last dimension it is the total array size.  */
4875 if (dim + 1 < as->rank)
4876 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
4878 stride = GFC_TYPE_ARRAY_SIZE (type);
4880 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
4882 /* Calculate stride = size * (ubound + 1 - lbound). */
4883 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4884 gfc_array_index_type,
4885 gfc_index_one_node, lbound);
4886 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4887 gfc_array_index_type, ubound, tmp);
4888 tmp = fold_build2_loc (input_location, MULT_EXPR,
4889 gfc_array_index_type, size, tmp);
4891 gfc_add_modify (pblock, stride, tmp);
4893 stride = gfc_evaluate_now (tmp, pblock);
4895 /* Make sure that negative size arrays are translated
4896 to being zero size. */
4897 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4898 stride, gfc_index_zero_node);
4899 tmp = fold_build3_loc (input_location, COND_EXPR,
4900 gfc_array_index_type, tmp,
4901 stride, gfc_index_zero_node);
4902 gfc_add_modify (pblock, stride, tmp);
/* Emit cobound evaluation and any VLA type-size fixups as well.  */
4908 gfc_trans_array_cobounds (type, pblock, sym);
4909 gfc_trans_vla_type_sizes (sym, pblock);
4916 /* Generate code to initialize/allocate an array variable. */
4919 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
4920 gfc_wrapped_block * block)
4924 tree tmp = NULL_TREE;
/* This path is for automatic arrays only, never POINTER/ALLOCATABLE.  */
4931 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
4933 /* Do nothing for USEd variables. */
4934 if (sym->attr.use_assoc)
4937 type = TREE_TYPE (decl);
4938 gcc_assert (GFC_ARRAY_TYPE_P (type));
/* A non-pointer decl means the array lives directly on the stack.  */
4939 onstack = TREE_CODE (type) != POINTER_TYPE;
4941 gfc_init_block (&init);
4943 /* Evaluate character string length. */
4944 if (sym->ts.type == BT_CHARACTER
4945 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl)
4947 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4949 gfc_trans_vla_type_sizes (sym, &init);
4951 /* Emit a DECL_EXPR for this variable, which will cause the
4952 gimplifier to allocate storage, and all that good stuff. */
4953 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
4954 gfc_add_expr_to_block (&init, tmp);
4959 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* Heap/stack-pointer case: strip the POINTER_TYPE to get the array type.  */
4963 type = TREE_TYPE (type);
4965 gcc_assert (!sym->attr.use_assoc);
4966 gcc_assert (!TREE_STATIC (decl));
4967 gcc_assert (!sym->module);
4969 if (sym->ts.type == BT_CHARACTER
4970 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4971 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4973 size = gfc_trans_array_bounds (type, sym, &offset, &init);
4975 /* Don't actually allocate space for Cray Pointees. */
4976 if (sym->attr.cray_pointee)
4978 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4979 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4981 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* -fstack-arrays: create an on-stack VAR_DECL instead of malloc'ing.  */
4985 if (gfc_option.flag_stack_arrays)
4987 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
4988 space = build_decl (sym->declared_at.lb->location,
4989 VAR_DECL, create_tmp_var_name ("A"),
4990 TREE_TYPE (TREE_TYPE (decl)));
4991 gfc_trans_vla_type_sizes (sym, &init);
4995 /* The size is the number of elements in the array, so multiply by the
4996 size of an element to get the total size. */
4997 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4998 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4999 size, fold_convert (gfc_array_index_type, tmp));
5001 /* Allocate memory to hold the data. */
5002 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5003 gfc_add_modify (&init, decl, tmp);
5005 /* Free the temporary. */
/* The matching free becomes the cleanup half of the wrapped block.  */
5006 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5010 /* Set offset of the array. */
5011 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5012 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5014 /* Automatic arrays should not have initializers. */
5015 gcc_assert (!sym->value);
5017 inittree = gfc_finish_block (&init);
5024 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5025 where also space is located. */
5026 gfc_init_block (&init);
5027 tmp = fold_build1_loc (input_location, DECL_EXPR,
5028 TREE_TYPE (space), space);
5029 gfc_add_expr_to_block (&init, tmp);
/* Point the original pointer decl at the stack space.  */
5030 addr = fold_build1_loc (sym->declared_at.lb->location,
5031 ADDR_EXPR, TREE_TYPE (decl), space);
5032 gfc_add_modify (&init, decl, addr);
5033 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5036 gfc_add_init_cleanup (block, inittree, tmp);
5040 /* Generate entry and exit code for g77 calling convention arrays. */
/* Build the entry code for a dummy array passed without a descriptor
   (g77/descriptorless calling convention): evaluate the character
   length and array bounds, record the offset, and wrap the whole
   sequence in a presence test for OPTIONAL dummies.  No cleanup code
   is needed, hence the NULL_TREE cleanup argument below.
   NOTE(review): this is a sampled extract -- the return type, local
   declarations and some delimiting braces of the original are not
   visible here.  */
5043 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
/* Emit all generated code at the dummy's declaration location.  */
5053 gfc_save_backend_locus (&loc);
5054 gfc_set_backend_locus (&sym->declared_at);
5056 /* Descriptor type. */
5057 parm = sym->backend_decl;
5058 type = TREE_TYPE (parm);
5059 gcc_assert (GFC_ARRAY_TYPE_P (type));
5061 gfc_start_block (&init);
/* A non-constant character length lives in a VAR_DECL and must be
   computed at run time before the bounds can be evaluated.  */
5063 if (sym->ts.type == BT_CHARACTER
5064 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5065 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5067 /* Evaluate the bounds of the array. */
5068 gfc_trans_array_bounds (type, sym, &offset, &init);
5070 /* Set the offset. */
5071 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5072 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5074 /* Set the pointer itself if we aren't using the parameter directly. */
5075 if (TREE_CODE (parm) != PARM_DECL)
5077 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5078 gfc_add_modify (&init, parm, tmp);
5080 stmt = gfc_finish_block (&init);
5082 gfc_restore_backend_locus (&loc);
5084 /* Add the initialization code to the start of the function. */
/* Guard the init sequence with PRESENT(sym) when the dummy may be
   absent at run time (OPTIONAL or not always present via ENTRY).  */
5086 if (sym->attr.optional || sym->attr.not_always_present)
5088 tmp = gfc_conv_expr_present (sym);
5089 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5092 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5096 /* Modify the descriptor of an array parameter so that it has the
5097 correct lower bound. Also move the upper bound accordingly.
5098 If the array is not packed, it will be copied into a temporary.
5099 For each dimension we set the new lower and upper bounds. Then we copy the
5100 stride and calculate the offset for this dimension. We also work out
5101 what the stride of a packed array would be, and see if the two match.
5102 If the array need repacking, we set the stride to the values we just
5103 calculated, recalculate the offset and copy the array data.
5104 Code is also added to copy the data back at the end of the function.
/* Rebase a dummy array parameter's descriptor to the lower bounds
   declared in the procedure, repacking the data into a contiguous
   temporary via the libgfortran internal_pack routine when required.
   The setup code goes into BLOCK's init part; the matching
   unpack/free code goes into its cleanup part.
   NOTE(review): sampled extract -- local declarations, braces and
   some else-branches of the original are not visible here.  */
5108 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5109 gfc_wrapped_block * block)
5116 tree stmtInit, stmtCleanup;
5123 tree stride, stride2;
5133 /* Do nothing for pointer and allocatable arrays. */
5134 if (sym->attr.pointer || sym->attr.allocatable)
/* g77-style (descriptorless) dummies are handled separately.  */
5137 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5139 gfc_trans_g77_array (sym, block);
5143 gfc_save_backend_locus (&loc);
5144 gfc_set_backend_locus (&sym->declared_at);
5146 /* Descriptor type. */
5147 type = TREE_TYPE (tmpdesc);
5148 gcc_assert (GFC_ARRAY_TYPE_P (type));
5149 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5150 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5151 gfc_start_block (&init);
5153 if (sym->ts.type == BT_CHARACTER
5154 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5155 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
/* Runtime extent checks only apply to explicit-shape dummies and only
   when -fcheck=bounds is in effect.  */
5157 checkparm = (sym->as->type == AS_EXPLICIT
5158 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5160 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5161 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5163 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5165 /* For non-constant shape arrays we only check if the first dimension
5166 is contiguous. Repacking higher dimensions wouldn't gain us
5167 anything as we still don't know the array stride. */
5168 partial = gfc_create_var (boolean_type_node, "partial");
5169 TREE_USED (partial) = 1;
5170 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5171 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5172 gfc_index_one_node);
5173 gfc_add_modify (&init, partial, tmp);
5176 partial = NULL_TREE;
5178 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5179 here, however I think it does the right thing. */
5182 /* Set the first stride. */
5183 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5184 stride = gfc_evaluate_now (stride, &init);
/* A zero incoming stride means "assumed packed"; substitute 1.  */
5186 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5187 stride, gfc_index_zero_node);
5188 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5189 tmp, gfc_index_one_node, stride);
5190 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5191 gfc_add_modify (&init, stride, tmp);
5193 /* Allow the user to disable array repacking. */
5194 stmt_unpacked = NULL_TREE;
5198 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5199 /* A library call to repack the array if necessary. */
5200 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5201 stmt_unpacked = build_call_expr_loc (input_location,
5202 gfor_fndecl_in_pack, 1, tmp);
5204 stride = gfc_index_one_node;
5206 if (gfc_option.warn_array_temp)
5207 gfc_warning ("Creating array temporary at %L", &loc);
5210 /* This is for the case where the array data is used directly without
5211 calling the repack function. */
5212 if (no_repack || partial != NULL_TREE)
5213 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5215 stmt_packed = NULL_TREE;
5217 /* Assign the data pointer. */
5218 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5220 /* Don't repack unknown shape arrays when the first stride is 1. */
5221 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5222 partial, stmt_packed, stmt_unpacked);
5225 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5226 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
/* Accumulate the offset and total size one dimension at a time.  */
5228 offset = gfc_index_zero_node;
5229 size = gfc_index_one_node;
5231 /* Evaluate the bounds of the array. */
5232 for (n = 0; n < sym->as->rank; n++)
5234 if (checkparm || !sym->as->upper[n])
5236 /* Get the bounds of the actual parameter. */
5237 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5238 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5242 dubound = NULL_TREE;
5243 dlbound = NULL_TREE;
5246 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5247 if (!INTEGER_CST_P (lbound))
5249 gfc_init_se (&se, NULL);
5250 gfc_conv_expr_type (&se, sym->as->lower[n],
5251 gfc_array_index_type);
5252 gfc_add_block_to_block (&init, &se.pre);
5253 gfc_add_modify (&init, lbound, se.expr);
5256 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5257 /* Set the desired upper bound. */
5258 if (sym->as->upper[n])
5260 /* We know what we want the upper bound to be. */
5261 if (!INTEGER_CST_P (ubound))
5263 gfc_init_se (&se, NULL);
5264 gfc_conv_expr_type (&se, sym->as->upper[n],
5265 gfc_array_index_type);
5266 gfc_add_block_to_block (&init, &se.pre);
5267 gfc_add_modify (&init, ubound, se.expr);
5270 /* Check the sizes match. */
5273 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5277 temp = fold_build2_loc (input_location, MINUS_EXPR,
5278 gfc_array_index_type, ubound, lbound);
5279 temp = fold_build2_loc (input_location, PLUS_EXPR,
5280 gfc_array_index_type,
5281 gfc_index_one_node, temp);
5282 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5283 gfc_array_index_type, dubound,
5285 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5286 gfc_array_index_type,
5287 gfc_index_one_node, stride2);
5288 tmp = fold_build2_loc (input_location, NE_EXPR,
5289 gfc_array_index_type, temp, stride2);
/* %%ld survives asprintf as %ld so the runtime check can format the
   two extents passed below.  */
5290 asprintf (&msg, "Dimension %d of array '%s' has extent "
5291 "%%ld instead of %%ld", n+1, sym->name);
5293 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5294 fold_convert (long_integer_type_node, temp),
5295 fold_convert (long_integer_type_node, stride2));
5302 /* For assumed shape arrays move the upper bound by the same amount
5303 as the lower bound. */
5304 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5305 gfc_array_index_type, dubound, dlbound);
5306 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5307 gfc_array_index_type, tmp, lbound);
5308 gfc_add_modify (&init, ubound, tmp);
5310 /* The offset of this dimension. offset = offset - lbound * stride. */
5311 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5313 offset = fold_build2_loc (input_location, MINUS_EXPR,
5314 gfc_array_index_type, offset, tmp);
5316 /* The size of this dimension, and the stride of the next. */
5317 if (n + 1 < sym->as->rank)
5319 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5321 if (no_repack || partial != NULL_TREE)
5323 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5325 /* Figure out the stride if not a known constant. */
5326 if (!INTEGER_CST_P (stride))
5329 stmt_packed = NULL_TREE;
5332 /* Calculate stride = size * (ubound + 1 - lbound). */
5333 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5334 gfc_array_index_type,
5335 gfc_index_one_node, lbound);
5336 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5337 gfc_array_index_type, ubound, tmp);
5338 size = fold_build2_loc (input_location, MULT_EXPR,
5339 gfc_array_index_type, size, tmp);
5343 /* Assign the stride. */
5344 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5345 tmp = fold_build3_loc (input_location, COND_EXPR,
5346 gfc_array_index_type, partial,
5347 stmt_unpacked, stmt_packed);
5349 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5350 gfc_add_modify (&init, stride, tmp);
5355 stride = GFC_TYPE_ARRAY_SIZE (type);
5357 if (stride && !INTEGER_CST_P (stride))
5359 /* Calculate size = stride * (ubound + 1 - lbound). */
5360 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5361 gfc_array_index_type,
5362 gfc_index_one_node, lbound);
5363 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5364 gfc_array_index_type,
5366 tmp = fold_build2_loc (input_location, MULT_EXPR,
5367 gfc_array_index_type,
5368 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5369 gfc_add_modify (&init, stride, tmp);
5374 gfc_trans_array_cobounds (type, &init, sym);
5376 /* Set the offset. */
5377 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5378 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5380 gfc_trans_vla_type_sizes (sym, &init);
5382 stmtInit = gfc_finish_block (&init);
5384 /* Only do the entry/initialization code if the arg is present. */
5385 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5386 optional_arg = (sym->attr.optional
5387 || (sym->ns->proc_name->attr.entry_master
5388 && sym->attr.dummy));
5391 tmp = gfc_conv_expr_present (sym);
5392 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
5393 build_empty_stmt (input_location));
/* Build cleanup code: copy the (possibly modified) packed data back
   unless the dummy is INTENT(IN), then free the packed temporary.  */
5398 stmtCleanup = NULL_TREE;
5401 stmtblock_t cleanup;
5402 gfc_start_block (&cleanup);
5404 if (sym->attr.intent != INTENT_IN)
5406 /* Copy the data back. */
5407 tmp = build_call_expr_loc (input_location,
5408 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
5409 gfc_add_expr_to_block (&cleanup, tmp);
5412 /* Free the temporary. */
5413 tmp = gfc_call_free (tmpdesc);
5414 gfc_add_expr_to_block (&cleanup, tmp);
5416 stmtCleanup = gfc_finish_block (&cleanup);
5418 /* Only do the cleanup if the array was repacked. */
5419 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
5420 tmp = gfc_conv_descriptor_data_get (tmp);
5421 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
5423 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5424 build_empty_stmt (input_location));
5428 tmp = gfc_conv_expr_present (sym);
5429 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5430 build_empty_stmt (input_location));
5434 /* We don't need to free any memory allocated by internal_pack as it will
5435 be freed at the end of the function by pop_context. */
5436 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
5438 gfc_restore_backend_locus (&loc);
5442 /* Calculate the overall offset, including subreferences. */
/* Compute the address of the first element PARM should point at:
   start from DESC's data plus OFFSET, then, when SUBREF is set, walk
   the remaining component/substring/element subreferences of EXPR and
   finally store the resulting address into PARM's data field.
   NOTE(review): sampled extract -- local declarations, switch labels
   and some braces of the original are not visible here.  */
5444 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
5445 bool subref, gfc_expr *expr)
5455 /* If offset is NULL and this is not a subreferenced array, there is
5457 if (offset == NULL_TREE)
5460 offset = gfc_index_zero_node;
/* Index the raw data pointer by OFFSET to reach the base element.  */
5465 tmp = gfc_conv_array_data (desc);
5466 tmp = build_fold_indirect_ref_loc (input_location,
5468 tmp = gfc_build_array_ref (tmp, offset, NULL);
5470 /* Offset the data pointer for pointer assignments from arrays with
5471 subreferences; e.g. my_integer => my_type(:)%integer_component. */
5474 /* Go past the array reference. */
5475 for (ref = expr->ref; ref; ref = ref->next)
5476 if (ref->type == REF_ARRAY &&
5477 ref->u.ar.type != AR_ELEMENT)
5483 /* Calculate the offset for each subsequent subreference. */
5484 for (; ref; ref = ref->next)
/* Component reference: descend into the derived-type field.  */
5489 field = ref->u.c.component->backend_decl;
5490 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
5491 tmp = fold_build3_loc (input_location, COMPONENT_REF,
5493 tmp, field, NULL_TREE);
/* Substring reference: index by the (character) start position.  */
5497 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
5498 gfc_init_se (&start, NULL);
5499 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
5500 gfc_add_block_to_block (block, &start.pre);
5501 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
/* Array-element reference: flatten the subscripts into one index.  */
5505 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
5506 && ref->u.ar.type == AR_ELEMENT)
5508 /* TODO - Add bounds checking. */
5509 stride = gfc_index_one_node;
5510 index = gfc_index_zero_node;
5511 for (n = 0; n < ref->u.ar.dimen; n++)
5516 /* Update the index. */
5517 gfc_init_se (&start, NULL);
5518 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
5519 itmp = gfc_evaluate_now (start.expr, block);
5520 gfc_init_se (&start, NULL);
5521 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
5522 jtmp = gfc_evaluate_now (start.expr, block);
5523 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5524 gfc_array_index_type, itmp, jtmp);
5525 itmp = fold_build2_loc (input_location, MULT_EXPR,
5526 gfc_array_index_type, itmp, stride);
5527 index = fold_build2_loc (input_location, PLUS_EXPR,
5528 gfc_array_index_type, itmp, index);
5529 index = gfc_evaluate_now (index, block);
5531 /* Update the stride. */
5532 gfc_init_se (&start, NULL);
5533 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
5534 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5535 gfc_array_index_type, start.expr,
5537 itmp = fold_build2_loc (input_location, PLUS_EXPR,
5538 gfc_array_index_type,
5539 gfc_index_one_node, itmp);
5540 stride = fold_build2_loc (input_location, MULT_EXPR,
5541 gfc_array_index_type, stride, itmp);
5542 stride = gfc_evaluate_now (stride, block);
5545 /* Apply the index to obtain the array element. */
5546 tmp = gfc_build_array_ref (tmp, index, NULL);
5556 /* Set the target data pointer. */
5557 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
5558 gfc_conv_descriptor_data_set (block, parm, offset);
5562 /* gfc_conv_expr_descriptor needs the string length of an expression
5563 so that the size of the temporary can be obtained. This is done
5564 by adding up the string lengths of all the elements in the
5565 expression. Functions with non-constant expressions have their
5566 string lengths mapped onto the actual arguments using the
5567 interface mapping machinery in trans-expr.c. */
/* Determine the string length of array expression EXPR for use by
   gfc_conv_expr_descriptor: store it in expr->ts.u.cl->backend_decl,
   emitting any run-time computation into se->pre (and se->post for
   mapped function lengths).  Recurses into operator operands and maps
   dummy-argument lengths of function calls onto the actual arguments.
   NOTE(review): sampled extract -- case labels, braces and return
   statements of the original switch are not visible here.  */
5569 get_array_charlen (gfc_expr *expr, gfc_se *se)
5571 gfc_interface_mapping mapping;
5572 gfc_formal_arglist *formal;
5573 gfc_actual_arglist *arg;
/* Constant lengths need no mapping; just make sure the backend decl
   exists.  */
5576 if (expr->ts.u.cl->length
5577 && gfc_is_constant_expr (expr->ts.u.cl->length))
5579 if (!expr->ts.u.cl->backend_decl)
5580 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5584 switch (expr->expr_type)
/* Operator expression: recurse into the first operand.  */
5587 get_array_charlen (expr->value.op.op1, se);
5589 /* For parentheses the expression ts.u.cl is identical. */
5590 if (expr->value.op.op == INTRINSIC_PARENTHESES)
5593 expr->ts.u.cl->backend_decl =
5594 gfc_create_var (gfc_charlen_type_node, "sln");
5596 if (expr->value.op.op2)
5598 get_array_charlen (expr->value.op.op2, se);
5600 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
5602 /* Add the string lengths and assign them to the expression
5603 string length backend declaration. */
5604 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5605 fold_build2_loc (input_location, PLUS_EXPR,
5606 gfc_charlen_type_node,
5607 expr->value.op.op1->ts.u.cl->backend_decl,
5608 expr->value.op.op2->ts.u.cl->backend_decl));
/* Unary operator: the length is that of the single operand.  */
5611 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5612 expr->value.op.op1->ts.u.cl->backend_decl);
/* Function call: intrinsics and constant lengths are simple...  */
5616 if (expr->value.function.esym == NULL
5617 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
5619 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5623 /* Map expressions involving the dummy arguments onto the actual
5624 argument expressions. */
5625 gfc_init_interface_mapping (&mapping);
5626 formal = expr->symtree->n.sym->formal;
5627 arg = expr->value.function.actual;
5629 /* Set se = NULL in the calls to the interface mapping, to suppress any
5631 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
5636 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
5639 gfc_init_se (&tse, NULL);
5641 /* Build the expression for the character length and convert it. */
5642 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
5644 gfc_add_block_to_block (&se->pre, &tse.pre);
5645 gfc_add_block_to_block (&se->post, &tse.post);
5646 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
/* Clamp to zero: a negative specified length means length zero.  */
5647 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
5648 gfc_charlen_type_node, tse.expr,
5649 build_int_cst (gfc_charlen_type_node, 0));
5650 expr->ts.u.cl->backend_decl = tse.expr;
5651 gfc_free_interface_mapping (&mapping);
/* Default: fall back to the generic string-length translation.  */
5655 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5661 /* Helper function to check dimensions. */
/* Return whether the scalarizer dimensions of SS are permuted, i.e.
   whether any scalarizer dimension N refers to a different array
   dimension (ss->dim[n] != n).  NOTE(review): the return statements
   of this function are not visible in this sampled extract.  */
5663 transposed_dims (gfc_ss *ss)
5667 for (n = 0; n < ss->dimen; n++)
5668 if (ss->dim[n] != n)
5673 /* Convert an array for passing as an actual argument. Expressions and
5674 vector subscripts are evaluated and stored in a temporary, which is then
5675 passed. For whole arrays the descriptor is passed. For array sections
5676 a modified copy of the descriptor is passed, but using the original data.
5678 This function is also used for array pointer assignments, and there
5681 - se->want_pointer && !se->direct_byref
5682 EXPR is an actual argument. On exit, se->expr contains a
5683 pointer to the array descriptor.
5685 - !se->want_pointer && !se->direct_byref
5686 EXPR is an actual argument to an intrinsic function or the
5687 left-hand side of a pointer assignment. On exit, se->expr
5688 contains the descriptor for EXPR.
5690 - !se->want_pointer && se->direct_byref
5691 EXPR is the right-hand side of a pointer assignment and
5692 se->expr is the descriptor for the previously-evaluated
5693 left-hand side. The function creates an assignment from
5697 The se->force_tmp flag disables the non-copying descriptor optimization
5698 that is used for transpose. It may be used in cases where there is an
5699 alias between the transpose argument and another argument in the same
/* Convert array expression EXPR into a descriptor, as described in the
   comment block above.  Depending on the expression this either reuses
   the existing descriptor directly (full arrays), builds a new section
   descriptor pointing at the original data, or copies the expression
   into a temporary via the scalarizer.
   NOTE(review): sampled extract -- local declarations, case labels,
   braces and several statements of the original are not visible
   here.  */
5703 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
5706 gfc_array_info *info;
5715 bool subref_array_target = false;
5718 gcc_assert (ss != NULL);
5719 gcc_assert (ss != gfc_ss_terminator);
5721 /* Special case things we know we can pass easily. */
5722 switch (expr->expr_type)
5725 /* If we have a linear array section, we can pass it directly.
5726 Otherwise we need to copy it into a temporary. */
5728 gcc_assert (ss->type == GFC_SS_SECTION);
5729 gcc_assert (ss->expr == expr);
5730 info = &ss->data.info;
5732 /* Get the descriptor for the array. */
5733 gfc_conv_ss_descriptor (&se->pre, ss, 0);
5734 desc = info->descriptor;
/* A subreference target (e.g. a%b(:)) on the RHS of a pointer
   assignment is handled without a temporary.  */
5736 subref_array_target = se->direct_byref && is_subref_array (expr);
5737 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
5738 && !subref_array_target;
5745 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5747 /* Create a new descriptor if the array doesn't have one. */
5750 else if (info->ref->u.ar.type == AR_FULL)
5752 else if (se->direct_byref)
5755 full = gfc_full_array_ref_p (info->ref, NULL);
5757 if (full && !transposed_dims (ss))
5759 if (se->direct_byref && !se->byref_noassign)
5761 /* Copy the descriptor for pointer assignments. */
5762 gfc_add_modify (&se->pre, se->expr, desc);
5764 /* Add any offsets from subreferences. */
5765 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
5766 subref_array_target, expr);
5768 else if (se->want_pointer)
5770 /* We pass full arrays directly. This means that pointers and
5771 allocatable arrays should also work. */
5772 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5779 if (expr->ts.type == BT_CHARACTER)
5780 se->string_length = gfc_get_expr_charlen (expr);
5788 /* We don't need to copy data in some cases. */
5789 arg = gfc_get_noncopying_intrinsic_argument (expr);
5792 /* This is a call to transpose... */
5793 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
5794 /* ... which has already been handled by the scalarizer, so
5795 that we just need to get its argument's descriptor. */
5796 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
5800 /* A transformational function return value will be a temporary
5801 array descriptor. We still need to go through the scalarizer
5802 to create the descriptor. Elemental functions are handled as
5803 arbitrary expressions, i.e. copy to a temporary. */
5805 if (se->direct_byref)
5807 gcc_assert (ss->type == GFC_SS_FUNCTION && ss->expr == expr);
5809 /* For pointer assignments pass the descriptor directly. */
5813 gcc_assert (se->ss == ss);
5814 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
5815 gfc_conv_expr (se, expr);
5819 if (ss->expr != expr || ss->type != GFC_SS_FUNCTION)
5821 if (ss->expr != expr)
5822 /* Elemental function. */
5823 gcc_assert ((expr->value.function.esym != NULL
5824 && expr->value.function.esym->attr.elemental)
5825 || (expr->value.function.isym != NULL
5826 && expr->value.function.isym->elemental));
5828 gcc_assert (ss->type == GFC_SS_INTRINSIC);
5831 if (expr->ts.type == BT_CHARACTER
5832 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
5833 get_array_charlen (expr, se);
5839 /* Transformational function. */
5840 info = &ss->data.info;
5846 /* Constant array constructors don't need a temporary. */
5847 if (ss->type == GFC_SS_CONSTRUCTOR
5848 && expr->ts.type != BT_CHARACTER
5849 && gfc_constant_array_constructor_p (expr->value.constructor))
5852 info = &ss->data.info;
5862 /* Something complicated. Copy it into a temporary. */
5868 /* If we are creating a temporary, we don't need to bother about aliases
5873 gfc_init_loopinfo (&loop);
5875 /* Associate the SS with the loop. */
5876 gfc_add_ss_to_loop (&loop, ss);
5878 /* Tell the scalarizer not to bother creating loop variables, etc. */
5880 loop.array_parameter = 1;
5882 /* The right-hand side of a pointer assignment mustn't use a temporary. */
5883 gcc_assert (!se->direct_byref);
5885 /* Setup the scalarizing loops and bounds. */
5886 gfc_conv_ss_startstride (&loop);
5890 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
5891 get_array_charlen (expr, se);
5893 /* Tell the scalarizer to make a temporary. */
5894 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
5895 ((expr->ts.type == BT_CHARACTER)
5896 ? expr->ts.u.cl->backend_decl
5900 se->string_length = loop.temp_ss->string_length;
5901 gcc_assert (loop.temp_ss->dimen == loop.dimen);
5902 gfc_add_ss_to_loop (&loop, loop.temp_ss);
5905 gfc_conv_loop_setup (&loop, & expr->where);
5909 /* Copy into a temporary and pass that. We don't need to copy the data
5910 back because expressions and vector subscripts must be INTENT_IN. */
5911 /* TODO: Optimize passing function return values. */
5915 /* Start the copying loops. */
5916 gfc_mark_ss_chain_used (loop.temp_ss, 1);
5917 gfc_mark_ss_chain_used (ss, 1);
5918 gfc_start_scalarized_body (&loop, &block);
5920 /* Copy each data element. */
5921 gfc_init_se (&lse, NULL);
5922 gfc_copy_loopinfo_to_se (&lse, &loop);
5923 gfc_init_se (&rse, NULL);
5924 gfc_copy_loopinfo_to_se (&rse, &loop);
5926 lse.ss = loop.temp_ss;
5929 gfc_conv_scalarized_array_ref (&lse, NULL);
/* Character elements may come back as pointers; dereference them so
   the scalar assignment below sees the value.  */
5930 if (expr->ts.type == BT_CHARACTER)
5932 gfc_conv_expr (&rse, expr);
5933 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
5934 rse.expr = build_fold_indirect_ref_loc (input_location,
5938 gfc_conv_expr_val (&rse, expr);
5940 gfc_add_block_to_block (&block, &rse.pre);
5941 gfc_add_block_to_block (&block, &lse.pre);
5943 lse.string_length = rse.string_length;
5944 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
5945 expr->expr_type == EXPR_VARIABLE
5946 || expr->expr_type == EXPR_ARRAY, true);
5947 gfc_add_expr_to_block (&block, tmp);
5949 /* Finish the copying loops. */
5950 gfc_trans_scalarizing_loops (&loop, &block);
5952 desc = loop.temp_ss->data.info.descriptor;
5954 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
5956 desc = info->descriptor;
5957 se->string_length = ss->string_length;
5961 /* We pass sections without copying to a temporary. Make a new
5962 descriptor and point it at the section we want. The loop variable
5963 limits will be the limits of the section.
5964 A function may decide to repack the array to speed up access, but
5965 we're not bothered about that here. */
5966 int dim, ndim, codim;
5974 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
5976 if (se->want_coarray)
5978 gfc_array_ref *ar = &info->ref->u.ar;
5980 codim = gfc_get_corank (expr);
5981 for (n = 0; n < codim - 1; n++)
5983 /* Make sure we are not lost somehow. */
5984 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
5986 /* Make sure the call to gfc_conv_section_startstride won't
5987 generate unnecessary code to calculate stride. */
5988 gcc_assert (ar->stride[n + ndim] == NULL);
5990 gfc_conv_section_startstride (&loop, ss, n + ndim);
5991 loop.from[n + loop.dimen] = info->start[n + ndim];
5992 loop.to[n + loop.dimen] = info->end[n + ndim];
5995 gcc_assert (n == codim - 1);
5996 evaluate_bound (&loop.pre, info->start, ar->start,
5997 info->descriptor, n + ndim, true);
5998 loop.from[n + loop.dimen] = info->start[n + ndim];
6003 /* Set the string_length for a character array. */
6004 if (expr->ts.type == BT_CHARACTER)
6005 se->string_length = gfc_get_expr_charlen (expr);
6007 desc = info->descriptor;
6008 if (se->direct_byref && !se->byref_noassign)
6010 /* For pointer assignments we fill in the destination. */
6012 parmtype = TREE_TYPE (parm);
6016 /* Otherwise make a new one. */
6017 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6018 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6019 loop.from, loop.to, 0,
6020 GFC_ARRAY_UNKNOWN, false);
6021 parm = gfc_create_var (parmtype, "parm");
6024 offset = gfc_index_zero_node;
6026 /* The following can be somewhat confusing. We have two
6027 descriptors, a new one and the original array.
6028 {parm, parmtype, dim} refer to the new one.
6029 {desc, type, n, loop} refer to the original, which may be
6030 a descriptorless array.
6031 The bounds of the scalarization are the bounds of the section.
6032 We don't have to worry about numeric overflows when calculating
6033 the offsets because all elements are within the array data. */
6035 /* Set the dtype. */
6036 tmp = gfc_conv_descriptor_dtype (parm);
6037 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6039 /* Set offset for assignments to pointer only to zero if it is not
6041 if (se->direct_byref
6042 && info->ref && info->ref->u.ar.type != AR_FULL)
6043 base = gfc_index_zero_node;
6044 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6045 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
6049 for (n = 0; n < ndim; n++)
6051 stride = gfc_conv_array_stride (desc, n);
6053 /* Work out the offset. */
6055 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6057 gcc_assert (info->subscript[n]
6058 && info->subscript[n]->type == GFC_SS_SCALAR);
6059 start = info->subscript[n]->data.scalar.expr;
6063 /* Evaluate and remember the start of the section. */
6064 start = info->start[n];
6065 stride = gfc_evaluate_now (stride, &loop.pre);
/* offset += (start - lbound) * stride for this dimension.  */
6068 tmp = gfc_conv_array_lbound (desc, n);
6069 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6071 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6073 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6077 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6079 /* For elemental dimensions, we only need the offset. */
6083 /* Vector subscripts need copying and are handled elsewhere. */
6085 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6087 /* look for the corresponding scalarizer dimension: dim. */
6088 for (dim = 0; dim < ndim; dim++)
6089 if (ss->dim[dim] == n)
6092 /* loop exited early: the DIM being looked for has been found. */
6093 gcc_assert (dim < ndim);
6095 /* Set the new lower bound. */
6096 from = loop.from[dim];
6099 /* If we have an array section or are assigning make sure that
6100 the lower bound is 1. References to the full
6101 array should otherwise keep the original bounds. */
6103 || info->ref->u.ar.type != AR_FULL)
6104 && !integer_onep (from))
6106 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6107 gfc_array_index_type, gfc_index_one_node,
6109 to = fold_build2_loc (input_location, PLUS_EXPR,
6110 gfc_array_index_type, to, tmp);
6111 from = gfc_index_one_node;
6113 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6114 gfc_rank_cst[dim], from);
6116 /* Set the new upper bound. */
6117 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6118 gfc_rank_cst[dim], to);
6120 /* Multiply the stride by the section stride to get the
6122 stride = fold_build2_loc (input_location, MULT_EXPR,
6123 gfc_array_index_type,
6124 stride, info->stride[n]);
6126 if (se->direct_byref
6128 && info->ref->u.ar.type != AR_FULL)
6130 base = fold_build2_loc (input_location, MINUS_EXPR,
6131 TREE_TYPE (base), base, stride);
6133 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6135 tmp = gfc_conv_array_lbound (desc, n);
6136 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6137 TREE_TYPE (base), tmp, loop.from[dim]);
6138 tmp = fold_build2_loc (input_location, MULT_EXPR,
6139 TREE_TYPE (base), tmp,
6140 gfc_conv_array_stride (desc, n));
6141 base = fold_build2_loc (input_location, PLUS_EXPR,
6142 TREE_TYPE (base), tmp, base);
6145 /* Store the new stride. */
6146 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6147 gfc_rank_cst[dim], stride);
/* Fill in the codimension bounds for coarray descriptors.  */
6150 for (n = loop.dimen; n < loop.dimen + codim; n++)
6152 from = loop.from[n];
6154 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6155 gfc_rank_cst[n], from);
6156 if (n < loop.dimen + codim - 1)
6157 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6158 gfc_rank_cst[n], to);
6161 if (se->data_not_needed)
6162 gfc_conv_descriptor_data_set (&loop.pre, parm,
6163 gfc_index_zero_node);
6165 /* Point the data pointer at the 1st element in the section. */
6166 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6167 subref_array_target, expr);
6169 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6170 && !se->data_not_needed)
6172 /* Set the offset. */
6173 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6177 /* Only the callee knows what the correct offset is, so just set
6179 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6184 if (!se->direct_byref || se->byref_noassign)
6186 /* Get a pointer to the new descriptor. */
6187 if (se->want_pointer)
6188 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
/* Flush any code generated during setup/teardown of the loops.  */
6193 gfc_add_block_to_block (&se->pre, &loop.pre);
6194 gfc_add_block_to_block (&se->post, &loop.post);
6196 /* Cleanup the scalarizer. */
6197 gfc_cleanup_loop (&loop);
6200 /* Helper function for gfc_conv_array_parameter if array size needs to be
   computed.  Stores the size in bytes of the array DESC/EXPR in *SIZE.
   NOTE(review): several original lines (return type, braces, the final
   else-branch header) are missing from this extract.  */
6204 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
/* A GFC array type carries its total size directly.  */
6207   if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6208     *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
/* For rank > 1 descriptors, defer to the runtime helper size0.  */
6209   else if (expr->rank > 1)
6210     *size = build_call_expr_loc (input_location,
6211 			     gfor_fndecl_size0, 1,
6212 			     gfc_build_addr_expr (NULL, desc));
/* Otherwise: element count of dimension 0 is max (ubound - lbound + 1, 0).  */
6215       tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6216       tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6218       *size = fold_build2_loc (input_location, MINUS_EXPR,
6219 			       gfc_array_index_type, ubound, lbound);
6220       *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6221 			       *size, gfc_index_one_node);
/* Clamp negative extents (zero-sized arrays) to zero.  */
6222       *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6223 			       *size, gfc_index_zero_node);
/* Scale the element count by the element size to get bytes.  */
6225       elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6226       *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6227 			       *size, fold_convert (gfc_array_index_type, elem));
6230 /* Convert an array for passing as an actual parameter. */
6231 /* TODO: Optimize passing g77 arrays. */
/* Emit code to pass array EXPR as an actual argument.  G77 selects the
   f77-style convention (bare data pointer, packing a temporary if the
   argument is not contiguous); FSYM and PROC_NAME identify the formal
   argument for diagnostics.  A SIZE out-parameter is also used below —
   presumably declared on a line missing from this extract; confirm against
   the full source.  NOTE(review): declarations, braces and some else
   branches are absent here, so the visible control flow is incomplete.  */
6234 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6235 			  const gfc_symbol *fsym, const char *proc_name,
6240   tree tmp = NULL_TREE;
6242   tree parent = DECL_CONTEXT (current_function_decl);
6243   bool full_array_var;
6244   bool this_array_result;
6247   bool array_constructor;
6248   bool good_allocatable;
6249   bool ultimate_ptr_comp;
6250   bool ultimate_alloc_comp;
6255   ultimate_ptr_comp = false;
6256   ultimate_alloc_comp = false;
/* Walk to the last reference to see whether the ultimate component is a
   pointer or an allocatable.  */
6258   for (ref = expr->ref; ref; ref = ref->next)
6260       if (ref->next == NULL)
6263       if (ref->type == REF_COMPONENT)
6265 	  ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6266 	  ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6270   full_array_var = false;
6273   if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6274     full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6276   sym = full_array_var ? expr->symtree->n.sym : NULL;
6278   /* The symbol should have an array specification. */
6279   gcc_assert (!sym || sym->as || ref->u.ar.as);
/* Character array constructors need their string length computed first.  */
6281   if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6283       get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6284       expr->ts.u.cl->backend_decl = tmp;
6285       se->string_length = tmp;
6288   /* Is this the result of the enclosing procedure? */
6289   this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6290   if (this_array_result
6291       && (sym->backend_decl != current_function_decl)
6292       && (sym->backend_decl != parent))
6293     this_array_result = false;
6295   /* Passing address of the array if it is not pointer or assumed-shape. */
6296   if (full_array_var && g77 && !this_array_result)
6298       tmp = gfc_get_symbol_decl (sym);
6300       if (sym->ts.type == BT_CHARACTER)
6301 	se->string_length = sym->ts.u.cl->backend_decl;
6303       if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6305 	  gfc_conv_expr_descriptor (se, expr, ss);
6306 	  se->expr = gfc_conv_array_data (se->expr);
6310       if (!sym->attr.pointer
6312 	  && sym->as->type != AS_ASSUMED_SHAPE
6313 	  && !sym->attr.allocatable)
6315 	  /* Some variables are declared directly, others are declared as
6316 	     pointers and allocated on the heap. */
6317 	  if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6320 	    se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6322 	    array_parameter_size (tmp, expr, size);
6326       if (sym->attr.allocatable)
6328 	  if (sym->attr.dummy || sym->attr.result)
6330 	      gfc_conv_expr_descriptor (se, expr, ss);
6334 	    array_parameter_size (tmp, expr, size);
6335 	  se->expr = gfc_conv_array_data (tmp);
6340   /* A convenient reduction in scope. */
6341   contiguous = g77 && !this_array_result && contiguous;
6343   /* There is no need to pack and unpack the array, if it is contiguous
6344      and not a deferred- or assumed-shape array, or if it is simply
     contiguous.  */
6346   no_pack = ((sym && sym->as
6347 	      && !sym->attr.pointer
6348 	      && sym->as->type != AS_DEFERRED
6349 	      && sym->as->type != AS_ASSUMED_SHAPE)
6351 	     (ref && ref->u.ar.as
6352 	      && ref->u.ar.as->type != AS_DEFERRED
6353 	      && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6355 	     gfc_is_simply_contiguous (expr, false));
6357   no_pack = contiguous && no_pack;
6359   /* Array constructors are always contiguous and do not need packing. */
6360   array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6362   /* Same is true of contiguous sections from allocatable variables. */
6363   good_allocatable = contiguous
6365 		       && expr->symtree->n.sym->attr.allocatable;
6367   /* Or ultimate allocatable components. */
6368   ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
/* Any of the above cases: pass the raw data pointer without packing.  */
6370   if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6372       gfc_conv_expr_descriptor (se, expr, ss);
6373       if (expr->ts.type == BT_CHARACTER)
6374 	se->string_length = expr->ts.u.cl->backend_decl;
6376 	array_parameter_size (se->expr, expr, size);
6377       se->expr = gfc_conv_array_data (se->expr);
6381   if (this_array_result)
6383       /* Result of the enclosing function. */
6384       gfc_conv_expr_descriptor (se, expr, ss);
6386 	array_parameter_size (se->expr, expr, size);
6387       se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6389       if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
6390 	      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
6391 	se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
6398       /* Every other type of array. */
6399       se->want_pointer = 1;
6400       gfc_conv_expr_descriptor (se, expr, ss);
6402 	array_parameter_size (build_fold_indirect_ref_loc (input_location,
6407   /* Deallocate the allocatable components of structures that are
     not variables, once the argument expression has been used.  */
6409   if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
6410 	&& expr->ts.u.derived->attr.alloc_comp
6411 	&& expr->expr_type != EXPR_VARIABLE)
6413       tmp = build_fold_indirect_ref_loc (input_location, se->expr);
6414       tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
6416       /* The components shall be deallocated before their containing entity. */
6417       gfc_prepend_expr_to_block (&se->post, tmp);
/* Pack the array into a contiguous temporary when the f77 convention or a
   CONTIGUOUS formal requires it and the actual is not simply contiguous.  */
6420   if (g77 || (fsym && fsym->attr.contiguous
6421 	      && !gfc_is_simply_contiguous (expr, false)))
6423       tree origptr = NULL_TREE;
6427       /* For contiguous arrays, save the original value of the descriptor. */
6430 	  origptr = gfc_create_var (pvoid_type_node, "origptr");
6431 	  tmp = build_fold_indirect_ref_loc (input_location, desc);
6432 	  tmp = gfc_conv_array_data (tmp);
6433 	  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6434 				 TREE_TYPE (origptr), origptr,
6435 				 fold_convert (TREE_TYPE (origptr), tmp));
6436 	  gfc_add_expr_to_block (&se->pre, tmp);
6439       /* Repack the array. */
6440       if (gfc_option.warn_array_temp)
6443 	    gfc_warning ("Creating array temporary at %L for argument '%s'",
6444 			 &expr->where, fsym->name);
6446 	    gfc_warning ("Creating array temporary at %L", &expr->where);
6449       ptr = build_call_expr_loc (input_location,
6450 			     gfor_fndecl_in_pack, 1, desc);
/* An absent optional argument must stay a null pointer after packing.  */
6452       if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6454 	  tmp = gfc_conv_expr_present (sym);
6455 	  ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
6456 			    tmp, fold_convert (TREE_TYPE (se->expr), ptr),
6457 			    fold_convert (TREE_TYPE (se->expr), null_pointer_node));
6460       ptr = gfc_evaluate_now (ptr, &se->pre);
6462       /* Use the packed data for the actual argument, except for contiguous arrays,
6463 	 where the descriptor's data component is set. */
6468 	  tmp = build_fold_indirect_ref_loc (input_location, desc);
6469 	  gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
/* -fcheck=array-temps: report at run time when a temporary was made.  */
6472       if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
6476 	  if (fsym && proc_name)
6477 	    asprintf (&msg, "An array temporary was created for argument "
6478 		      "'%s' of procedure '%s'", fsym->name, proc_name);
6480 	    asprintf (&msg, "An array temporary was created");
6482 	  tmp = build_fold_indirect_ref_loc (input_location,
6484 	  tmp = gfc_conv_array_data (tmp);
6485 	  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6486 				 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6488 	  if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6489 	    tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6491 				   gfc_conv_expr_present (sym), tmp);
6493 	  gfc_trans_runtime_check (false, true, tmp, &se->pre,
6498       gfc_start_block (&block);
6500       /* Copy the data back. */
6501       if (fsym == NULL || fsym->attr.intent != INTENT_IN)
6503 	  tmp = build_call_expr_loc (input_location,
6504 				 gfor_fndecl_in_unpack, 2, desc, ptr);
6505 	  gfc_add_expr_to_block (&block, tmp);
6508       /* Free the temporary. */
6509       tmp = gfc_call_free (convert (pvoid_type_node, ptr));
6510       gfc_add_expr_to_block (&block, tmp);
6512       stmt = gfc_finish_block (&block);
6514       gfc_init_block (&block);
6515       /* Only if it was repacked. This code needs to be executed before the
6516 	 loop cleanup code. */
6517       tmp = build_fold_indirect_ref_loc (input_location,
6519       tmp = gfc_conv_array_data (tmp);
6520       tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6521 			     fold_convert (TREE_TYPE (tmp), ptr), tmp);
6523       if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6524 	tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6526 			       gfc_conv_expr_present (sym), tmp);
6528       tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6530       gfc_add_expr_to_block (&block, tmp);
6531       gfc_add_block_to_block (&block, &se->post);
6533       gfc_init_block (&se->post);
6535       /* Reset the descriptor pointer. */
6538 	  tmp = build_fold_indirect_ref_loc (input_location, desc);
6539 	  gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
6542       gfc_add_block_to_block (&se->post, &block);
6547 /* Generate code to deallocate an array, if it is allocated. */
/* DESCRIPTOR is the array descriptor; returns the statement tree that
   frees the data (if non-null) and clears the data pointer.  */
6550 gfc_trans_dealloc_allocated (tree descriptor)
6556   gfc_start_block (&block);
6558   var = gfc_conv_descriptor_data_get (descriptor);
6561   /* Call array_deallocate with an int * present in the second argument.
6562      Although it is ignored here, its presence ensures that arrays that
6563      are already deallocated are ignored. */
6564   tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
6565   gfc_add_expr_to_block (&block, tmp);
6567   /* Zero the data pointer.  This guards against double deallocation.  */
6568   tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6569 			 var, build_int_cst (TREE_TYPE (var), 0));
6570   gfc_add_expr_to_block (&block, tmp);
6572   return gfc_finish_block (&block);
6576 /* This helper function calculates the size in words of a full array. */
/* DECL is a descriptor, RANK its rank; emits setup code into BLOCK and
   returns (ubound - lbound + 1) * stride of the last dimension, i.e. the
   total element count of the array.  */
6579 get_full_array_size (stmtblock_t *block, tree decl, int rank)
/* Only the last dimension is needed: its extent times its stride covers
   the whole array.  */
6584   idx = gfc_rank_cst[rank - 1];
6585   nelems = gfc_conv_descriptor_ubound_get (decl, idx);
6586   tmp = gfc_conv_descriptor_lbound_get (decl, idx);
6587   tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
6589   tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6590 			 tmp, gfc_index_one_node);
6591   tmp = gfc_evaluate_now (tmp, block);
6593   nelems = gfc_conv_descriptor_stride_get (decl, idx);
6594   tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6596   return gfc_evaluate_now (tmp, block);
6600 /* Allocate dest to the same size as src, and copy src -> dest.
6601    If no_malloc is set, only the copy is done. */
/* Builds and returns a COND_EXPR: if SRC's data pointer is null, DEST is
   nulled; otherwise DEST is (optionally) allocated and the data copied via
   memcpy.  TYPE is the declared type, RANK the array rank (0 for scalars).
   NOTE(review): the scalar/array branch headers are missing from this
   extract; the scalar path (TYPE_SIZE_UNIT) and the descriptor path
   (get_full_array_size) are interleaved below.  */
6604 duplicate_allocatable (tree dest, tree src, tree type, int rank,
6614   /* If the source is null, set the destination to null. Then,
6615      allocate memory to the destination. */
6616   gfc_init_block (&block);
/* Scalar case: null DEST directly.  */
6620       tmp = null_pointer_node;
6621       tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
6622       gfc_add_expr_to_block (&block, tmp);
6623       null_data = gfc_finish_block (&block);
6625       gfc_init_block (&block);
/* Scalar case: allocate one element and copy it.  */
6626       size = TYPE_SIZE_UNIT (TREE_TYPE (type));
6629 	  tmp = gfc_call_malloc (&block, type, size);
6630 	  tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6631 				 dest, fold_convert (type, tmp));
6632 	  gfc_add_expr_to_block (&block, tmp);
6635       tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6636       tmp = build_call_expr_loc (input_location, tmp, 3,
/* Array case: null DEST's descriptor data pointer.  */
6641       gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
6642       null_data = gfc_finish_block (&block);
6644       gfc_init_block (&block);
/* Array case: size in bytes = element count * element size.  */
6645       nelems = get_full_array_size (&block, src, rank);
6646       tmp = fold_convert (gfc_array_index_type,
6647 			  TYPE_SIZE_UNIT (gfc_get_element_type (type)));
6648       size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6652 	  tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
6653 	  tmp = gfc_call_malloc (&block, tmp, size);
6654 	  gfc_conv_descriptor_data_set (&block, dest, tmp);
6657       /* We know the temporary and the value will be the same length,
6658 	 so can use memcpy. */
6659       tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6660       tmp = build_call_expr_loc (input_location,
6661 			tmp, 3, gfc_conv_descriptor_data_get (dest),
6662 			gfc_conv_descriptor_data_get (src), size);
6665   gfc_add_expr_to_block (&block, tmp);
6666   tmp = gfc_finish_block (&block);
6668   /* Null the destination if the source is null; otherwise do
6669      the allocate and copy. */
6673     null_cond = gfc_conv_descriptor_data_get (src);
6675   null_cond = convert (pvoid_type_node, null_cond);
6676   null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6677 			       null_cond, null_pointer_node);
6678   return build3_v (COND_EXPR, null_cond, tmp, null_data);
6682 /* Allocate dest to the same size as src, and copy data src -> dest. */
6685 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
6687 return duplicate_allocatable (dest, src, type, rank, false);
6691 /* Copy data src -> dest. */
6694 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
6696 return duplicate_allocatable (dest, src, type, rank, true);
6700 /* Recursively traverse an object of derived type, generating code to
6701    deallocate, nullify or copy allocatable components. This is the work horse
6702    function for the functions named in this enum. */
6704 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
6705       COPY_ONLY_ALLOC_COMP};
/* DER_TYPE is the derived type being traversed; DECL the backend object;
   DEST the destination for the copy purposes (NULL_TREE otherwise); RANK
   the array rank; PURPOSE one of the enum values above.  Returns the
   generated statement tree.  NOTE(review): declarations, braces, the
   `switch (purpose)` header and `break` statements are missing from this
   extract.  */
6708 structure_alloc_comps (gfc_symbol * der_type, tree decl,
6709 		       tree dest, int rank, int purpose)
6713   stmtblock_t fnblock;
6714   stmtblock_t loopbody;
6725   tree null_cond = NULL_TREE;
6727   gfc_init_block (&fnblock);
6729   decl_type = TREE_TYPE (decl);
6731   if ((POINTER_TYPE_P (decl_type) && rank != 0)
6732 	|| (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
6734     decl = build_fold_indirect_ref_loc (input_location,
6737   /* Just in case it gets dereferenced. */
6738   decl_type = TREE_TYPE (decl);
6740   /* If this is an array of derived types with allocatable components
6741      build a loop and recursively call this function. */
6742   if (TREE_CODE (decl_type) == ARRAY_TYPE
6743       || GFC_DESCRIPTOR_TYPE_P (decl_type))
6745       tmp = gfc_conv_array_data (decl);
6746       var = build_fold_indirect_ref_loc (input_location,
6749       /* Get the number of elements - 1 and set the counter. */
6750       if (GFC_DESCRIPTOR_TYPE_P (decl_type))
6752 	  /* Use the descriptor for an allocatable array. Since this
6753 	     is a full array reference, we only need the descriptor
6754 	     information from dimension = rank. */
6755 	  tmp = get_full_array_size (&fnblock, decl, rank);
6756 	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
6757 				 gfc_array_index_type, tmp,
6758 				 gfc_index_one_node);
/* Guard the whole loop on the array being allocated.  */
6760 	  null_cond = gfc_conv_descriptor_data_get (decl);
6761 	  null_cond = fold_build2_loc (input_location, NE_EXPR,
6762 				       boolean_type_node, null_cond,
6763 				       build_int_cst (TREE_TYPE (null_cond), 0));
6767 	  /* Otherwise use the TYPE_DOMAIN information. */
6768 	  tmp = array_type_nelts (decl_type);
6769 	  tmp = fold_convert (gfc_array_index_type, tmp);
6772       /* Remember that this is, in fact, the no. of elements - 1. */
6773       nelems = gfc_evaluate_now (tmp, &fnblock);
6774       index = gfc_create_var (gfc_array_index_type, "S");
6776       /* Build the body of the loop. */
6777       gfc_init_block (&loopbody);
6779       vref = gfc_build_array_ref (var, index, NULL);
6781       if (purpose == COPY_ALLOC_COMP)
6783 	  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
6785 	      tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
6786 	      gfc_add_expr_to_block (&fnblock, tmp);
6788 	  tmp = build_fold_indirect_ref_loc (input_location,
6789 					 gfc_conv_array_data (dest));
6790 	  dref = gfc_build_array_ref (tmp, index, NULL);
/* Recurse element-wise into the destination as well as the source.  */
6791 	  tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
6793       else if (purpose == COPY_ONLY_ALLOC_COMP)
6795 	  tmp = build_fold_indirect_ref_loc (input_location,
6796 					 gfc_conv_array_data (dest));
6797 	  dref = gfc_build_array_ref (tmp, index, NULL);
6798 	  tmp = structure_alloc_comps (der_type, vref, dref, rank,
6802 	tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
6804       gfc_add_expr_to_block (&loopbody, tmp);
6806       /* Build the loop and return. */
6807       gfc_init_loopinfo (&loop);
6809       loop.from[0] = gfc_index_zero_node;
6810       loop.loopvar[0] = index;
6811       loop.to[0] = nelems;
6812       gfc_trans_scalarizing_loops (&loop, &loopbody);
6813       gfc_add_block_to_block (&fnblock, &loop.pre);
6815       tmp = gfc_finish_block (&fnblock);
6816       if (null_cond != NULL_TREE)
6817 	tmp = build3_v (COND_EXPR, null_cond, tmp,
6818 			build_empty_stmt (input_location));
6823   /* Otherwise, act on the components or recursively call self to
6824      act on a chain of components. */
6825   for (c = der_type->components; c; c = c->next)
6827       bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
6828 				  || c->ts.type == BT_CLASS)
6829 				    && c->ts.u.derived->attr.alloc_comp;
6830       cdecl = c->backend_decl;
6831       ctype = TREE_TYPE (cdecl);
/* NOTE(review): the `switch (purpose)` line is missing here.  */
6835 	case DEALLOCATE_ALLOC_COMP:
6836 	  if (cmp_has_alloc_comps && !c->attr.pointer)
6838 	      /* Do not deallocate the components of ultimate pointer
		 components.  */
6840 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6841 				      decl, cdecl, NULL_TREE);
6842 	      rank = c->as ? c->as->rank : 0;
6843 	      tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6845 	      gfc_add_expr_to_block (&fnblock, tmp);
6848 	  if (c->attr.allocatable
6849 	      && (c->attr.dimension || c->attr.codimension))
6851 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6852 				      decl, cdecl, NULL_TREE);
6853 	      tmp = gfc_trans_dealloc_allocated (comp);
6854 	      gfc_add_expr_to_block (&fnblock, tmp);
6856 	  else if (c->attr.allocatable)
6858 	      /* Allocatable scalar components. */
6859 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6860 				      decl, cdecl, NULL_TREE);
6862 	      tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6864 	      gfc_add_expr_to_block (&fnblock, tmp);
/* Null the pointer after freeing, to make repeated deallocation safe.  */
6866 	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6867 				     void_type_node, comp,
6868 				     build_int_cst (TREE_TYPE (comp), 0));
6869 	      gfc_add_expr_to_block (&fnblock, tmp);
6871 	  else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6873 	      /* Allocatable scalar CLASS components. */
6874 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6875 				      decl, cdecl, NULL_TREE);
6877 	      /* Add reference to '_data' component. */
6878 	      tmp = CLASS_DATA (c)->backend_decl;
6879 	      comp = fold_build3_loc (input_location, COMPONENT_REF,
6880 				      TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6882 	      tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6883 						       CLASS_DATA (c)->ts);
6884 	      gfc_add_expr_to_block (&fnblock, tmp);
6886 	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6887 				     void_type_node, comp,
6888 				     build_int_cst (TREE_TYPE (comp), 0));
6889 	      gfc_add_expr_to_block (&fnblock, tmp);
6893 	case NULLIFY_ALLOC_COMP:
/* Pointer components are left alone; only allocatables are nulled.  */
6894 	  if (c->attr.pointer)
6896 	  else if (c->attr.allocatable
6897 		   && (c->attr.dimension|| c->attr.codimension))
6899 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6900 				      decl, cdecl, NULL_TREE);
6901 	      gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
6903 	  else if (c->attr.allocatable)
6905 	      /* Allocatable scalar components. */
6906 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6907 				      decl, cdecl, NULL_TREE);
6908 	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6909 				     void_type_node, comp,
6910 				     build_int_cst (TREE_TYPE (comp), 0));
6911 	      gfc_add_expr_to_block (&fnblock, tmp);
6913 	  else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6915 	      /* Allocatable scalar CLASS components. */
6916 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6917 				      decl, cdecl, NULL_TREE);
6918 	      /* Add reference to '_data' component. */
6919 	      tmp = CLASS_DATA (c)->backend_decl;
6920 	      comp = fold_build3_loc (input_location, COMPONENT_REF,
6921 				      TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6922 	      tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6923 				     void_type_node, comp,
6924 				     build_int_cst (TREE_TYPE (comp), 0));
6925 	      gfc_add_expr_to_block (&fnblock, tmp);
6927 	  else if (cmp_has_alloc_comps)
6929 	      comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6930 				      decl, cdecl, NULL_TREE);
6931 	      rank = c->as ? c->as->rank : 0;
6932 	      tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6934 	      gfc_add_expr_to_block (&fnblock, tmp);
6938 	case COPY_ALLOC_COMP:
6939 	  if (c->attr.pointer)
6942 	  /* We need source and destination components. */
6943 	  comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
6945 	  dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
6947 	  dcmp = fold_convert (TREE_TYPE (comp), dcmp);
6949 	  if (c->attr.allocatable && !cmp_has_alloc_comps)
6951 	      rank = c->as ? c->as->rank : 0;
6952 	      tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
6953 	      gfc_add_expr_to_block (&fnblock, tmp);
6956 	  if (cmp_has_alloc_comps)
6958 	      rank = c->as ? c->as->rank : 0;
/* Shallow-copy the component, then deep-copy its own allocatables.  */
6959 	      tmp = fold_convert (TREE_TYPE (dcmp), comp);
6960 	      gfc_add_modify (&fnblock, dcmp, tmp);
6961 	      tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
6963 	      gfc_add_expr_to_block (&fnblock, tmp);
6973   return gfc_finish_block (&fnblock);
6976 /* Recursively traverse an object of derived type, generating code to
6977 nullify allocatable components. */
6980 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6982 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6983 NULLIFY_ALLOC_COMP);
6987 /* Recursively traverse an object of derived type, generating code to
6988 deallocate allocatable components. */
6991 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6993 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6994 DEALLOCATE_ALLOC_COMP);
6998 /* Recursively traverse an object of derived type, generating code to
6999 copy it and its allocatable components. */
7002 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7004 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
7008 /* Recursively traverse an object of derived type, generating code to
7009 copy only its allocatable components. */
7012 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7014 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
7018 /* Returns the value of LBOUND for an expression. This could be broken out
7019    from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
7020    called by gfc_alloc_allocatable_for_assignment. */
/* EXPR/DESC describe the array, DIM the dimension (0-based), ASSUMED_SIZE
   whether the array spec is assumed-size.  Returns a gfc_array_index_type
   tree for the standard-conforming lower bound (1 where the bound is not
   well defined).  NOTE(review): some lines of the descriptor branch are
   missing from this extract.  */
7022 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
7027   tree cond, cond1, cond3, cond4;
7031   if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
7033       tmp = gfc_rank_cst[dim];
7034       lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
7035       ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
7036       stride = gfc_conv_descriptor_stride_get (desc, tmp);
/* The stored lbound is only meaningful when the extent is non-empty for
   the sign of the stride; otherwise fall back to 1.  */
7037       cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7039       cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7040 			       stride, gfc_index_zero_node);
7041       cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7042 			       boolean_type_node, cond3, cond1);
7043       cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
7044 			       stride, gfc_index_zero_node);
/* For an assumed-size array the last dimension's bound is always taken.  */
7046 	cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7047 				tmp, build_int_cst (gfc_array_index_type,
7050 	cond = boolean_false_node;
7052       cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7053 			       boolean_type_node, cond3, cond4);
7054       cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7055 			      boolean_type_node, cond, cond1);
7057       return fold_build3_loc (input_location, COND_EXPR,
7058 			      gfc_array_index_type, cond,
7059 			      lbound, gfc_index_one_node);
7061   else if (expr->expr_type == EXPR_VARIABLE)
/* Non-descriptor variable: read the bound from the backend array type,
   descending into a full-array component reference if present.  */
7063       tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7064       for (ref = expr->ref; ref; ref = ref->next)
7066 	  if (ref->type == REF_COMPONENT
7067 		&& ref->u.c.component->as
7069 		&& ref->next->u.ar.type == AR_FULL)
7070 	    tmp = TREE_TYPE (ref->u.c.component->backend_decl);
7072       return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
7074   else if (expr->expr_type == EXPR_FUNCTION)
7076       /* A conversion function, so use the argument. */
7077       expr = expr->value.function.actual->expr;
7078       if (expr->expr_type != EXPR_VARIABLE)
7079 	return gfc_index_one_node;
7080       desc = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7081       return get_std_lbound (expr, desc, dim, assumed_size);
7084   return gfc_index_one_node;
7088 /* Returns true if an expression represents an lhs that can be reallocated
   on assignment.  NOTE(review): the return statements of this predicate
   are missing from this extract; only the tested conditions are visible.  */
7092 gfc_is_reallocatable_lhs (gfc_expr *expr)
7099   /* An allocatable variable. */
7100   if (expr->symtree->n.sym->attr.allocatable
7102 	&& expr->ref->type == REF_ARRAY
7103 	&& expr->ref->u.ar.type == AR_FULL)
7106   /* All that can be left are allocatable components. */
7107   if ((expr->symtree->n.sym->ts.type != BT_DERIVED
7108 	&& expr->symtree->n.sym->ts.type != BT_CLASS)
7109 	|| !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
7112   /* Find a component ref followed by an array reference. */
7113   for (ref = expr->ref; ref; ref = ref->next)
7115 	&& ref->type == REF_COMPONENT
7116 	&& ref->next->type == REF_ARRAY
7117 	&& !ref->next->next)
7123   /* Return true if valid reallocatable lhs. */
7124   if (ref->u.c.component->attr.allocatable
7125 	&& ref->next->u.ar.type == AR_FULL)
7132 /* Allocate the lhs of an assignment to an allocatable array, otherwise
7136 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
7140 stmtblock_t realloc_block;
7141 stmtblock_t alloc_block;
7164 gfc_array_spec * as;
7166 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
7167 Find the lhs expression in the loop chain and set expr1 and
7168 expr2 accordingly. */
7169 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
7172 /* Find the ss for the lhs. */
7174 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7175 if (lss->expr && lss->expr->expr_type == EXPR_VARIABLE)
7177 if (lss == gfc_ss_terminator)
7182 /* Bail out if this is not a valid allocate on assignment. */
7183 if (!gfc_is_reallocatable_lhs (expr1)
7184 || (expr2 && !expr2->rank))
7187 /* Find the ss for the lhs. */
7189 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7190 if (lss->expr == expr1)
7193 if (lss == gfc_ss_terminator)
7196 /* Find an ss for the rhs. For operator expressions, we see the
7197 ss's for the operands. Any one of these will do. */
7199 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7200 if (rss->expr != expr1 && rss != loop->temp_ss)
7203 if (expr2 && rss == gfc_ss_terminator)
7206 gfc_start_block (&fblock);
7208 /* Since the lhs is allocatable, this must be a descriptor type.
7209 Get the data and array size. */
7210 desc = lss->data.info.descriptor;
7211 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
7212 array1 = gfc_conv_descriptor_data_get (desc);
7214 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
7215 deallocated if expr is an array of different shape or any of the
7216 corresponding length type parameter values of variable and expr
7217 differ." This assures F95 compatibility. */
7218 jump_label1 = gfc_build_label_decl (NULL_TREE);
7219 jump_label2 = gfc_build_label_decl (NULL_TREE);
7221 /* Allocate if data is NULL. */
7222 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7223 array1, build_int_cst (TREE_TYPE (array1), 0));
7224 tmp = build3_v (COND_EXPR, cond,
7225 build1_v (GOTO_EXPR, jump_label1),
7226 build_empty_stmt (input_location));
7227 gfc_add_expr_to_block (&fblock, tmp);
7229 /* Get arrayspec if expr is a full array. */
7230 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7231 && expr2->value.function.isym
7232 && expr2->value.function.isym->conversion)
7234 /* For conversion functions, take the arg. */
7235 gfc_expr *arg = expr2->value.function.actual->expr;
7236 as = gfc_get_full_arrayspec_from_expr (arg);
7239 as = gfc_get_full_arrayspec_from_expr (expr2);
7243 /* If the lhs shape is not the same as the rhs jump to setting the
7244 bounds and doing the reallocation....... */
7245 for (n = 0; n < expr1->rank; n++)
7247 /* Check the shape. */
7248 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7249 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7250 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7251 gfc_array_index_type,
7252 loop->to[n], loop->from[n]);
7253 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7254 gfc_array_index_type,
7256 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7257 gfc_array_index_type,
7259 cond = fold_build2_loc (input_location, NE_EXPR,
7261 tmp, gfc_index_zero_node);
7262 tmp = build3_v (COND_EXPR, cond,
7263 build1_v (GOTO_EXPR, jump_label1),
7264 build_empty_stmt (input_location));
7265 gfc_add_expr_to_block (&fblock, tmp);
7268 /* ....else jump past the (re)alloc code. */
7269 tmp = build1_v (GOTO_EXPR, jump_label2);
7270 gfc_add_expr_to_block (&fblock, tmp);
7272 /* Add the label to start automatic (re)allocation. */
7273 tmp = build1_v (LABEL_EXPR, jump_label1);
7274 gfc_add_expr_to_block (&fblock, tmp);
7276 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7278 /* Get the rhs size. Fix both sizes. */
7280 desc2 = rss->data.info.descriptor;
7283 size2 = gfc_index_one_node;
7284 for (n = 0; n < expr2->rank; n++)
7286 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7287 gfc_array_index_type,
7288 loop->to[n], loop->from[n]);
7289 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7290 gfc_array_index_type,
7291 tmp, gfc_index_one_node);
7292 size2 = fold_build2_loc (input_location, MULT_EXPR,
7293 gfc_array_index_type,
7297 size1 = gfc_evaluate_now (size1, &fblock);
7298 size2 = gfc_evaluate_now (size2, &fblock);
7300 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7302 neq_size = gfc_evaluate_now (cond, &fblock);
7305 /* Now modify the lhs descriptor and the associated scalarizer
7306 variables. F2003 7.4.1.3: "If variable is or becomes an
7307 unallocated allocatable variable, then it is allocated with each
7308 deferred type parameter equal to the corresponding type parameters
7309 of expr , with the shape of expr , and with each lower bound equal
7310 to the corresponding element of LBOUND(expr)."
7311 Reuse size1 to keep a dimension-by-dimension track of the
7312 stride of the new array. */
7313 size1 = gfc_index_one_node;
7314 offset = gfc_index_zero_node;
7316 for (n = 0; n < expr2->rank; n++)
7318 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7319 gfc_array_index_type,
7320 loop->to[n], loop->from[n]);
7321 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7322 gfc_array_index_type,
7323 tmp, gfc_index_one_node);
7325 lbound = gfc_index_one_node;
7330 lbd = get_std_lbound (expr2, desc2, n,
7331 as->type == AS_ASSUMED_SIZE);
7332 ubound = fold_build2_loc (input_location,
7334 gfc_array_index_type,
7336 ubound = fold_build2_loc (input_location,
7338 gfc_array_index_type,
7343 gfc_conv_descriptor_lbound_set (&fblock, desc,
7346 gfc_conv_descriptor_ubound_set (&fblock, desc,
7349 gfc_conv_descriptor_stride_set (&fblock, desc,
7352 lbound = gfc_conv_descriptor_lbound_get (desc,
7354 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
7355 gfc_array_index_type,
7357 offset = fold_build2_loc (input_location, MINUS_EXPR,
7358 gfc_array_index_type,
7360 size1 = fold_build2_loc (input_location, MULT_EXPR,
7361 gfc_array_index_type,
7365 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
7366 the array offset is saved and the info.offset is used for a
7367 running offset. Use the saved_offset instead. */
7368 tmp = gfc_conv_descriptor_offset (desc);
7369 gfc_add_modify (&fblock, tmp, offset);
7370 if (lss->data.info.saved_offset
7371 && TREE_CODE (lss->data.info.saved_offset) == VAR_DECL)
7372 gfc_add_modify (&fblock, lss->data.info.saved_offset, tmp);
7374 /* Now set the deltas for the lhs. */
7375 for (n = 0; n < expr1->rank; n++)
7377 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7379 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7380 gfc_array_index_type, tmp,
7382 if (lss->data.info.delta[dim]
7383 && TREE_CODE (lss->data.info.delta[dim]) == VAR_DECL)
7384 gfc_add_modify (&fblock, lss->data.info.delta[dim], tmp);
7387 /* Get the new lhs size in bytes. */
7388 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
7390 tmp = expr2->ts.u.cl->backend_decl;
7391 gcc_assert (expr1->ts.u.cl->backend_decl);
7392 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
7393 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
7395 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
7397 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
7398 tmp = fold_build2_loc (input_location, MULT_EXPR,
7399 gfc_array_index_type, tmp,
7400 expr1->ts.u.cl->backend_decl);
7403 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
7404 tmp = fold_convert (gfc_array_index_type, tmp);
7405 size2 = fold_build2_loc (input_location, MULT_EXPR,
7406 gfc_array_index_type,
7408 size2 = fold_convert (size_type_node, size2);
7409 size2 = gfc_evaluate_now (size2, &fblock);
7411 /* Realloc expression. Note that the scalarizer uses desc.data
7412 in the array reference - (*desc.data)[<element>]. */
7413 gfc_init_block (&realloc_block);
7414 tmp = build_call_expr_loc (input_location,
7415 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
7416 fold_convert (pvoid_type_node, array1),
7418 gfc_conv_descriptor_data_set (&realloc_block,
7420 realloc_expr = gfc_finish_block (&realloc_block);
7422 /* Only reallocate if sizes are different. */
7423 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
7424 build_empty_stmt (input_location));
7428 /* Malloc expression. */
7429 gfc_init_block (&alloc_block);
7430 tmp = build_call_expr_loc (input_location,
7431 builtin_decl_explicit (BUILT_IN_MALLOC),
7433 gfc_conv_descriptor_data_set (&alloc_block,
7435 tmp = gfc_conv_descriptor_dtype (desc);
7436 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
7437 alloc_expr = gfc_finish_block (&alloc_block);
7439 /* Malloc if not allocated; realloc otherwise. */
7440 tmp = build_int_cst (TREE_TYPE (array1), 0);
7441 cond = fold_build2_loc (input_location, EQ_EXPR,
7444 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
7445 gfc_add_expr_to_block (&fblock, tmp);
7447 /* Make sure that the scalarizer data pointer is updated. */
7448 if (lss->data.info.data
7449 && TREE_CODE (lss->data.info.data) == VAR_DECL)
7451 tmp = gfc_conv_descriptor_data_get (desc);
7452 gfc_add_modify (&fblock, lss->data.info.data, tmp);
7455 /* Add the exit label. */
7456 tmp = build1_v (LABEL_EXPR, jump_label2);
7457 gfc_add_expr_to_block (&fblock, tmp);
7459 return gfc_finish_block (&fblock);
7463 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
7464 Do likewise, recursively if necessary, with the allocatable components
of derived type variables.  */
7468 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
7474 stmtblock_t cleanup;
7477 bool sym_has_alloc_comp;
7479 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
7480 || sym->ts.type == BT_CLASS)
7481 && sym->ts.u.derived->attr.alloc_comp;
7483 /* Make sure the frontend gets these right. */
7484 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
7485 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
7486 "allocatable attribute or derived type without allocatable "
7489 gfc_save_backend_locus (&loc);
7490 gfc_set_backend_locus (&sym->declared_at);
7491 gfc_init_block (&init);
7493 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
7494 || TREE_CODE (sym->backend_decl) == PARM_DECL);
7496 if (sym->ts.type == BT_CHARACTER
7497 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
7499 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
7500 gfc_trans_vla_type_sizes (sym, &init);
7503 /* Dummy, use associated and result variables don't need anything special. */
7504 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
7506 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7507 gfc_restore_backend_locus (&loc);
7511 descriptor = sym->backend_decl;
7513 /* Although static, derived types with default initializers and
7514 allocatable components must not be nulled wholesale; instead they
7515 are treated component by component. */
7516 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
7518 /* SAVEd variables are not freed on exit. */
7519 gfc_trans_static_array_pointer (sym);
7521 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7522 gfc_restore_backend_locus (&loc);
7526 /* Get the descriptor type. */
7527 type = TREE_TYPE (sym->backend_decl);
7529 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
7532 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
7534 if (sym->value == NULL
7535 || !gfc_has_default_initializer (sym->ts.u.derived))
7537 rank = sym->as ? sym->as->rank : 0;
7538 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
7540 gfc_add_expr_to_block (&init, tmp);
7543 gfc_init_default_dt (sym, &init, false);
7546 else if (!GFC_DESCRIPTOR_TYPE_P (type))
7548 /* If the backend_decl is not a descriptor, we must have a pointer
7550 descriptor = build_fold_indirect_ref_loc (input_location,
7552 type = TREE_TYPE (descriptor);
7555 /* NULLIFY the data pointer. */
7556 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
7557 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
7559 gfc_restore_backend_locus (&loc);
7560 gfc_init_block (&cleanup);
7562 /* Allocatable arrays need to be freed when they go out of scope.
7563 The allocatable components of pointers must not be touched. */
7564 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
7565 && !sym->attr.pointer && !sym->attr.save)
7568 rank = sym->as ? sym->as->rank : 0;
7569 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
7570 gfc_add_expr_to_block (&cleanup, tmp);
7573 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
7574 && !sym->attr.save && !sym->attr.result)
7576 tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
7577 gfc_add_expr_to_block (&cleanup, tmp);
7580 gfc_add_init_cleanup (block, gfc_finish_block (&init),
7581 gfc_finish_block (&cleanup));
7584 /************ Expression Walking Functions ******************/
7586 /* Walk a variable reference.
7588 Possible extension - multiple component subscripts.
7589 x(:,:) = foo%a(:)%b(:)
7591 forall (i=..., j=...)
7592 x(i,j) = foo%a(j)%b(i)
7594 This adds a fair amount of complexity because you need to deal with more
7595 than one ref. Maybe handle in a similar manner to vector subscripts.
7596 Maybe not worth the effort. */
7600 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
7604 for (ref = expr->ref; ref; ref = ref->next)
7605 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
7608 return gfc_walk_array_ref (ss, expr, ref);
7613 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
7619 for (; ref; ref = ref->next)
7621 if (ref->type == REF_SUBSTRING)
7623 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
7624 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
7627 /* We're only interested in array sections from now on. */
7628 if (ref->type != REF_ARRAY)
7636 for (n = ar->dimen - 1; n >= 0; n--)
7637 ss = gfc_get_scalar_ss (ss, ar->start[n]);
7641 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
7642 newss->data.info.ref = ref;
7644 /* Make sure array is the same as array(:,:), this way
7645 we don't need to special case all the time. */
7646 ar->dimen = ar->as->rank;
7647 for (n = 0; n < ar->dimen; n++)
7649 ar->dimen_type[n] = DIMEN_RANGE;
7651 gcc_assert (ar->start[n] == NULL);
7652 gcc_assert (ar->end[n] == NULL);
7653 gcc_assert (ar->stride[n] == NULL);
7659 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
7660 newss->data.info.ref = ref;
7662 /* We add SS chains for all the subscripts in the section. */
7663 for (n = 0; n < ar->dimen; n++)
7667 switch (ar->dimen_type[n])
7670 /* Add SS for elemental (scalar) subscripts. */
7671 gcc_assert (ar->start[n]);
7672 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
7673 indexss->loop_chain = gfc_ss_terminator;
7674 newss->data.info.subscript[n] = indexss;
7678 /* We don't add anything for sections, just remember this
7679 dimension for later. */
7680 newss->dim[newss->dimen] = n;
7685 /* Create a GFC_SS_VECTOR index in which we can store
7686 the vector's descriptor. */
7687 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
7689 indexss->loop_chain = gfc_ss_terminator;
7690 newss->data.info.subscript[n] = indexss;
7691 newss->dim[newss->dimen] = n;
7696 /* We should know what sort of section it is by now. */
7700 /* We should have at least one non-elemental dimension,
7701 unless we are creating a descriptor for a (scalar) coarray. */
7702 gcc_assert (newss->dimen > 0
7703 || newss->data.info.ref->u.ar.as->corank > 0);
7708 /* We should know what sort of section it is by now. */
7717 /* Walk an expression operator. If only one operand of a binary expression is
7718 scalar, we must also add the scalar term to the SS chain. */
7721 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
7726 head = gfc_walk_subexpr (ss, expr->value.op.op1);
7727 if (expr->value.op.op2 == NULL)
7730 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
7732 /* All operands are scalar. Pass back and let the caller deal with it. */
7736 /* All operands require scalarization. */
7737 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
7740 /* One of the operands needs scalarization, the other is scalar.
7741 Create a gfc_ss for the scalar expression. */
7744 /* First operand is scalar. We build the chain in reverse order, so
7745 add the scalar SS after the second operand. */
7747 while (head && head->next != ss)
7749 /* Check we haven't somehow broken the chain. */
7751 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
7753 else /* head2 == head */
7755 gcc_assert (head2 == head);
7756 /* Second operand is scalar. */
7757 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
7764 /* Reverse a SS chain. */
7767 gfc_reverse_ss (gfc_ss * ss)
7772 gcc_assert (ss != NULL);
7774 head = gfc_ss_terminator;
7775 while (ss != gfc_ss_terminator)
7778 /* Check we didn't somehow break the chain. */
7779 gcc_assert (next != NULL);
7789 /* Walk the arguments of an elemental function. */
7792 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
7800 head = gfc_ss_terminator;
7803 for (; arg; arg = arg->next)
7808 newss = gfc_walk_subexpr (head, arg->expr);
7811 /* Scalar argument. */
7812 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
7813 newss = gfc_get_scalar_ss (head, arg->expr);
7823 while (tail->next != gfc_ss_terminator)
7830 /* If all the arguments are scalar we don't need the argument SS. */
7831 gfc_free_ss_chain (head);
7836 /* Add it onto the existing chain. */
7842 /* Walk a function call. Scalar functions are passed back, and taken out of
7843 scalarization loops. For elemental functions we walk their arguments.
7844 The result of functions returning arrays is stored in a temporary outside
7845 the loop, so that the function is only called once. Hence we do not need
7846 to walk their arguments. */
7849 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
7851 gfc_intrinsic_sym *isym;
7853 gfc_component *comp = NULL;
7855 isym = expr->value.function.isym;
7857 /* Handle intrinsic functions separately. */
7859 return gfc_walk_intrinsic_function (ss, expr, isym);
7861 sym = expr->value.function.esym;
7863 sym = expr->symtree->n.sym;
7865 /* A function that returns arrays. */
7866 gfc_is_proc_ptr_comp (expr, &comp);
7867 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
7868 || (comp && comp->attr.dimension))
7869 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
7871 /* Walk the parameters of an elemental function. For now we always pass
7873 if (sym->attr.elemental)
7874 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
7877 /* Scalar functions are OK as these are evaluated outside the scalarization
7878 loop. Pass back and let the caller deal with it. */
7883 /* An array temporary is constructed for array constructors. */
7886 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
7888 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
7892 /* Walk an expression. Add walked expressions to the head of the SS chain.
7893 A wholly scalar expression will not be added. */
7896 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
7900 switch (expr->expr_type)
7903 head = gfc_walk_variable_expr (ss, expr);
7907 head = gfc_walk_op_expr (ss, expr);
7911 head = gfc_walk_function_expr (ss, expr);
7916 case EXPR_STRUCTURE:
7917 /* Pass back and let the caller deal with it. */
7921 head = gfc_walk_array_constructor (ss, expr);
7924 case EXPR_SUBSTRING:
7925 /* Pass back and let the caller deal with it. */
7929 internal_error ("bad expression type during walk (%d)",
7936 /* Entry point for expression walking.
7937 A return value equal to the passed chain means this is
7938 a scalar expression. It is up to the caller to take whatever action is
7939 necessary to translate these. */
7942 gfc_walk_expr (gfc_expr * expr)
7946 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
7947 return gfc_reverse_ss (res);