1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* trans-array.c-- Various array related code, including scalarization,
25 allocation, initialization and other support routines. */
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
39 If the expression is an assignment, we must then resolve any dependencies.
40 In fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this gfc_make_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
67 For assignment expressions requiring a temporary two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
82 #include "coretypes.h"
85 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
88 #include "constructor.h"
90 #include "trans-stmt.h"
91 #include "trans-types.h"
92 #include "trans-array.h"
93 #include "trans-const.h"
94 #include "dependency.h"
96 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
98 /* The contents of this structure aren't actually used, just the address. */
/* Sentinel marking the end of every SS chain; it is compared by address
   only (ss != gfc_ss_terminator), never dereferenced.  */
99 static gfc_ss gfc_ss_terminator_var;
100 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
/* Return the type of the data pointer member of array descriptor DESC.  */
104 gfc_array_dataptr_type (tree desc)
106 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
110 /* Build expressions to access the members of an array descriptor.
111 It's surprisingly easy to mess up here, so never access
112 an array descriptor by "brute force", always use these
113 functions. This also avoids problems if we change the format
114 of an array descriptor.
116 To understand these magic numbers, look at the comments
117 before gfc_build_array_type() in trans-types.c.
119 The code within these defines should be the only code which knows the format
120 of an array descriptor.
122 Any code just needing to obtain the bounds of an array should use
123 gfc_conv_array_* rather than the following functions as these will return
124 known constant values, and work with arrays which do not have descriptors.
126 Don't forget to #undef these! */
/* Positions of the members within the descriptor record type, and of the
   sub-members within each per-dimension record.  NOTE(review): the
   #define for DATA_FIELD (asserted to be 0 in the accessors below) is
   not visible in this listing.  */
129 #define OFFSET_FIELD 1
130 #define DTYPE_FIELD 2
131 #define DIMENSION_FIELD 3
132 #define CAF_TOKEN_FIELD 4
134 #define STRIDE_SUBFIELD 0
135 #define LBOUND_SUBFIELD 1
136 #define UBOUND_SUBFIELD 2
138 /* This provides READ-ONLY access to the data field. The field itself
139 doesn't have the proper type. */
142 gfc_conv_descriptor_data_get (tree desc)
146 type = TREE_TYPE (desc);
147 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
/* The data member is the first field of the descriptor record.  */
149 field = TYPE_FIELDS (type);
150 gcc_assert (DATA_FIELD == 0);
152 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
/* Convert to the real data-pointer type, since the raw field does not
   carry it (see the comment above).  */
154 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
159 /* This provides WRITE access to the data field.
161 TUPLES_P is true if we are generating tuples.
163 This function gets called through the following macros:
164 gfc_conv_descriptor_data_set
165 gfc_conv_descriptor_data_set_tuples. */
168 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
172 type = TREE_TYPE (desc);
173 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
175 field = TYPE_FIELDS (type);
176 gcc_assert (DATA_FIELD == 0);
178 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
/* Emit "desc.data = (field-type) value" into BLOCK.  */
180 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
184 /* This provides address access to the data field. This should only be
185 used by array allocation, passing this on to the runtime. */
188 gfc_conv_descriptor_data_addr (tree desc)
192 type = TREE_TYPE (desc);
193 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
195 field = TYPE_FIELDS (type);
196 gcc_assert (DATA_FIELD == 0);
198 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
/* Return &desc.data for handing to library routines that fill it in.  */
200 return gfc_build_addr_expr (NULL_TREE, t);
/* Build an lvalue reference to the offset member of descriptor DESC.  */
204 gfc_conv_descriptor_offset (tree desc)
209 type = TREE_TYPE (desc);
210 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
212 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
213 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
215 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
216 desc, field, NULL_TREE);
/* Read accessor for the offset member.  */
220 gfc_conv_descriptor_offset_get (tree desc)
222 return gfc_conv_descriptor_offset (desc);
/* Write accessor: emit "desc.offset = value" into BLOCK.  */
226 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
229 tree t = gfc_conv_descriptor_offset (desc);
230 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
/* Build an lvalue reference to the dtype member of descriptor DESC.  */
235 gfc_conv_descriptor_dtype (tree desc)
240 type = TREE_TYPE (desc);
241 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
243 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
244 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
246 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
247 desc, field, NULL_TREE);
/* Build a reference to the per-dimension record DESC.dim[DIM], whose
   sub-fields are stride/lbound/ubound (see the SUBFIELD defines).  */
251 gfc_conv_descriptor_dimension (tree desc, tree dim)
257 type = TREE_TYPE (desc);
258 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
260 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
/* The dimension member must be an array of records.  */
261 gcc_assert (field != NULL_TREE
262 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
263 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
265 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
266 desc, field, NULL_TREE);
267 tmp = gfc_build_array_ref (tmp, dim, NULL);
/* Build a reference to the coarray token member of DESC.  Only valid for
   allocatable coarrays when compiling with -fcoarray=lib.  */
273 gfc_conv_descriptor_token (tree desc)
278 type = TREE_TYPE (desc);
279 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
280 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
281 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
282 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
283 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == prvoid_type_node);
285 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
286 desc, field, NULL_TREE);
/* Build an lvalue reference to DESC.dim[DIM].stride.  */
291 gfc_conv_descriptor_stride (tree desc, tree dim)
296 tmp = gfc_conv_descriptor_dimension (desc, dim);
297 field = TYPE_FIELDS (TREE_TYPE (tmp));
298 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
299 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
301 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
302 tmp, field, NULL_TREE);
/* Read accessor.  For array kinds known to be contiguous the stride of
   dimension 0 is one by construction, so fold it to a constant instead
   of loading it from the descriptor.  */
307 gfc_conv_descriptor_stride_get (tree desc, tree dim)
309 tree type = TREE_TYPE (desc);
310 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
311 if (integer_zerop (dim)
312 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
313 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
314 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
315 return gfc_index_one_node;
317 return gfc_conv_descriptor_stride (desc, dim);
/* Write accessor: emit "desc.dim[dim].stride = value" into BLOCK.  */
321 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
322 tree dim, tree value)
324 tree t = gfc_conv_descriptor_stride (desc, dim);
325 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
/* Build an lvalue reference to DESC.dim[DIM].lbound.  */
329 gfc_conv_descriptor_lbound (tree desc, tree dim)
334 tmp = gfc_conv_descriptor_dimension (desc, dim);
335 field = TYPE_FIELDS (TREE_TYPE (tmp));
336 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
337 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
339 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
340 tmp, field, NULL_TREE);
/* Read accessor for the lower bound.  */
345 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
347 return gfc_conv_descriptor_lbound (desc, dim);
/* Write accessor: emit "desc.dim[dim].lbound = value" into BLOCK.  */
351 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
352 tree dim, tree value)
354 tree t = gfc_conv_descriptor_lbound (desc, dim);
355 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
/* Build an lvalue reference to DESC.dim[DIM].ubound.  */
359 gfc_conv_descriptor_ubound (tree desc, tree dim)
364 tmp = gfc_conv_descriptor_dimension (desc, dim);
365 field = TYPE_FIELDS (TREE_TYPE (tmp));
366 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
367 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
369 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
370 tmp, field, NULL_TREE);
/* Read accessor for the upper bound.  */
375 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
377 return gfc_conv_descriptor_ubound (desc, dim);
/* Write accessor: emit "desc.dim[dim].ubound = value" into BLOCK.  */
381 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
382 tree dim, tree value)
384 tree t = gfc_conv_descriptor_ubound (desc, dim);
385 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
388 /* Build a null array descriptor constructor. */
391 gfc_build_null_descriptor (tree type)
396 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
397 gcc_assert (DATA_FIELD == 0);
398 field = TYPE_FIELDS (type);
400 /* Set a NULL data pointer. */
401 tmp = build_constructor_single (type, field, null_pointer_node);
/* Mark constant so it can be used as a static initializer.  */
402 TREE_CONSTANT (tmp) = 1;
403 /* All other fields are ignored. */
409 /* Modify a descriptor such that the lbound of a given dimension is the value
410 specified. This also updates ubound and offset accordingly. */
413 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
414 int dim, tree new_lbound)
416 tree offs, ubound, lbound, stride;
417 tree diff, offs_diff;
419 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
/* Read the current offset, bounds and stride of dimension DIM.  */
421 offs = gfc_conv_descriptor_offset_get (desc);
422 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
423 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
424 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
426 /* Get difference (new - old) by which to shift stuff. */
427 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
430 /* Shift ubound and offset accordingly. This has to be done before
431 updating the lbound, as they depend on the lbound expression! */
432 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
434 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
/* offset -= diff * stride keeps element addresses unchanged.  */
435 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
437 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
439 gfc_conv_descriptor_offset_set (block, desc, offs);
441 /* Finally set lbound to value we want. */
442 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
446 /* Cleanup those #defines. */
/* NOTE(review): the #undefs for DATA_FIELD, OFFSET_FIELD and DTYPE_FIELD
   are not visible in this listing.  */
451 #undef DIMENSION_FIELD
452 #undef CAF_TOKEN_FIELD
453 #undef STRIDE_SUBFIELD
454 #undef LBOUND_SUBFIELD
455 #undef UBOUND_SUBFIELD
458 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
459 flags & 1 = Main loop body.
460 flags & 2 = temp copy loop. */
463 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
/* Walk the chain up to the terminator, tagging every SS.  */
465 for (; ss != gfc_ss_terminator; ss = ss->next)
466 ss->useflags = flags;
469 static void gfc_free_ss (gfc_ss *);
472 /* Free a gfc_ss chain. */
475 gfc_free_ss_chain (gfc_ss * ss)
479 while (ss != gfc_ss_terminator)
/* A NULL here would mean the chain was not properly terminated.  */
481 gcc_assert (ss != NULL);
/* Free a single gfc_ss element, including any nested subscript chains
   hanging off an array-type SS.  */
492 gfc_free_ss (gfc_ss * ss)
499 for (n = 0; n < ss->data.info.dimen; n++)
501 if (ss->data.info.subscript[ss->data.info.dim[n]])
502 gfc_free_ss_chain (ss->data.info.subscript[ss->data.info.dim[n]]);
514 /* Creates and initializes an array type gfc_ss struct. */
517 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
527 info = &ss->data.info;
/* Initialise the per-dimension info (loop over DIMEN dimensions).  */
529 for (i = 0; i < info->dimen; i++)
536 /* Creates and initializes a temporary type gfc_ss struct. */
539 gfc_get_temp_ss (tree type, tree string_length, int dimen)
/* A temporary SS always ends its chain.  */
544 ss->next = gfc_ss_terminator;
545 ss->type = GFC_SS_TEMP;
546 ss->string_length = string_length;
547 ss->data.temp.dimen = dimen;
548 ss->data.temp.type = type;
554 /* Creates and initializes a scalar type gfc_ss struct. */
557 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
563 ss->type = GFC_SS_SCALAR;
570 /* Free all the SS associated with a loop. */
573 gfc_cleanup_loop (gfc_loopinfo * loop)
/* Walk the loop_chain (not ->next) since that links all SS owned by
   this loop, freeing each one.  */
579 while (ss != gfc_ss_terminator)
581 gcc_assert (ss != NULL);
582 next = ss->loop_chain;
589 /* Associate a SS chain with a loop. */
592 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
/* Nothing to do for an empty chain.  */
596 if (head == gfc_ss_terminator)
/* Splice the chain onto the front of the loop's SS list: the last real
   element points at the loop's previous head, the others follow ->next.  */
600 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
602 if (ss->next == gfc_ss_terminator)
603 ss->loop_chain = loop->ss;
605 ss->loop_chain = ss->next;
/* The walk must have ended exactly at the terminator.  */
607 gcc_assert (ss == gfc_ss_terminator);
612 /* Generate an initializer for a static pointer or allocatable array. */
615 gfc_trans_static_array_pointer (gfc_symbol * sym)
619 gcc_assert (TREE_STATIC (sym->backend_decl));
620 /* Just zero the data member. */
621 type = TREE_TYPE (sym->backend_decl);
622 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
626 /* If the bounds of SE's loop have not yet been set, see if they can be
627 determined from array spec AS, which is the array spec of a called
628 function. MAPPING maps the callee's dummy arguments to the values
629 that the caller is passing. Add any initialization and finalization
630 code to SE.  */
633 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
634 gfc_se * se, gfc_array_spec * as)
/* Only explicit-shape specs carry usable bounds expressions.  */
642 if (as && as->type == AS_EXPLICIT)
643 for (n = 0; n < se->loop->dimen; n++)
645 dim = se->ss->data.info.dim[n];
646 gcc_assert (dim < as->rank);
647 gcc_assert (se->loop->dimen == as->rank);
/* Skip dimensions whose loop bound is already known.  */
648 if (se->loop->to[n] == NULL_TREE)
650 /* Evaluate the lower bound. */
651 gfc_init_se (&tmpse, NULL);
652 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
653 gfc_add_block_to_block (&se->pre, &tmpse.pre);
654 gfc_add_block_to_block (&se->post, &tmpse.post);
655 lower = fold_convert (gfc_array_index_type, tmpse.expr);
657 /* ...and the upper bound. */
658 gfc_init_se (&tmpse, NULL);
659 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
660 gfc_add_block_to_block (&se->pre, &tmpse.pre);
661 gfc_add_block_to_block (&se->post, &tmpse.post);
662 upper = fold_convert (gfc_array_index_type, tmpse.expr);
664 /* Set the upper bound of the loop to UPPER - LOWER. */
665 tmp = fold_build2_loc (input_location, MINUS_EXPR,
666 gfc_array_index_type, upper, lower);
667 tmp = gfc_evaluate_now (tmp, &se->pre);
668 se->loop->to[n] = tmp;
674 /* Generate code to allocate an array temporary, or create a variable to
675 hold the data. If size is NULL, zero the descriptor so that the
676 callee will allocate the array. If DEALLOC is true, also generate code to
677 free the array afterwards.
679 If INITIAL is not NULL, it is packed using internal_pack and the result used
680 as data instead of allocating a fresh, uninitialized area of memory.
682 Initialization code is added to PRE and finalization code to POST.
683 DYNAMIC is true if the caller may want to extend the array later
684 using realloc. This prevents us from putting the array on the stack. */
687 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
688 gfc_ss_info * info, tree size, tree nelem,
689 tree initial, bool dynamic, bool dealloc)
695 desc = info->descriptor;
/* Temporaries always use a zero offset (see also the offset store
   further down).  */
696 info->offset = gfc_index_zero_node;
697 if (size == NULL_TREE || integer_zerop (size))
699 /* A callee allocated array. */
700 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
705 /* Allocate the temporary. */
/* Stack allocation is only possible when we are not going to realloc
   (DYNAMIC) and do not need to pack an INITIAL source.  */
706 onstack = !dynamic && initial == NULL_TREE
707 && (gfc_option.flag_stack_arrays
708 || gfc_can_put_var_on_stack (size));
712 /* Make a temporary variable to hold the data. */
/* Index range is [0, nelem-1].  */
713 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
714 nelem, gfc_index_one_node);
715 tmp = gfc_evaluate_now (tmp, pre);
716 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
718 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
720 tmp = gfc_create_var (tmp, "A");
721 /* If we're here only because of -fstack-arrays we have to
722 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
723 if (!gfc_can_put_var_on_stack (size))
724 gfc_add_expr_to_block (pre,
725 fold_build1_loc (input_location,
726 DECL_EXPR, TREE_TYPE (tmp),
/* Point the descriptor's data member at the stack variable.  */
728 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
729 gfc_conv_descriptor_data_set (pre, desc, tmp);
733 /* Allocate memory to hold the data or call internal_pack. */
734 if (initial == NULL_TREE)
736 tmp = gfc_call_malloc (pre, NULL, size);
737 tmp = gfc_evaluate_now (tmp, pre);
744 stmtblock_t do_copying;
746 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
747 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
748 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
749 tmp = gfc_get_element_type (tmp);
750 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
751 packed = gfc_create_var (build_pointer_type (tmp), "data");
/* packed = internal_pack (initial).  */
753 tmp = build_call_expr_loc (input_location,
754 gfor_fndecl_in_pack, 1, initial);
755 tmp = fold_convert (TREE_TYPE (packed), tmp);
756 gfc_add_modify (pre, packed, tmp);
758 tmp = build_fold_indirect_ref_loc (input_location,
760 source_data = gfc_conv_descriptor_data_get (tmp);
762 /* internal_pack may return source->data without any allocation
763 or copying if it is already packed. If that's the case, we
764 need to allocate and copy manually. */
766 gfc_start_block (&do_copying);
767 tmp = gfc_call_malloc (&do_copying, NULL, size);
768 tmp = fold_convert (TREE_TYPE (packed), tmp);
769 gfc_add_modify (&do_copying, packed, tmp);
770 tmp = gfc_build_memcpy_call (packed, source_data, size);
771 gfc_add_expr_to_block (&do_copying, tmp);
/* Guard the copy: run it only if internal_pack returned the original
   (already-packed) data pointer.  */
773 was_packed = fold_build2_loc (input_location, EQ_EXPR,
774 boolean_type_node, packed,
776 tmp = gfc_finish_block (&do_copying);
777 tmp = build3_v (COND_EXPR, was_packed, tmp,
778 build_empty_stmt (input_location));
779 gfc_add_expr_to_block (pre, tmp);
781 tmp = fold_convert (pvoid_type_node, packed);
784 gfc_conv_descriptor_data_set (pre, desc, tmp);
787 info->data = gfc_conv_descriptor_data_get (desc);
789 /* The offset is zero because we create temporaries with a zero
790 lower bound.  */
791 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
793 if (dealloc && !onstack)
795 /* Free the temporary. */
796 tmp = gfc_conv_descriptor_data_get (desc);
797 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
798 gfc_add_expr_to_block (post, tmp);
803 /* Get the array reference dimension corresponding to the given loop dimension.
804 It is different from the true array dimension given by the dim array in
805 the case of a partial array reference
806 It is different from the loop dimension in the case of a transposed array.
807 */
810 get_array_ref_dim (gfc_ss_info *info, int loop_dim)
812 int n, array_dim, array_ref_dim;
815 array_dim = info->dim[loop_dim];
/* Count how many referenced dimensions come before ARRAY_DIM; that rank
   among the referenced dims is the array-ref dimension.  */
817 for (n = 0; n < info->dimen; n++)
818 if (info->dim[n] < array_dim)
821 return array_ref_dim;
825 /* Generate code to create and initialize the descriptor for a temporary
826 array. This is used for both temporaries needed by the scalarizer, and
827 functions returning arrays. Adjusts the loop variables to be
828 zero-based, and calculates the loop bounds for callee allocated arrays.
829 Allocate the array unless it's callee allocated (we have a callee
830 allocated array if 'callee_alloc' is true, or if loop->to[n] is
831 NULL_TREE for any n). Also fills in the descriptor, data and offset
832 fields of info if known. Returns the size of the array, or NULL for a
833 callee allocated array.
835 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
836 gfc_trans_allocate_array_storage.
837 */
840 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
841 gfc_loopinfo * loop, gfc_ss * ss,
842 tree eltype, tree initial, bool dynamic,
843 bool dealloc, bool callee_alloc, locus * where)
/* Per-dimension bounds, reordered into array-ref order below.  */
846 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
856 memset (from, 0, sizeof (from));
857 memset (to, 0, sizeof (to));
859 info = &ss->data.info;
861 gcc_assert (info->dimen > 0);
862 gcc_assert (loop->dimen == info->dimen);
864 if (gfc_option.warn_array_temp && where)
865 gfc_warning ("Creating array temporary at %L", where);
867 /* Set the lower bound to zero. */
868 for (n = 0; n < loop->dimen; n++)
872 /* Callee allocated arrays may not have a known bound yet. */
/* Rebase the loop to [0, to-from] so the temporary is zero-based.  */
874 loop->to[n] = gfc_evaluate_now (
875 fold_build2_loc (input_location, MINUS_EXPR,
876 gfc_array_index_type,
877 loop->to[n], loop->from[n]),
879 loop->from[n] = gfc_index_zero_node;
881 /* We are constructing the temporary's descriptor based on the loop
882 dimensions. As the dimensions may be accessed in arbitrary order
883 (think of transpose) the size taken from the n'th loop may not map
884 to the n'th dimension of the array. We need to reconstruct loop infos
885 in the right order before using it to set the descriptor
886 fields.  */
887 tmp_dim = get_array_ref_dim (info, n);
888 from[tmp_dim] = loop->from[n];
889 to[tmp_dim] = loop->to[n];
/* The temporary needs no index translation: zero delta/start/end,
   unit stride.  */
891 info->delta[dim] = gfc_index_zero_node;
892 info->start[dim] = gfc_index_zero_node;
893 info->end[dim] = gfc_index_zero_node;
894 info->stride[dim] = gfc_index_one_node;
897 /* Initialize the descriptor. */
899 gfc_get_array_type_bounds (eltype, info->dimen, 0, from, to, 1,
900 GFC_ARRAY_UNKNOWN, true);
901 desc = gfc_create_var (type, "atmp");
902 GFC_DECL_PACKED_ARRAY (desc) = 1;
904 info->descriptor = desc;
905 size = gfc_index_one_node;
907 /* Fill in the array dtype. */
908 tmp = gfc_conv_descriptor_dtype (desc);
909 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
911 /*
912 Fill in the bounds and stride. This is a packed array, so:
914 size = 1;
915 for (n = 0; n < rank; n++)
917 stride[n] = size
918 delta = ubound[n] + 1 - lbound[n];
920 size = size * delta;
921 size = size * sizeof(element);
922 */
926 /* If there is at least one null loop->to[n], it is a callee allocated
927 array.  */
928 for (n = 0; n < loop->dimen; n++)
929 if (loop->to[n] == NULL_TREE)
935 for (n = 0; n < loop->dimen; n++)
939 if (size == NULL_TREE)
941 /* For a callee allocated array express the loop bounds in terms
942 of the descriptor fields. */
943 tmp = fold_build2_loc (input_location,
944 MINUS_EXPR, gfc_array_index_type,
945 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
946 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
951 /* Store the stride and bound components in the descriptor. */
952 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
954 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
955 gfc_index_zero_node);
957 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n],
/* Extent of this dimension: to[n] + 1 (zero-based).  */
960 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
961 to[n], gfc_index_one_node);
963 /* Check whether the size for this dimension is negative. */
964 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, tmp,
965 gfc_index_zero_node);
966 cond = gfc_evaluate_now (cond, pre);
/* OR together the per-dimension "extent is empty" conditions.  */
971 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
972 boolean_type_node, or_expr, cond);
974 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
976 size = gfc_evaluate_now (size, pre);
979 /* Get the size of the array. */
981 if (size && !callee_alloc)
983 /* If or_expr is true, then the extent in at least one
984 dimension is zero and the size is set to zero. */
985 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
986 or_expr, gfc_index_zero_node, size);
/* Convert element count to a byte size.  */
989 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
991 fold_convert (gfc_array_index_type,
992 TYPE_SIZE_UNIT (gfc_get_element_type (type))));
1000 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
/* Record the deepest temporary rank seen by this loop.  */
1003 if (info->dimen > loop->temp_dim)
1004 loop->temp_dim = info->dimen;
1010 /* Return the number of iterations in a loop that starts at START,
1011 ends at END, and has step STEP. */
1014 gfc_get_iteration_count (tree start, tree end, tree step)
1019 type = TREE_TYPE (step);
/* count = max ((end - start) floor_div step + 1, 0).  */
1020 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1021 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1022 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1023 build_int_cst (type, 1));
/* Clamp at zero for loops that execute no iterations.  */
1024 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1025 build_int_cst (type, 0));
1026 return fold_convert (gfc_array_index_type, tmp);
1030 /* Extend the data in array DESC by EXTRA elements. */
1033 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
/* Nothing to do when the requested growth is zero.  */
1040 if (integer_zerop (extra))
1043 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1045 /* Add EXTRA to the upper bound. */
1046 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1048 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1050 /* Get the value of the current data pointer. */
1051 arg0 = gfc_conv_descriptor_data_get (desc);
1053 /* Calculate the new array size. */
1054 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
/* New element count is ubound + 1 (lbound is zero for these arrays).  */
1055 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1056 ubound, gfc_index_one_node);
1057 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1058 fold_convert (size_type_node, tmp),
1059 fold_convert (size_type_node, size));
1061 /* Call the realloc() function. */
1062 tmp = gfc_call_realloc (pblock, arg0, arg1);
1063 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1067 /* Return true if the bounds of iterator I can only be determined
1068 at run time.  */
1071 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1073 return (i->start->expr_type != EXPR_CONSTANT
1074 || i->end->expr_type != EXPR_CONSTANT
1075 || i->step->expr_type != EXPR_CONSTANT);
1079 /* Split the size of constructor element EXPR into the sum of two terms,
1080 one of which can be determined at compile time and one of which must
1081 be calculated at run time. Set *SIZE to the former and return true
1082 if the latter might be nonzero. */
1085 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
/* Nested array constructors are sized recursively.  */
1087 if (expr->expr_type == EXPR_ARRAY)
1088 return gfc_get_array_constructor_size (size, expr->value.constructor);
1089 else if (expr->rank > 0)
1091 /* Calculate everything at run time. */
1092 mpz_set_ui (*size, 0);
1097 /* A single element. */
1098 mpz_set_ui (*size, 1);
1104 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1105 of array constructor C. */
1108 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1116 mpz_set_ui (*size, 0);
1121 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
/* Iterators with run-time bounds force a dynamic size.  */
1124 if (i && gfc_iterator_has_dynamic_bounds (i))
1128 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1131 /* Multiply the static part of the element size by the
1132 number of iterations. */
/* iterations = (end - start) / step + 1, clamped at zero below.  */
1133 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1134 mpz_fdiv_q (val, val, i->step->value.integer);
1135 mpz_add_ui (val, val, 1);
1136 if (mpz_sgn (val) > 0)
1137 mpz_mul (len, len, val);
1139 mpz_set_ui (len, 0);
1141 mpz_add (*size, *size, len);
1150 /* Make sure offset is a variable. */
1153 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1156 /* We should have already created the offset variable. We cannot
1157 create it here because we may be in an inner scope. */
1158 gcc_assert (*offsetvar != NULL_TREE);
/* Store the current offset expression into the variable and make the
   variable the offset from now on.  */
1159 gfc_add_modify (pblock, *offsetvar, *poffset);
1160 *poffset = *offsetvar;
1161 TREE_USED (*offsetvar) = 1;
1165 /* Variables needed for bounds-checking.  Used to verify that all
   CHARACTER elements of an array constructor have the same length
   (see gfc_trans_array_ctor_element below): FIRST_LEN flags whether the
   first length has been recorded yet, FIRST_LEN_VAL holds it, and
   TYPESPEC_CHARARRAY_CTOR disables the check when the constructor has
   an explicit typespec.  */
1166 static bool first_len;
1167 static tree first_len_val;
1168 static bool typespec_chararray_ctor;
/* Translate constructor element EXPR and store it at index OFFSET in the
   constructor temporary described by DESC.  SE receives the translated
   expression; its pre/post blocks are folded into PBLOCK.  */
1171 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1172 tree offset, gfc_se * se, gfc_expr * expr)
1176 gfc_conv_expr (se, expr);
1178 /* Store the value. */
1179 tmp = build_fold_indirect_ref_loc (input_location,
1180 gfc_conv_descriptor_data_get (desc));
1181 tmp = gfc_build_array_ref (tmp, offset, NULL);
1183 if (expr->ts.type == BT_CHARACTER)
1185 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
/* Element size in characters = byte size / bytes per character.  */
1188 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1189 esize = fold_convert (gfc_charlen_type_node, esize);
1190 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1191 gfc_charlen_type_node, esize,
1192 build_int_cst (gfc_charlen_type_node,
1193 gfc_character_kinds[i].bit_size / 8));
1195 gfc_conv_string_parameter (se);
1196 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1198 /* The temporary is an array of pointers. */
1199 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1200 gfc_add_modify (&se->pre, tmp, se->expr);
1204 /* The temporary is an array of string values. */
1205 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1206 /* We know the temporary and the value will be the same length,
1207 so can use memcpy. */
1208 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1209 se->string_length, se->expr, expr->ts.kind)
1211 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
/* Remember the length of the first element...  */
1215 gfc_add_modify (&se->pre, first_len_val,
1221 /* Verify that all constructor elements are of the same
1222 length.  */
1223 tree cond = fold_build2_loc (input_location, NE_EXPR,
1224 boolean_type_node, first_len_val,
1226 gfc_trans_runtime_check
1227 (true, false, cond, &se->pre, &expr->where,
1228 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1229 fold_convert (long_integer_type_node, first_len_val),
1230 fold_convert (long_integer_type_node, se->string_length));
1236 /* TODO: Should the frontend already have done this conversion? */
1237 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1238 gfc_add_modify (&se->pre, tmp, se->expr);
1241 gfc_add_block_to_block (pblock, &se->pre);
1242 gfc_add_block_to_block (pblock, &se->post);
1246 /* Add the contents of an array to the constructor.  DYNAMIC is as for
1247 gfc_trans_array_constructor_value.  A scalarization loop is built over
   the subarray expression EXPR, each element is stored through
   gfc_trans_array_ctor_element, and *POFFSET is advanced (as a variable,
   via *OFFSETVAR) by one per element.  */
1250 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1251 tree type ATTRIBUTE_UNUSED,
1252 tree desc, gfc_expr * expr,
1253 tree * poffset, tree * offsetvar,
1264 /* We need this to be a variable so we can increment it.  */
1265 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1267 gfc_init_se (&se, NULL);
1269 /* Walk the array expression.  */
1270 ss = gfc_walk_expr (expr);
1271 gcc_assert (ss != gfc_ss_terminator);
1273 /* Initialize the scalarizer.  */
1274 gfc_init_loopinfo (&loop);
1275 gfc_add_ss_to_loop (&loop, ss);
1277 /* Initialize the loop.  */
1278 gfc_conv_ss_startstride (&loop);
1279 gfc_conv_loop_setup (&loop, &expr->where);
1281 /* Make sure the constructed array has room for the new data.  */
1284 /* Set SIZE to the total number of elements in the subarray.  */
1285 size = gfc_index_one_node;
1286 for (n = 0; n < loop.dimen; n++)
1288 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1289 gfc_index_one_node);
1290 size = fold_build2_loc (input_location, MULT_EXPR,
1291 gfc_array_index_type, size, tmp);
1294 /* Grow the constructed array by SIZE elements.  */
1295 gfc_grow_array (&loop.pre, desc, size);
1298 /* Make the loop body.  */
1299 gfc_mark_ss_chain_used (ss, 1);
1300 gfc_start_scalarized_body (&loop, &body);
1301 gfc_copy_loopinfo_to_se (&se, &loop);
1304 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1305 gcc_assert (se.ss == gfc_ss_terminator);
1307 /* Increment the offset.  */
1308 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1309 *poffset, gfc_index_one_node);
1310 gfc_add_modify (&body, *poffset, tmp);
1312 /* Finish the loop.  */
1313 gfc_trans_scalarizing_loops (&loop, &body);
1314 gfc_add_block_to_block (&loop.pre, &loop.post);
1315 tmp = gfc_finish_block (&loop.pre);
1316 gfc_add_expr_to_block (pblock, tmp);
1318 gfc_cleanup_loop (&loop);
1322 /* Assign the values to the elements of an array constructor.  DYNAMIC
1323 is true if descriptor DESC only contains enough data for the static
1324 size calculated by gfc_get_array_constructor_size.  When true, memory
1325 for the dynamic parts must be allocated using realloc.
   Walks the constructor list BASE, handling four element kinds:
   nested array constructors (recursion), rank > 0 subexpressions
   (gfc_trans_array_constructor_subarray), scalar values (with a run of
   consecutive scalar constants collapsed into one static initializer
   copied via memcpy), and iterator elements (an explicit implied-DO
   loop built with a shadow loop variable).  *POFFSET/ *OFFSETVAR track
   the next free element index, as in the subarray routine.  */
1328 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1329 tree desc, gfc_constructor_base base,
1330 tree * poffset, tree * offsetvar,
1339 tree shadow_loopvar = NULL_TREE;
1340 gfc_saved_var saved_loopvar;
1343 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1345 /* If this is an iterator or an array, the offset must be a variable.  */
1346 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1347 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1349 /* Shadowing the iterator avoids changing its value and saves us from
1350 keeping track of it.  Further, it makes sure that there's always a
1351 backend-decl for the symbol, even if there wasn't one before,
1352 e.g. in the case of an iterator that appears in a specification
1353 expression in an interface mapping.  */
1356 gfc_symbol *sym = c->iterator->var->symtree->n.sym;
1357 tree type = gfc_typenode_for_spec (&sym->ts);
1359 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1360 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1363 gfc_start_block (&body);
1365 if (c->expr->expr_type == EXPR_ARRAY)
1367 /* Array constructors can be nested.  */
1368 gfc_trans_array_constructor_value (&body, type, desc,
1369 c->expr->value.constructor,
1370 poffset, offsetvar, dynamic);
1372 else if (c->expr->rank > 0)
1374 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1375 poffset, offsetvar, dynamic);
1379 /* This code really upsets the gimplifier so don't bother for now.  */
1386 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1388 p = gfc_constructor_next (p);
1393 /* Scalar values.  */
1394 gfc_init_se (&se, NULL);
1395 gfc_trans_array_ctor_element (&body, desc, *poffset,
1398 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1399 gfc_array_index_type,
1400 *poffset, gfc_index_one_node);
1404 /* Collect multiple scalar constants into a constructor.  */
1405 VEC(constructor_elt,gc) *v = NULL;
1409 HOST_WIDE_INT idx = 0;
1412 /* Count the number of consecutive scalar constants.  */
1413 while (p && !(p->iterator
1414 || p->expr->expr_type != EXPR_CONSTANT))
1416 gfc_init_se (&se, NULL);
1417 gfc_conv_constant (&se, p->expr);
1419 if (c->expr->ts.type != BT_CHARACTER)
1420 se.expr = fold_convert (type, se.expr);
1421 /* For constant character array constructors we build
1422 an array of pointers.  */
1423 else if (POINTER_TYPE_P (type))
1424 se.expr = gfc_build_addr_expr
1425 (gfc_get_pchar_type (p->expr->ts.kind),
1428 CONSTRUCTOR_APPEND_ELT (v,
1429 build_int_cst (gfc_array_index_type,
1433 p = gfc_constructor_next (p);
1436 bound = size_int (n - 1);
1437 /* Create an array type to hold them.  */
1438 tmptype = build_range_type (gfc_array_index_type,
1439 gfc_index_zero_node, bound);
1440 tmptype = build_array_type (type, tmptype);
1442 init = build_constructor (tmptype, v);
1443 TREE_CONSTANT (init) = 1;
1444 TREE_STATIC (init) = 1;
1445 /* Create a static variable to hold the data.  */
1446 tmp = gfc_create_var (tmptype, "data");
1447 TREE_STATIC (tmp) = 1;
1448 TREE_CONSTANT (tmp) = 1;
1449 TREE_READONLY (tmp) = 1;
1450 DECL_INITIAL (tmp) = init;
1453 /* Use BUILTIN_MEMCPY to assign the values.  */
1454 tmp = gfc_conv_descriptor_data_get (desc);
1455 tmp = build_fold_indirect_ref_loc (input_location,
1457 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1458 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1459 init = gfc_build_addr_expr (NULL_TREE, init);
/* Total byte count for the memcpy: N elements of the element type.  */
1461 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1462 bound = build_int_cst (size_type_node, n * size);
1463 tmp = build_call_expr_loc (input_location,
1464 builtin_decl_explicit (BUILT_IN_MEMCPY),
1465 3, tmp, init, bound);
1466 gfc_add_expr_to_block (&body, tmp);
/* Advance the offset past the N elements just copied.  */
1468 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1469 gfc_array_index_type, *poffset,
1470 build_int_cst (gfc_array_index_type, n));
1472 if (!INTEGER_CST_P (*poffset))
1474 gfc_add_modify (&body, *offsetvar, *poffset);
1475 *poffset = *offsetvar;
1479 /* The frontend should already have done any expansions
1483 /* Pass the code as is.  */
1484 tmp = gfc_finish_block (&body);
1485 gfc_add_expr_to_block (pblock, tmp);
1489 /* Build the implied do-loop.  */
1490 stmtblock_t implied_do_block;
1498 loopbody = gfc_finish_block (&body);
1500 /* Create a new block that holds the implied-do loop.  A temporary
1501 loop-variable is used.  */
1502 gfc_start_block(&implied_do_block);
1504 /* Initialize the loop.  */
1505 gfc_init_se (&se, NULL);
1506 gfc_conv_expr_val (&se, c->iterator->start);
1507 gfc_add_block_to_block (&implied_do_block, &se.pre);
1508 gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
1510 gfc_init_se (&se, NULL);
1511 gfc_conv_expr_val (&se, c->iterator->end);
1512 gfc_add_block_to_block (&implied_do_block, &se.pre);
1513 end = gfc_evaluate_now (se.expr, &implied_do_block);
1515 gfc_init_se (&se, NULL);
1516 gfc_conv_expr_val (&se, c->iterator->step);
1517 gfc_add_block_to_block (&implied_do_block, &se.pre);
1518 step = gfc_evaluate_now (se.expr, &implied_do_block);
1520 /* If this array expands dynamically, and the number of iterations
1521 is not constant, we won't have allocated space for the static
1522 part of C->EXPR's size.  Do that now.  */
1523 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1525 /* Get the number of iterations.  */
1526 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1528 /* Get the static part of C->EXPR's size.  */
1529 gfc_get_array_constructor_element_size (&size, c->expr);
1530 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1532 /* Grow the array by TMP * TMP2 elements.  */
1533 tmp = fold_build2_loc (input_location, MULT_EXPR,
1534 gfc_array_index_type, tmp, tmp2);
1535 gfc_grow_array (&implied_do_block, desc, tmp);
1538 /* Generate the loop body.  */
1539 exit_label = gfc_build_label_decl (NULL_TREE);
1540 gfc_start_block (&body);
1542 /* Generate the exit condition.  Depending on the sign of
1543 the step variable we have to generate the correct
/* For positive step exit when loopvar > end, for negative step when
   loopvar < end.  */
1545 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1546 step, build_int_cst (TREE_TYPE (step), 0));
1547 cond = fold_build3_loc (input_location, COND_EXPR,
1548 boolean_type_node, tmp,
1549 fold_build2_loc (input_location, GT_EXPR,
1550 boolean_type_node, shadow_loopvar, end),
1551 fold_build2_loc (input_location, LT_EXPR,
1552 boolean_type_node, shadow_loopvar, end));
1553 tmp = build1_v (GOTO_EXPR, exit_label);
1554 TREE_USED (exit_label) = 1;
1555 tmp = build3_v (COND_EXPR, cond, tmp,
1556 build_empty_stmt (input_location));
1557 gfc_add_expr_to_block (&body, tmp);
1559 /* The main loop body.  */
1560 gfc_add_expr_to_block (&body, loopbody);
1562 /* Increase loop variable by step.  */
1563 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1564 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1566 gfc_add_modify (&body, shadow_loopvar, tmp);
1568 /* Finish the loop.  */
1569 tmp = gfc_finish_block (&body);
1570 tmp = build1_v (LOOP_EXPR, tmp);
1571 gfc_add_expr_to_block (&implied_do_block, tmp);
1573 /* Add the exit label.  */
1574 tmp = build1_v (LABEL_EXPR, exit_label);
1575 gfc_add_expr_to_block (&implied_do_block, tmp);
1577 /* Finish the implied-do loop.  */
1578 tmp = gfc_finish_block(&implied_do_block);
1579 gfc_add_expr_to_block(pblock, tmp);
/* Un-shadow the iterator symbol now that the loop is complete.  */
1581 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1588 /* A catch-all to obtain the string length for anything that is not
1589 a substring of non-constant length, a constant, array or variable.
   Evaluates E (possibly via a full descriptor conversion) just to fix
   *LEN, and caches the result in E's charlen backend_decl.  */
1592 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1597 /* Don't bother if we already know the length is a constant.  */
1598 if (*len && INTEGER_CST_P (*len))
/* A constant typespec length can be used directly without evaluating E.  */
1601 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1602 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1605 gfc_conv_const_charlen (e->ts.u.cl);
1606 *len = e->ts.u.cl->backend_decl;
1610 /* Otherwise, be brutal even if inefficient.  */
1611 ss = gfc_walk_expr (e);
1612 gfc_init_se (&se, NULL);
1614 /* No function call, in case of side effects.  */
1615 se.no_function_call = 1;
1616 if (ss == gfc_ss_terminator)
1617 gfc_conv_expr (&se, e);
1619 gfc_conv_expr_descriptor (&se, e, ss);
1621 /* Fix the value.  */
1622 *len = gfc_evaluate_now (se.string_length, &se.pre);
1624 gfc_add_block_to_block (block, &se.pre);
1625 gfc_add_block_to_block (block, &se.post);
/* Cache the computed length so it is not recomputed later.  */
1627 e->ts.u.cl->backend_decl = *len;
1632 /* Figure out the string length of a variable reference expression.
1633 Used by get_array_ctor_strlen.  Walks the reference chain of EXPR:
   component references switch to the component's typespec; a substring
   with constant bounds yields end - start + 1, otherwise the generic
   get_array_ctor_all_strlen fallback is used.  */
1636 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1642 /* Don't bother if we already know the length is a constant.  */
1643 if (*len && INTEGER_CST_P (*len))
1646 ts = &expr->symtree->n.sym->ts;
1647 for (ref = expr->ref; ref; ref = ref->next)
1652 /* Array references don't change the string length.  */
1656 /* Use the length of the component.  */
1657 ts = &ref->u.c.component->ts;
1661 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1662 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1664 /* Note that this might evaluate expr.  */
1665 get_array_ctor_all_strlen (block, expr, len);
/* Constant substring bounds: length = end - start + 1, computed with
   GMP and converted to a charlen tree.  */
1668 mpz_init_set_ui (char_len, 1);
1669 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1670 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1671 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1672 *len = convert (gfc_charlen_type_node, *len);
1673 mpz_clear (char_len);
1681 *len = ts->u.cl->backend_decl;
1685 /* Figure out the string length of a character array constructor.
1686 If len is NULL, don't calculate the length; this happens for recursive calls
1687 when a sub-array-constructor is an element but not at the first position,
1688 so when we're not interested in the length.
1689 Returns TRUE if all elements are character constants.  */
1692 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
/* An empty constructor has length zero by definition.  */
1699 if (gfc_constructor_first (base) == NULL)
1702 *len = build_int_cstu (gfc_charlen_type_node, 0);
1706 /* Loop over all constructor elements to find out is_const, but in len we
1707 want to store the length of the first, not the last, element.  We can
1708 of course exit the loop as soon as is_const is found to be false.  */
1709 for (c = gfc_constructor_first (base);
1710 c && is_const; c = gfc_constructor_next (c))
1712 switch (c->expr->expr_type)
/* Only set *LEN if it is not already a known constant.  */
1715 if (len && !(*len && INTEGER_CST_P (*len)))
1716 *len = build_int_cstu (gfc_charlen_type_node,
1717 c->expr->value.character.length);
/* Nested constructor: recurse; a non-constant nested element makes the
   whole constructor non-constant.  */
1721 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1728 get_array_ctor_var_strlen (block, c->expr, len);
1734 get_array_ctor_all_strlen (block, c->expr, len);
1738 /* After the first iteration, we don't want the length modified.  */
1745 /* Check whether the array constructor C consists entirely of constant
1746 elements, and if so returns the number of those elements, otherwise
1747 return zero.  Note, an empty or NULL array constructor returns zero.  */
1749 unsigned HOST_WIDE_INT
1750 gfc_constant_array_constructor_p (gfc_constructor_base base)
1752 unsigned HOST_WIDE_INT nelem = 0;
1754 gfc_constructor *c = gfc_constructor_first (base);
/* Any iterator, non-scalar element, or non-constant expression
   disqualifies the constructor.  */
1758 || c->expr->rank > 0
1759 || c->expr->expr_type != EXPR_CONSTANT)
1761 c = gfc_constructor_next (c);
1768 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1769 and the tree type of its elements, TYPE, return a static constant
1770 variable that is compile-time initialized.  */
1773 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1775 tree tmptype, init, tmp;
1776 HOST_WIDE_INT nelem;
1781 VEC(constructor_elt,gc) *v = NULL;
1783 /* First traverse the constructor list, converting the constants
1784 to tree to build an initializer.  */
1786 c = gfc_constructor_first (expr->value.constructor);
1789 gfc_init_se (&se, NULL);
1790 gfc_conv_constant (&se, c->expr);
/* Character constants are stored as pointers when the element type is a
   pointer type; other types are converted to the element type.  */
1791 if (c->expr->ts.type != BT_CHARACTER)
1792 se.expr = fold_convert (type, se.expr);
1793 else if (POINTER_TYPE_P (type))
1794 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
1796 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
1798 c = gfc_constructor_next (c);
1802 /* Next determine the tree type for the array.  We use the gfortran
1803 front-end's gfc_get_nodesc_array_type in order to create a suitable
1804 GFC_ARRAY_TYPE_P that may be used by the scalarizer.  */
1806 memset (&as, 0, sizeof (gfc_array_spec));
1808 as.rank = expr->rank;
1809 as.type = AS_EXPLICIT;
/* Rank-1 bounds come from the element count; multi-rank bounds come
   from EXPR's shape.  Bounds are zero-based here.  */
1812 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1813 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
1817 for (i = 0; i < expr->rank; i++)
1819 int tmp = (int) mpz_get_si (expr->shape[i]);
1820 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1821 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
1825 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
1827 /* as is not needed anymore.  */
1828 for (i = 0; i < as.rank + as.corank; i++)
1830 gfc_free_expr (as.lower[i]);
1831 gfc_free_expr (as.upper[i]);
1834 init = build_constructor (tmptype, v);
1836 TREE_CONSTANT (init) = 1;
1837 TREE_STATIC (init) = 1;
/* Emit a read-only static variable holding the initializer.  */
1839 tmp = gfc_create_var (tmptype, "A");
1840 TREE_STATIC (tmp) = 1;
1841 TREE_CONSTANT (tmp) = 1;
1842 TREE_READONLY (tmp) = 1;
1843 DECL_INITIAL (tmp) = init;
1849 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
1850 This mostly initializes the scalarizer state info structure with the
1851 appropriate values to directly use the array created by the function
1852 gfc_build_constant_array_constructor.  */
1855 trans_constant_array_constructor (gfc_ss * ss, tree type)
1861 tmp = gfc_build_constant_array_constructor (ss->expr, type);
1863 info = &ss->data.info;
1865 info->descriptor = tmp;
1866 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
1867 info->offset = gfc_index_zero_node;
/* The static array is zero-based and contiguous: zero deltas/starts/ends
   and unit strides in every dimension.  */
1869 for (i = 0; i < info->dimen; i++)
1871 info->delta[i] = gfc_index_zero_node;
1872 info->start[i] = gfc_index_zero_node;
1873 info->end[i] = gfc_index_zero_node;
1874 info->stride[i] = gfc_index_one_node;
1878 /* Helper routine of gfc_trans_array_constructor to determine if the
1879 bounds of the loop specified by LOOP are constant and simple enough
1880 to use with trans_constant_array_constructor.  Returns the
1881 iteration count of the loop if suitable, and NULL_TREE otherwise.  */
1884 constant_array_constructor_loop_size (gfc_loopinfo * loop)
1886 tree size = gfc_index_one_node;
1890 for (i = 0; i < loop->dimen; i++)
1892 /* If the bounds aren't constant, return NULL_TREE.  */
1893 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
1895 if (!integer_zerop (loop->from[i]))
1897 /* Only allow nonzero "from" in one-dimensional arrays.  */
1898 if (loop->dimen != 1)
1900 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1901 gfc_array_index_type,
1902 loop->to[i], loop->from[i]);
/* size *= (extent + 1), accumulating the total iteration count.  */
1906 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1907 tmp, gfc_index_one_node);
1908 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1916 /* Array constructors are handled by constructing a temporary, then using that
1917 within the scalarization loop.  This is not optimal, but seems by far the
   simplest approach.  Determines the element type and string length,
   derives loop bounds from the constructor when possible, special-cases
   fully-constant constructors (static data, no runtime loop), and
   otherwise creates a temporary and fills it with
   gfc_trans_array_constructor_value.  Saves and restores the file-scope
   length-check state so nested constructors are handled correctly.  */
1921 gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
1923 gfc_constructor_base c;
1930 bool old_first_len, old_typespec_chararray_ctor;
1931 tree old_first_len_val;
1933 /* Save the old values for nested checking.  */
1934 old_first_len = first_len;
1935 old_first_len_val = first_len_val;
1936 old_typespec_chararray_ctor = typespec_chararray_ctor;
1938 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
1939 typespec was given for the array constructor.  */
1940 typespec_chararray_ctor = (ss->expr->ts.u.cl
1941 && ss->expr->ts.u.cl->length_from_typespec);
1943 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
1944 && ss->expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
1946 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
1950 gcc_assert (ss->data.info.dimen == loop->dimen);
1952 c = ss->expr->value.constructor;
1953 if (ss->expr->ts.type == BT_CHARACTER)
1957 /* get_array_ctor_strlen walks the elements of the constructor, if a
1958 typespec was given, we already know the string length and want the one
1960 if (typespec_chararray_ctor && ss->expr->ts.u.cl->length
1961 && ss->expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
1965 const_string = false;
1966 gfc_init_se (&length_se, NULL);
1967 gfc_conv_expr_type (&length_se, ss->expr->ts.u.cl->length,
1968 gfc_charlen_type_node);
1969 ss->string_length = length_se.expr;
1970 gfc_add_block_to_block (&loop->pre, &length_se.pre);
1971 gfc_add_block_to_block (&loop->post, &length_se.post);
1974 const_string = get_array_ctor_strlen (&loop->pre, c,
1975 &ss->string_length);
1977 /* Complex character array constructors should have been taken care of
1978 and not end up here.  */
1979 gcc_assert (ss->string_length);
1981 ss->expr->ts.u.cl->backend_decl = ss->string_length;
1983 type = gfc_get_character_type_len (ss->expr->ts.kind, ss->string_length);
1985 type = build_pointer_type (type);
1988 type = gfc_typenode_for_spec (&ss->expr->ts);
1990 /* See if the constructor determines the loop bounds.  */
1993 if (ss->expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
1995 /* We have a multidimensional parameter.  */
1997 for (n = 0; n < ss->expr->rank; n++)
1999 loop->from[n] = gfc_index_zero_node;
2000 loop->to[n] = gfc_conv_mpz_to_tree (ss->expr->shape [n],
2001 gfc_index_integer_kind);
2002 loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2003 gfc_array_index_type,
2004 loop->to[n], gfc_index_one_node);
2008 if (loop->to[0] == NULL_TREE)
2012 /* We should have a 1-dimensional, zero-based loop.  */
2013 gcc_assert (loop->dimen == 1);
2014 gcc_assert (integer_zerop (loop->from[0]));
2016 /* Split the constructor size into a static part and a dynamic part.
2017 Allocate the static size up-front and record whether the dynamic
2018 size might be nonzero.  */
2020 dynamic = gfc_get_array_constructor_size (&size, c);
2021 mpz_sub_ui (size, size, 1);
2022 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2026 /* Special case constant array constructors.  */
2029 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2032 tree size = constant_array_constructor_loop_size (loop);
/* Only safe when the loop iteration count matches the element count.  */
2033 if (size && compare_tree_int (size, nelem) == 0)
2035 trans_constant_array_constructor (ss, type);
2041 if (TREE_CODE (loop->to[0]) == VAR_DECL)
2044 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, ss,
2045 type, NULL_TREE, dynamic, true, false, where);
2047 desc = ss->data.info.descriptor;
2048 offset = gfc_index_zero_node;
2049 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2050 TREE_NO_WARNING (offsetvar) = 1;
2051 TREE_USED (offsetvar) = 0;
2052 gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
2053 &offset, &offsetvar, dynamic);
2055 /* If the array grows dynamically, the upper bound of the loop variable
2056 is determined by the array's final upper bound.  */
2059 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2060 gfc_array_index_type,
2061 offsetvar, gfc_index_one_node);
2062 tmp = gfc_evaluate_now (tmp, &loop->pre);
2063 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2064 if (loop->to[0] && TREE_CODE (loop->to[0]) == VAR_DECL)
2065 gfc_add_modify (&loop->pre, loop->to[0], tmp);
2070 if (TREE_USED (offsetvar))
2071 pushdecl (offsetvar);
2073 gcc_assert (INTEGER_CST_P (offset));
2076 /* Disable bound checking for now because it's probably broken.  */
2077 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2084 /* Restore old values of globals.  */
2085 first_len = old_first_len;
2086 first_len_val = old_first_len_val;
2087 typespec_chararray_ctor = old_typespec_chararray_ctor;
2091 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2092 called after evaluating all of INFO's vector dimensions.  Go through
2093 each such vector dimension and see if we can now fill in any missing
   loop bounds: an unset upper bound for a vector-subscripted dimension
   becomes ubound - lbound of the subscript vector's descriptor.  */
2097 set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss * ss)
2107 info = &ss->data.info;
2109 for (n = 0; n < loop->dimen; n++)
2112 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
2113 && loop->to[n] == NULL)
2115 /* Loop variable N indexes vector dimension DIM, and we don't
2116 yet know the upper bound of loop variable N.  Set it to the
2117 difference between the vector's upper and lower bounds.  */
2118 gcc_assert (loop->from[n] == gfc_index_zero_node);
2119 gcc_assert (info->subscript[dim]
2120 && info->subscript[dim]->type == GFC_SS_VECTOR);
2122 gfc_init_se (&se, NULL);
2123 desc = info->subscript[dim]->data.info.descriptor;
2124 zero = gfc_rank_cst[0];
2125 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2126 gfc_array_index_type,
2127 gfc_conv_descriptor_ubound_get (desc, zero),
2128 gfc_conv_descriptor_lbound_get (desc, zero));
2129 tmp = gfc_evaluate_now (tmp, &loop->pre);
2136 /* Add the pre and post chains for all the scalar expressions in a SS chain
2137 to loop.  This is called after the loop parameters have been calculated,
2138 but before the actual scalarizing loops.  Dispatches on the SS type:
   scalars and references are evaluated now, sections get their scalar and
   vector subscripts processed, vectors get a descriptor, intrinsics and
   functions and constructors are delegated to their translators.  */
2141 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2147 /* TODO: This can generate bad code if there are ordering dependencies,
2148 e.g., a callee allocated function and an unknown size constructor.  */
2149 gcc_assert (ss != NULL);
2151 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2158 /* Scalar expression.  Evaluate this now.  This includes elemental
2159 dimension indices, but not array section bounds.  */
2160 gfc_init_se (&se, NULL);
2161 gfc_conv_expr (&se, ss->expr);
2162 gfc_add_block_to_block (&loop->pre, &se.pre);
2164 if (ss->expr->ts.type != BT_CHARACTER)
2166 /* Move the evaluation of scalar expressions outside the
2167 scalarization loop, except for WHERE assignments.  */
2169 se.expr = convert(gfc_array_index_type, se.expr);
2171 se.expr = gfc_evaluate_now (se.expr, &loop->pre);
2172 gfc_add_block_to_block (&loop->pre, &se.post);
2175 gfc_add_block_to_block (&loop->post, &se.post);
2177 ss->data.scalar.expr = se.expr;
2178 ss->string_length = se.string_length;
2181 case GFC_SS_REFERENCE:
2182 /* Scalar argument to elemental procedure.  Evaluate this
2184 gfc_init_se (&se, NULL);
2185 gfc_conv_expr (&se, ss->expr);
2186 gfc_add_block_to_block (&loop->pre, &se.pre);
2187 gfc_add_block_to_block (&loop->post, &se.post);
2189 ss->data.scalar.expr = gfc_evaluate_now (se.expr, &loop->pre);
2190 ss->string_length = se.string_length;
2193 case GFC_SS_SECTION:
2194 /* Add the expressions for scalar and vector subscripts.  */
2195 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2196 if (ss->data.info.subscript[n])
2197 gfc_add_loop_ss_code (loop, ss->data.info.subscript[n], true,
2200 set_vector_loop_bounds (loop, ss);
2204 /* Get the vector's descriptor and store it in SS.  */
2205 gfc_init_se (&se, NULL);
2206 gfc_conv_expr_descriptor (&se, ss->expr, gfc_walk_expr (ss->expr));
2207 gfc_add_block_to_block (&loop->pre, &se.pre);
2208 gfc_add_block_to_block (&loop->post, &se.post);
2209 ss->data.info.descriptor = se.expr;
2212 case GFC_SS_INTRINSIC:
2213 gfc_add_intrinsic_ss_code (loop, ss);
2216 case GFC_SS_FUNCTION:
2217 /* Array function return value.  We call the function and save its
2218 result in a temporary for use inside the loop.  */
2219 gfc_init_se (&se, NULL);
2222 gfc_conv_expr (&se, ss->expr);
2223 gfc_add_block_to_block (&loop->pre, &se.pre);
2224 gfc_add_block_to_block (&loop->post, &se.post);
2225 ss->string_length = se.string_length;
2228 case GFC_SS_CONSTRUCTOR:
/* Character constructors may still need a string length from the
   typespec before translation.  */
2229 if (ss->expr->ts.type == BT_CHARACTER
2230 && ss->string_length == NULL
2231 && ss->expr->ts.u.cl
2232 && ss->expr->ts.u.cl->length)
2234 gfc_init_se (&se, NULL);
2235 gfc_conv_expr_type (&se, ss->expr->ts.u.cl->length,
2236 gfc_charlen_type_node);
2237 ss->string_length = se.expr;
2238 gfc_add_block_to_block (&loop->pre, &se.pre);
2239 gfc_add_block_to_block (&loop->post, &se.post);
2241 gfc_trans_array_constructor (loop, ss, where);
2245 case GFC_SS_COMPONENT:
2246 /* Do nothing.  These are handled elsewhere.  */
2256 /* Translate expressions for the descriptor and data pointer of a SS.
   Fills in SS's descriptor, data pointer, offset and saved_offset from a
   variable expression; BASE is presumably a flag controlling whether the
   data pointer/offset are computed — TODO confirm against the full
   source (the guard using it is elided in this listing).  */
2260 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2265 /* Get the descriptor for the array to be scalarized.  */
2266 gcc_assert (ss->expr->expr_type == EXPR_VARIABLE);
2267 gfc_init_se (&se, NULL);
2268 se.descriptor_only = 1;
2269 gfc_conv_expr_lhs (&se, ss->expr);
2270 gfc_add_block_to_block (block, &se.pre);
2271 ss->data.info.descriptor = se.expr;
2272 ss->string_length = se.string_length;
2276 /* Also the data pointer.  */
2277 tmp = gfc_conv_array_data (se.expr);
2278 /* If this is a variable or address of a variable we use it directly.
2279 Otherwise we must evaluate it now to avoid breaking dependency
2280 analysis by pulling the expressions for elemental array indices
2283 || (TREE_CODE (tmp) == ADDR_EXPR
2284 && DECL_P (TREE_OPERAND (tmp, 0)))))
2285 tmp = gfc_evaluate_now (tmp, block);
2286 ss->data.info.data = tmp;
2288 tmp = gfc_conv_array_offset (se.expr);
2289 ss->data.info.offset = gfc_evaluate_now (tmp, block);
2291 /* Make absolutely sure that the saved_offset is indeed saved
2292 so that the variable is still accessible after the loops
2294 ss->data.info.saved_offset = ss->data.info.offset;
2299 /* Initialize a gfc_loopinfo structure: zero it, set up its pre/post
   statement blocks, default every dimension's reversal to inhibited,
   and start with an empty (terminator) SS chain.  */
2302 gfc_init_loopinfo (gfc_loopinfo * loop)
2306 memset (loop, 0, sizeof (gfc_loopinfo));
2307 gfc_init_block (&loop->pre);
2308 gfc_init_block (&loop->post);
2310 /* Initially scalarize in order and default to no loop reversal.  */
2311 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2314 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2317 loop->ss = gfc_ss_terminator;
2321 /* Copies the loop variable info to a gfc_se structure.  Does not copy the SS
   chain (NOTE(review): sentence truncated in this listing — confirm
   against the full source; the body of this function is also elided).  */
2325 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2331 /* Return an expression for the data pointer of an array.  For
   descriptorless (GFC_ARRAY_TYPE_P) arrays this is the variable itself
   (or its address); for descriptor arrays the descriptor's data field.  */
2334 gfc_conv_array_data (tree descriptor)
2338 type = TREE_TYPE (descriptor);
2339 if (GFC_ARRAY_TYPE_P (type))
2341 if (TREE_CODE (type) == POINTER_TYPE)
2345 /* Descriptorless arrays.  */
2346 return gfc_build_addr_expr (NULL_TREE, descriptor);
2350 return gfc_conv_descriptor_data_get (descriptor);
2354 /* Return an expression for the base offset of an array: the type's
   cached offset for descriptorless arrays, otherwise the descriptor's
   offset field.  */
2357 gfc_conv_array_offset (tree descriptor)
2361 type = TREE_TYPE (descriptor);
2362 if (GFC_ARRAY_TYPE_P (type))
2363 return GFC_TYPE_ARRAY_OFFSET (type);
2365 return gfc_conv_descriptor_offset_get (descriptor);
2369 /* Get an expression for the array stride in dimension DIM: the type's
   cached stride when known, otherwise the descriptor's stride field.  */
2372 gfc_conv_array_stride (tree descriptor, int dim)
2377 type = TREE_TYPE (descriptor);
2379 /* For descriptorless arrays use the array size.  */
2380 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2381 if (tmp != NULL_TREE)
2384 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2389 /* Like gfc_conv_array_stride, but for the lower bound of dimension DIM.  */
2392 gfc_conv_array_lbound (tree descriptor, int dim)
2397 type = TREE_TYPE (descriptor);
2399 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2400 if (tmp != NULL_TREE)
2403 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2408 /* Like gfc_conv_array_stride, but for the upper bound of dimension DIM.  */
2411 gfc_conv_array_ubound (tree descriptor, int dim)
2416 type = TREE_TYPE (descriptor);
2418 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2419 if (tmp != NULL_TREE)
2422 /* This should only ever happen when passing an assumed shape array
2423 as an actual parameter.  The value will never be used.  */
2424 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2425 return gfc_index_zero_node;
2427 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2432 /* Generate code to perform an array index bound check on INDEX for
   dimension N of the array scalarized by SS.  Emits runtime checks into
   SE->pre (lower bound always; upper bound only when CHECK_UPPER, e.g.
   not for the last dimension of an assumed-size array).  WHERE locates
   the error message.  Returns the (evaluated) index.  No-op unless
   -fcheck=bounds is enabled.  */
2435 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2436 locus * where, bool check_upper)
2439 tree tmp_lo, tmp_up;
2442 const char * name = NULL;
2444 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2447 descriptor = ss->data.info.descriptor;
2449 index = gfc_evaluate_now (index, &se->pre);
2451 /* We find a name for the error message.  */
2452 name = ss->expr->symtree->n.sym->name;
2453 gcc_assert (name != NULL);
2455 if (TREE_CODE (descriptor) == VAR_DECL)
2456 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2458 /* If upper bound is present, include both bounds in the error message.  */
2461 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2462 tmp_up = gfc_conv_array_ubound (descriptor, n);
2465 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2466 "outside of expected range (%%ld:%%ld)", n+1, name);
2468 asprintf (&msg, "Index '%%ld' of dimension %d "
2469 "outside of expected range (%%ld:%%ld)", n+1);
/* Check index < lower bound, then index > upper bound.  */
2471 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2473 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2474 fold_convert (long_integer_type_node, index),
2475 fold_convert (long_integer_type_node, tmp_lo),
2476 fold_convert (long_integer_type_node, tmp_up));
2477 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2479 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2480 fold_convert (long_integer_type_node, index),
2481 fold_convert (long_integer_type_node, tmp_lo),
2482 fold_convert (long_integer_type_node, tmp_up));
/* No upper bound available: only the lower bound is checked.  */
2487 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2490 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2491 "below lower bound of %%ld", n+1, name);
2493 asprintf (&msg, "Index '%%ld' of dimension %d "
2494 "below lower bound of %%ld", n+1);
2496 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2498 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2499 fold_convert (long_integer_type_node, index),
2500 fold_convert (long_integer_type_node, tmp_lo));
2508 /* Return the offset for an index. Performs bound checking for elemental
2509 dimensions. Single element references are processed separately.
2510 DIM is the array dimension, I is the loop dimension. */
/* Dispatches on AR->dimen_type[DIM]: elemental subscripts reuse the
   scalar expression translated outside the loop, vector subscripts read
   the index out of the vector's descriptor, and ranges combine the loop
   variable with the per-dimension stride and delta.  The result is an
   index scaled by STRIDE for use in the flat offset computation.  */
2513 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2514 gfc_array_ref * ar, tree stride)
2521 info = &ss->data.info;
2523 /* Get the index into the array for this dimension. */
2526 gcc_assert (ar->type != AR_ELEMENT);
2527 switch (ar->dimen_type[dim])
2529 case DIMEN_THIS_IMAGE:
2533 /* Elemental dimension. */
2534 gcc_assert (info->subscript[dim]
2535 && info->subscript[dim]->type == GFC_SS_SCALAR);
2536 /* We've already translated this value outside the loop. */
2537 index = info->subscript[dim]->data.scalar.expr;
/* Upper-bound check is skipped only for the last dimension of an
   assumed-size array.  */
2539 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2540 ar->as->type != AS_ASSUMED_SIZE
2541 || dim < ar->dimen - 1);
/* Vector subscript: fetch the subscript value from the vector.  */
2545 gcc_assert (info && se->loop);
2546 gcc_assert (info->subscript[dim]
2547 && info->subscript[dim]->type == GFC_SS_VECTOR);
2548 desc = info->subscript[dim]->data.info.descriptor;
2550 /* Get a zero-based index into the vector. */
2551 index = fold_build2_loc (input_location, MINUS_EXPR,
2552 gfc_array_index_type,
2553 se->loop->loopvar[i], se->loop->from[i]);
2555 /* Multiply the index by the stride. */
2556 index = fold_build2_loc (input_location, MULT_EXPR,
2557 gfc_array_index_type,
2558 index, gfc_conv_array_stride (desc, 0));
2560 /* Read the vector to get an index into info->descriptor. */
2561 data = build_fold_indirect_ref_loc (input_location,
2562 gfc_conv_array_data (desc));
2563 index = gfc_build_array_ref (data, index, NULL);
2564 index = gfc_evaluate_now (index, &se->pre);
2565 index = fold_convert (gfc_array_index_type, index);
2567 /* Do any bounds checking on the final info->descriptor index. */
2568 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2569 ar->as->type != AS_ASSUMED_SIZE
2570 || dim < ar->dimen - 1);
2574 /* Scalarized dimension. */
2575 gcc_assert (info && se->loop);
2577 /* Multiply the loop variable by the stride and delta. */
2578 index = se->loop->loopvar[i];
/* Skip the multiply/add when stride is 1 or delta is 0 to keep the
   generated trees small.  */
2579 if (!integer_onep (info->stride[dim]))
2580 index = fold_build2_loc (input_location, MULT_EXPR,
2581 gfc_array_index_type, index,
2583 if (!integer_zerop (info->delta[dim]))
2584 index = fold_build2_loc (input_location, PLUS_EXPR,
2585 gfc_array_index_type, index,
2595 /* Temporary array or derived type component. */
2596 gcc_assert (se->loop);
2597 index = se->loop->loopvar[se->loop->order[i]];
2599 /* Pointer functions can have stride[0] different from unity.
2600 Use the stride returned by the function call and stored in
2601 the descriptor for the temporary. */
2602 if (se->ss && se->ss->type == GFC_SS_FUNCTION
2604 && se->ss->expr->symtree
2605 && se->ss->expr->symtree->n.sym->result
2606 && se->ss->expr->symtree->n.sym->result->attr.pointer)
2607 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2610 if (!integer_zerop (info->delta[dim]))
2611 index = fold_build2_loc (input_location, PLUS_EXPR,
2612 gfc_array_index_type, index, info->delta[dim]);
2615 /* Multiply by the stride. */
2616 if (!integer_onep (stride))
2617 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2624 /* Build a scalarized reference to an array. */
/* Computes the element offset for the innermost loop dimension, adds
   the accumulated offset of the outer dimensions, and stores the final
   element reference in SE->expr.  AR may describe the array reference;
   passing NULL is used for temporaries (see gfc_conv_tmp_array_ref).  */
2627 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
2630 tree decl = NULL_TREE;
2635 info = &se->ss->data.info;
/* Innermost dimension according to the chosen loop order.  */
2637 n = se->loop->order[0];
2641 index = conv_array_index_offset (se, se->ss, info->dim[n], n, ar,
2643 /* Add the offset for this dimension to the stored offset for all other
2645 if (!integer_zerop (info->offset))
2646 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2647 index, info->offset);
/* For subreference arrays pass the backing declaration so the array
   reference can carry the right base object.  */
2649 if (se->ss->expr && is_subref_array (se->ss->expr))
2650 decl = se->ss->expr->symtree->n.sym->backend_decl;
2652 tmp = build_fold_indirect_ref_loc (input_location,
2654 se->expr = gfc_build_array_ref (tmp, index, decl);
2658 /* Translate access of temporary array. */
/* Propagates the string length from the SS, builds the scalarized
   element reference (no gfc_array_ref: the temporary has unit stride
   layout), and advances the SS chain.  */
2661 gfc_conv_tmp_array_ref (gfc_se * se)
2663 se->string_length = se->ss->string_length;
2664 gfc_conv_scalarized_array_ref (se, NULL);
2665 gfc_advance_se_ss_chain (se);
2668 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
/* Compile-time constants are folded into *CST_OFFSET via
   int_const_binop; everything else is summed into the runtime tree
   *OFFSET.  Keeping the constant part separate lets the final offset
   be formed as one loop-invariant addition.  */
2671 add_to_offset (tree *cst_offset, tree *offset, tree t)
2673 if (TREE_CODE (t) == INTEGER_CST)
2674 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
2677 if (!integer_zerop (*offset))
2678 *offset = fold_build2_loc (input_location, PLUS_EXPR,
2679 gfc_array_index_type, *offset, t);
2685 /* Build an array reference. se->expr already holds the array descriptor.
2686 This should be either a variable, indirect variable reference or component
2687 reference. For arrays which do not have a descriptor, se->expr will be
2689 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
/* For non-elemental (scalarized) references this delegates to
   gfc_conv_scalarized_array_ref.  For AR_ELEMENT it evaluates each
   subscript, optionally emits -fcheck=bounds checks, accumulates
   subscript*stride into a (runtime, constant) offset pair, and finally
   builds the element access.  SYM supplies the array name for error
   messages and the backend decl for the reference.  */
2692 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
2696 tree offset, cst_offset;
/* Coarray handling: strip the descriptor / wrapped coarray type.  */
2704 gcc_assert (ar->codimen);
2706 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
2707 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
2710 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
2711 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
2712 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
2714 /* Use the actual tree type and not the wrapped coarray. */
2715 if (!se->want_pointer)
2716 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
2723 /* Handle scalarized references separately. */
2724 if (ar->type != AR_ELEMENT)
2726 gfc_conv_scalarized_array_ref (se, ar);
2727 gfc_advance_se_ss_chain (se);
2731 cst_offset = offset = gfc_index_zero_node;
2732 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
2734 /* Calculate the offsets from all the dimensions. Make sure to associate
2735 the final offset so that we form a chain of loop invariant summands. */
2736 for (n = ar->dimen - 1; n >= 0; n--)
2738 /* Calculate the index for this dimension. */
2739 gfc_init_se (&indexse, se);
2740 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
2741 gfc_add_block_to_block (&se->pre, &indexse.pre);
2743 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2745 /* Check array bounds. */
2749 /* Evaluate the indexse.expr only once. */
2750 indexse.expr = save_expr (indexse.expr);
/* Lower bound: for temporaries the bound comes from the array spec,
   not the (possibly remapped) descriptor.  */
2753 tmp = gfc_conv_array_lbound (se->expr, n);
2754 if (sym->attr.temporary)
2756 gfc_init_se (&tmpse, se);
2757 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
2758 gfc_array_index_type);
2759 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2763 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2765 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2766 "below lower bound of %%ld", n+1, sym->name);
2767 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2768 fold_convert (long_integer_type_node,
2770 fold_convert (long_integer_type_node, tmp));
2773 /* Upper bound, but not for the last dimension of assumed-size
2775 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
2777 tmp = gfc_conv_array_ubound (se->expr, n);
2778 if (sym->attr.temporary)
2780 gfc_init_se (&tmpse, se);
2781 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
2782 gfc_array_index_type);
2783 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2787 cond = fold_build2_loc (input_location, GT_EXPR,
2788 boolean_type_node, indexse.expr, tmp);
2789 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2790 "above upper bound of %%ld", n+1, sym->name);
2791 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2792 fold_convert (long_integer_type_node,
2794 fold_convert (long_integer_type_node, tmp));
2799 /* Multiply the index by the stride. */
2800 stride = gfc_conv_array_stride (se->expr, n);
2801 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2802 indexse.expr, stride);
2804 /* And add it to the total. */
2805 add_to_offset (&cst_offset, &offset, tmp);
/* Fold the accumulated constant part in with one final addition.  */
2808 if (!integer_zerop (cst_offset))
2809 offset = fold_build2_loc (input_location, PLUS_EXPR,
2810 gfc_array_index_type, offset, cst_offset);
2812 /* Access the calculated element. */
2813 tmp = gfc_conv_array_data (se->expr);
2814 tmp = build_fold_indirect_ref (tmp);
2815 se->expr = gfc_build_array_ref (tmp, offset, sym->backend_decl);
2819 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
2820 LOOP_DIM dimension (if any) to array's offset. */
/* Computes the index contribution with conv_array_index_offset, adds
   it into info->offset, and re-evaluates the offset into a temporary
   in PBLOCK so later uses see a simple variable.  */
2823 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
2824 gfc_array_ref *ar, int array_dim, int loop_dim)
2830 info = &ss->data.info;
2832 gfc_init_se (&se, NULL);
2834 se.expr = info->descriptor;
2835 stride = gfc_conv_array_stride (info->descriptor, array_dim);
2836 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
/* Any setup code produced for the index goes before the loop body.  */
2837 gfc_add_block_to_block (pblock, &se.pre);
2839 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2840 gfc_array_index_type,
2841 info->offset, index);
2842 info->offset = gfc_evaluate_now (info->offset, pblock);
2846 /* Generate the code to be executed immediately before entering a
2847 scalarization loop. */
/* For each SS used in this pass (matching FLAG in ss->useflags) this
   precomputes, into PBLOCK, the values that are invariant in the loop
   for dimension DIM: the innermost stride (cached in info->stride0)
   and the per-dimension offset contributions.  */
2850 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
2851 stmtblock_t * pblock)
2859 /* This code will be executed before entering the scalarization loop
2860 for this dimension. */
2861 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Skip SS not participating in this pass.  */
2863 if ((ss->useflags & flag) == 0)
/* Only array-like SS types carry dimension info to set up.  */
2866 if (ss->type != GFC_SS_SECTION
2867 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
2868 && ss->type != GFC_SS_COMPONENT)
2871 info = &ss->data.info;
2873 gcc_assert (dim < info->dimen)
2874 gcc_assert (info->dimen == loop->dimen);
2877 ar = &info->ref->u.ar;
2881 if (dim == loop->dimen - 1)
2886 /* For the time being, there is no loop reordering. */
2887 gcc_assert (i == loop->order[i]);
2890 if (dim == loop->dimen - 1)
2892 stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
2894 /* Calculate the stride of the innermost loop. Hopefully this will
2895 allow the backend optimizers to do their stuff more effectively.
2897 info->stride0 = gfc_evaluate_now (stride, pblock);
2899 /* For the outermost loop calculate the offset due to any
2900 elemental dimensions. It will have been initialized with the
2901 base offset of the array. */
2904 for (i = 0; i < ar->dimen; i++)
2906 if (ar->dimen_type[i] != DIMEN_ELEMENT)
2909 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
2914 /* Add the offset for the previous loop dimension. */
2915 add_array_offset (pblock, loop, ss, ar, info->dim[i], i);
2917 /* Remember this offset for the second loop. */
2918 if (dim == loop->temp_dim - 1)
2919 info->saved_offset = info->offset;
2924 /* Start a scalarized expression. Creates a scope and declares loop
/* Iterates the loop dimensions outermost-first (per loop->order),
   opening a statement block and creating a loop variable "S" for each,
   then runs the preloop setup and opens PBODY for the caller's code.  */
2928 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
2934 gcc_assert (!loop->array_parameter);
2936 for (dim = loop->dimen - 1; dim >= 0; dim--)
2938 n = loop->order[dim];
2940 gfc_start_block (&loop->code[n]);
2942 /* Create the loop variable. */
2943 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
/* Dimensions used by the temporary get extra use flags (set here; the
   flag value line is elided in this listing).  */
2945 if (dim < loop->temp_dim)
2949 /* Calculate values that will be constant within this loop. */
2950 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
2952 gfc_start_block (pbody);
2956 /* Generates the actual loop code for a scalarization loop. */
/* Wraps the body collected in PBODY for dimension N either in an
   OMP_FOR (when scalarizing inside an OpenMP WORKSHARE region) or in a
   plain LOOP_EXPR with an explicit exit test and increment.  Reversed
   loops (dependency resolution) swap from/to and flip the comparison
   and increment operators.  */
2959 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
2960 stmtblock_t * pbody)
/* OpenMP workshare path: only for the outermost scalarized loop.  */
2971 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
2972 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
2973 && n == loop->dimen - 1)
2975 /* We create an OMP_FOR construct for the outermost scalarized loop. */
2976 init = make_tree_vec (1);
2977 cond = make_tree_vec (1);
2978 incr = make_tree_vec (1);
2980 /* Cycle statement is implemented with a goto. Exit statement must not
2981 be present for this loop. */
2982 exit_label = gfc_build_label_decl (NULL_TREE);
2983 TREE_USED (exit_label) = 1;
2985 /* Label for cycle statements (if needed). */
2986 tmp = build1_v (LABEL_EXPR, exit_label);
2987 gfc_add_expr_to_block (pbody, tmp);
2989 stmt = make_node (OMP_FOR);
2991 TREE_TYPE (stmt) = void_type_node;
2992 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
/* Static schedule; NOWAIT is propagated from the workshare flags.  */
2994 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
2995 OMP_CLAUSE_SCHEDULE);
2996 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
2997 = OMP_CLAUSE_SCHEDULE_STATIC;
2998 if (ompws_flags & OMPWS_NOWAIT)
2999 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3000 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3002 /* Initialize the loopvar. */
3003 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3005 OMP_FOR_INIT (stmt) = init;
3006 /* The exit condition. */
3007 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3009 loop->loopvar[n], loop->to[n]);
3010 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3011 OMP_FOR_COND (stmt) = cond;
3012 /* Increment the loopvar. */
3013 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3014 loop->loopvar[n], gfc_index_one_node);
3015 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3016 void_type_node, loop->loopvar[n], tmp);
3017 OMP_FOR_INCR (stmt) = incr;
3019 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3020 gfc_add_expr_to_block (&loop->code[n], stmt);
/* Plain loop path.  Reverse only when requested by dependency
   analysis and no temporary is involved.  */
3024 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3025 && (loop->temp_ss == NULL);
3027 loopbody = gfc_finish_block (pbody);
/* For a reversed loop, swap from/to (swap completion elided here).  */
3031 tmp = loop->from[n];
3032 loop->from[n] = loop->to[n];
3036 /* Initialize the loopvar. */
3037 if (loop->loopvar[n] != loop->from[n])
3038 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3040 exit_label = gfc_build_label_decl (NULL_TREE);
3042 /* Generate the loop body. */
3043 gfc_init_block (&block);
3045 /* The exit condition. */
3046 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3047 boolean_type_node, loop->loopvar[n], loop->to[n]);
3048 tmp = build1_v (GOTO_EXPR, exit_label);
3049 TREE_USED (exit_label) = 1;
3050 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3051 gfc_add_expr_to_block (&block, tmp);
3053 /* The main body. */
3054 gfc_add_expr_to_block (&block, loopbody);
3056 /* Increment the loopvar. */
3057 tmp = fold_build2_loc (input_location,
3058 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3059 gfc_array_index_type, loop->loopvar[n],
3060 gfc_index_one_node);
3062 gfc_add_modify (&block, loop->loopvar[n], tmp);
3064 /* Build the loop. */
3065 tmp = gfc_finish_block (&block);
3066 tmp = build1_v (LOOP_EXPR, tmp);
3067 gfc_add_expr_to_block (&loop->code[n], tmp);
3069 /* Add the exit label. */
3070 tmp = build1_v (LABEL_EXPR, exit_label);
3071 gfc_add_expr_to_block (&loop->code[n], tmp);
3077 /* Finishes and generates the loops for a scalarized expression. */
/* Closes every loop dimension innermost-first (per loop->order),
   chaining each finished loop into the enclosing one, appends the
   result to loop->pre, and clears the per-SS use flags.  */
3080 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3085 stmtblock_t *pblock;
3089 /* Generate the loops. */
3090 for (dim = 0; dim < loop->dimen; dim++)
3092 n = loop->order[dim];
3093 gfc_trans_scalarized_loop_end (loop, n, pblock);
/* The loop variable is dead once its loop is closed.  */
3094 loop->loopvar[n] = NULL_TREE;
3095 pblock = &loop->code[n];
3098 tmp = gfc_finish_block (pblock);
3099 gfc_add_expr_to_block (&loop->pre, tmp);
3101 /* Clear all the used flags. */
3102 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3107 /* Finish the main body of a scalarized expression, and start the secondary
/* Used when a temporary splits the scalarization into two passes:
   closes the loops owned by the temporary, restores each SS's saved
   offset, restarts the inner loops with fresh "Q" loop variables
   (preloop setup pass 2), and opens BODY for the copy-out code.  */
3111 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3115 stmtblock_t *pblock;
3119 /* We finish as many loops as are used by the temporary. */
3120 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3122 n = loop->order[dim];
3123 gfc_trans_scalarized_loop_end (loop, n, pblock);
3124 loop->loopvar[n] = NULL_TREE;
3125 pblock = &loop->code[n];
3128 /* We don't want to finish the outermost loop entirely. */
3129 n = loop->order[loop->temp_dim - 1];
3130 gfc_trans_scalarized_loop_end (loop, n, pblock);
3132 /* Restore the initial offsets. */
3133 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Only SS participating in pass 2 (useflags bit 2) are reset.  */
3135 if ((ss->useflags & 2) == 0)
3138 if (ss->type != GFC_SS_SECTION
3139 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
3140 && ss->type != GFC_SS_COMPONENT)
3143 ss->data.info.offset = ss->data.info.saved_offset;
3146 /* Restart all the inner loops we just finished. */
3147 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3149 n = loop->order[dim];
3151 gfc_start_block (&loop->code[n]);
3153 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3155 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3158 /* Start a block for the secondary copying code. */
3159 gfc_start_block (body);
3163 /* Precalculate (either lower or upper) bound of an array section.
3164 BLOCK: Block in which the (pre)calculation code will go.
3165 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3166 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3167 DESC: Array descriptor from which the bound will be picked if unspecified
3168 (either lower or upper bound according to LBOUND). */
3171 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3172 tree desc, int dim, bool lbound)
3175 gfc_expr * input_val = values[dim];
3176 tree *output = &bounds[dim];
3181 /* Specified section bound. */
3182 gfc_init_se (&se, NULL);
3183 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3184 gfc_add_block_to_block (block, &se.pre);
3189 /* No specific bound specified so use the bound of the array. */
3190 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3191 gfc_conv_array_ubound (desc, dim);
/* Force evaluation once, up front, so side effects happen in BLOCK.  */
3193 *output = gfc_evaluate_now (*output, block);
3197 /* Calculate the lower bound of an array section. */
/* Fills info->start/end/stride for dimension DIM of a GFC_SS_SECTION.
   Vector subscripts get a canonical zero-based unit-stride range; for
   DIMEN_RANGE/DIMEN_THIS_IMAGE the bounds come from the section spec
   (or the descriptor when unspecified) via evaluate_bound, and the
   stride expression is evaluated into loop->pre.  */
3200 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim)
3202 gfc_expr *stride = NULL;
3208 gcc_assert (ss->type == GFC_SS_SECTION);
3210 info = &ss->data.info;
3211 ar = &info->ref->u.ar;
3213 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3215 /* We use a zero-based index to access the vector. */
3216 info->start[dim] = gfc_index_zero_node;
3217 info->end[dim] = NULL;
3218 info->stride[dim] = gfc_index_one_node;
3222 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3223 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3224 desc = info->descriptor;
3225 stride = ar->stride[dim];
3227 /* Calculate the start of the range. For vector subscripts this will
3228 be the range of the vector. */
3229 evaluate_bound (&loop->pre, info->start, ar->start, desc, dim, true);
3231 /* Similarly calculate the end. Although this is not used in the
3232 scalarizer, it is needed when checking bounds and where the end
3233 is an expression with side-effects. */
3234 evaluate_bound (&loop->pre, info->end, ar->end, desc, dim, false);
3236 /* Calculate the stride. */
/* Default stride is 1 when the section gives none.  */
3238 info->stride[dim] = gfc_index_one_node;
3241 gfc_init_se (&se, NULL);
3242 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3243 gfc_add_block_to_block (&loop->pre, &se.pre);
3244 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3249 /* Calculates the range start and stride for a SS chain. Also gets the
3250 descriptor and data pointer. The range of vector subscripts is the size
3251 of the vector. Array bounds are also checked. */
/* Three phases: (1) determine loop->dimen from the first rank-carrying
   SS; (2) for each SS, set up descriptor plus start/end/stride per
   dimension; (3) with -fcheck=bounds, emit runtime checks into
   loop->pre for every section: non-zero stride, start and last element
   within bounds, and matching extents across sections.  */
3254 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3262 /* Determine the rank of the loop. */
3263 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3267 case GFC_SS_SECTION:
3268 case GFC_SS_CONSTRUCTOR:
3269 case GFC_SS_FUNCTION:
3270 case GFC_SS_COMPONENT:
3271 loop->dimen = ss->data.info.dimen;
3274 /* As usual, lbound and ubound are exceptions!. */
3275 case GFC_SS_INTRINSIC:
3276 switch (ss->expr->value.function.isym->id)
3278 case GFC_ISYM_LBOUND:
3279 case GFC_ISYM_UBOUND:
3280 case GFC_ISYM_LCOBOUND:
3281 case GFC_ISYM_UCOBOUND:
3282 case GFC_ISYM_THIS_IMAGE:
3283 loop->dimen = ss->data.info.dimen;
3295 /* We should have determined the rank of the expression by now. If
3296 not, that's bad news. */
3300 /* Loop over all the SS in the chain. */
3301 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Propagate a known compile-time shape onto the SS.  */
3303 if (ss->expr && ss->expr->shape && !ss->shape)
3304 ss->shape = ss->expr->shape;
3308 case GFC_SS_SECTION:
3309 /* Get the descriptor for the array. */
3310 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3312 for (n = 0; n < ss->data.info.dimen; n++)
3313 gfc_conv_section_startstride (loop, ss, ss->data.info.dim[n]);
3316 case GFC_SS_INTRINSIC:
3317 switch (ss->expr->value.function.isym->id)
3319 /* Fall through to supply start and stride. */
3320 case GFC_ISYM_LBOUND:
3321 case GFC_ISYM_UBOUND:
3322 case GFC_ISYM_LCOBOUND:
3323 case GFC_ISYM_UCOBOUND:
3324 case GFC_ISYM_THIS_IMAGE:
3331 case GFC_SS_CONSTRUCTOR:
3332 case GFC_SS_FUNCTION:
/* Synthetic unit-stride, zero-based range for each dimension.  */
3333 for (n = 0; n < ss->data.info.dimen; n++)
3335 int dim = ss->data.info.dim[n];
3337 ss->data.info.start[dim] = gfc_index_zero_node;
3338 ss->data.info.end[dim] = gfc_index_zero_node;
3339 ss->data.info.stride[dim] = gfc_index_one_node;
3348 /* The rest is just runtime bound checking. */
3349 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3352 tree lbound, ubound;
3354 tree size[GFC_MAX_DIMENSIONS];
3355 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3360 gfc_start_block (&block);
3362 for (n = 0; n < loop->dimen; n++)
3363 size[n] = NULL_TREE;
3365 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3369 if (ss->type != GFC_SS_SECTION)
3372 /* Catch allocatable lhs in f2003. */
3373 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3376 gfc_start_block (&inner);
3378 /* TODO: range checking for mapped dimensions. */
3379 info = &ss->data.info;
3381 /* This code only checks ranges. Elemental and vector
3382 dimensions are checked later. */
3383 for (n = 0; n < loop->dimen; n++)
3388 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
/* Last dimension of an assumed-size array has no upper bound.  */
3391 if (dim == info->ref->u.ar.dimen - 1
3392 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3393 check_upper = false;
3397 /* Zero stride is not allowed. */
3398 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3399 info->stride[dim], gfc_index_zero_node);
3400 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3401 "of array '%s'", dim + 1, ss->expr->symtree->name);
3402 gfc_trans_runtime_check (true, false, tmp, &inner,
3403 &ss->expr->where, msg);
3406 desc = ss->data.info.descriptor;
3408 /* This is the run-time equivalent of resolve.c's
3409 check_dimension(). The logical is more readable there
3410 than it is here, with all the trees. */
3411 lbound = gfc_conv_array_lbound (desc, dim);
3412 end = info->end[dim];
3414 ubound = gfc_conv_array_ubound (desc, dim);
3418 /* non_zerosized is true when the selected range is not
3420 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3421 boolean_type_node, info->stride[dim],
3422 gfc_index_zero_node);
3423 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3424 info->start[dim], end);
3425 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3426 boolean_type_node, stride_pos, tmp);
3428 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3430 info->stride[dim], gfc_index_zero_node);
3431 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3432 info->start[dim], end);
3433 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3436 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3438 stride_pos, stride_neg);
3440 /* Check the start of the range against the lower and upper
3441 bounds of the array, if the range is not empty.
3442 If upper bound is present, include both bounds in the
3446 tmp = fold_build2_loc (input_location, LT_EXPR,
3448 info->start[dim], lbound);
3449 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3451 non_zerosized, tmp);
3452 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3454 info->start[dim], ubound);
3455 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3457 non_zerosized, tmp2);
3458 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3459 "outside of expected range (%%ld:%%ld)",
3460 dim + 1, ss->expr->symtree->name);
3461 gfc_trans_runtime_check (true, false, tmp, &inner,
3462 &ss->expr->where, msg,
3463 fold_convert (long_integer_type_node, info->start[dim]),
3464 fold_convert (long_integer_type_node, lbound),
3465 fold_convert (long_integer_type_node, ubound));
3466 gfc_trans_runtime_check (true, false, tmp2, &inner,
3467 &ss->expr->where, msg,
3468 fold_convert (long_integer_type_node, info->start[dim]),
3469 fold_convert (long_integer_type_node, lbound),
3470 fold_convert (long_integer_type_node, ubound));
3475 tmp = fold_build2_loc (input_location, LT_EXPR,
3477 info->start[dim], lbound);
3478 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3479 boolean_type_node, non_zerosized, tmp);
3480 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3481 "below lower bound of %%ld",
3482 dim + 1, ss->expr->symtree->name);
3483 gfc_trans_runtime_check (true, false, tmp, &inner,
3484 &ss->expr->where, msg,
3485 fold_convert (long_integer_type_node, info->start[dim]),
3486 fold_convert (long_integer_type_node, lbound));
3490 /* Compute the last element of the range, which is not
3491 necessarily "end" (think 0:5:3, which doesn't contain 5)
3492 and check it against both lower and upper bounds. */
3494 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3495 gfc_array_index_type, end,
3497 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3498 gfc_array_index_type, tmp,
3500 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3501 gfc_array_index_type, end, tmp);
3502 tmp2 = fold_build2_loc (input_location, LT_EXPR,
3503 boolean_type_node, tmp, lbound);
3504 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3505 boolean_type_node, non_zerosized, tmp2);
3508 tmp3 = fold_build2_loc (input_location, GT_EXPR,
3509 boolean_type_node, tmp, ubound);
3510 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3511 boolean_type_node, non_zerosized, tmp3);
3512 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3513 "outside of expected range (%%ld:%%ld)",
3514 dim + 1, ss->expr->symtree->name);
/* NOTE(review): here the varargs are (ubound, lbound) while the
   analogous start-of-range check above passes (lbound, ubound) for
   the same "(%ld:%ld)" format — verify the intended order.  */
3515 gfc_trans_runtime_check (true, false, tmp2, &inner,
3516 &ss->expr->where, msg,
3517 fold_convert (long_integer_type_node, tmp),
3518 fold_convert (long_integer_type_node, ubound),
3519 fold_convert (long_integer_type_node, lbound));
3520 gfc_trans_runtime_check (true, false, tmp3, &inner,
3521 &ss->expr->where, msg,
3522 fold_convert (long_integer_type_node, tmp),
3523 fold_convert (long_integer_type_node, ubound),
3524 fold_convert (long_integer_type_node, lbound));
3529 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3530 "below lower bound of %%ld",
3531 dim + 1, ss->expr->symtree->name);
3532 gfc_trans_runtime_check (true, false, tmp2, &inner,
3533 &ss->expr->where, msg,
3534 fold_convert (long_integer_type_node, tmp),
3535 fold_convert (long_integer_type_node, lbound));
3539 /* Check the section sizes match. */
/* Extent = max ((end - start) / stride + 1, 0).  */
3540 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3541 gfc_array_index_type, end,
3543 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3544 gfc_array_index_type, tmp,
3546 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3547 gfc_array_index_type,
3548 gfc_index_one_node, tmp);
3549 tmp = fold_build2_loc (input_location, MAX_EXPR,
3550 gfc_array_index_type, tmp,
3551 build_int_cst (gfc_array_index_type, 0));
3552 /* We remember the size of the first section, and check all the
3553 others against this. */
3556 tmp3 = fold_build2_loc (input_location, NE_EXPR,
3557 boolean_type_node, tmp, size[n]);
3558 asprintf (&msg, "Array bound mismatch for dimension %d "
3559 "of array '%s' (%%ld/%%ld)",
3560 dim + 1, ss->expr->symtree->name);
3562 gfc_trans_runtime_check (true, false, tmp3, &inner,
3563 &ss->expr->where, msg,
3564 fold_convert (long_integer_type_node, tmp),
3565 fold_convert (long_integer_type_node, size[n]));
3570 size[n] = gfc_evaluate_now (tmp, &inner);
3573 tmp = gfc_finish_block (&inner);
3575 /* For optional arguments, only check bounds if the argument is
3577 if (ss->expr->symtree->n.sym->attr.optional
3578 || ss->expr->symtree->n.sym->attr.not_always_present)
3579 tmp = build3_v (COND_EXPR,
3580 gfc_conv_expr_present (ss->expr->symtree->n.sym),
3581 tmp, build_empty_stmt (input_location));
3583 gfc_add_expr_to_block (&block, tmp);
3587 tmp = gfc_finish_block (&block);
3588 gfc_add_expr_to_block (&loop->pre, tmp);
3592 /* Return true if both symbols could refer to the same data object. Does
3593 not take account of aliasing due to equivalence statements. */
/* The *_POINTER/*_TARGET flags carry the accumulated pointer/target
   attributes of the access paths leading to LSYM/RSYM, not just the
   symbols' own attributes (see gfc_could_be_alias).  */
3596 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
3597 bool lsym_target, bool rsym_pointer, bool rsym_target)
3599 /* Aliasing isn't possible if the symbols have different base types. */
3600 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
3603 /* Pointers can point to other pointers and target objects. */
3605 if ((lsym_pointer && (rsym_pointer || rsym_target))
3606 || (rsym_pointer && (lsym_pointer || lsym_target)))
3609 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
3610 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
3612 if (lsym_target && rsym_target
3613 && ((lsym->attr.dummy && !lsym->attr.contiguous
3614 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
3615 || (rsym->attr.dummy && !rsym->attr.contiguous
3616 && (!rsym->attr.dimension
3617 || rsym->as->type == AS_ASSUMED_SHAPE))))
3624 /* Return true if the two SS could be aliased, i.e. both point to the same data
3626 /* TODO: resolve aliases based on frontend expressions. */
/* Checks the base symbols first, then walks the REF_COMPONENT chains
   of both sides, accumulating pointer/target attributes along each
   path and re-testing symbols_could_alias / gfc_compare_types at each
   component level.  */
3629 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
3635 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
3637 lsym = lss->expr->symtree->n.sym;
3638 rsym = rss->expr->symtree->n.sym;
3640 lsym_pointer = lsym->attr.pointer;
3641 lsym_target = lsym->attr.target;
3642 rsym_pointer = rsym->attr.pointer;
3643 rsym_target = rsym->attr.target;
3645 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
3646 rsym_pointer, rsym_target))
/* Non-derived, non-class types cannot alias through components.  */
3649 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
3650 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
3653 /* For derived types we must check all the component types. We can ignore
3654 array references as these will have the same base type as the previous
3656 for (lref = lss->expr->ref; lref != lss->data.info.ref; lref = lref->next)
3658 if (lref->type != REF_COMPONENT)
3661 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
3662 lsym_target = lsym_target || lref->u.c.sym->attr.target;
3664 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
3665 rsym_pointer, rsym_target))
3668 if ((lsym_pointer && (rsym_pointer || rsym_target))
3669 || (rsym_pointer && (lsym_pointer || lsym_target)))
3671 if (gfc_compare_types (&lref->u.c.component->ts,
3676 for (rref = rss->expr->ref; rref != rss->data.info.ref;
3679 if (rref->type != REF_COMPONENT)
3682 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
/* NOTE(review): 'lsym_target' on the right-hand side looks like a
   copy-paste slip — 'rsym_target || ...' appears intended; verify.  */
3683 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3685 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
3686 lsym_pointer, lsym_target,
3687 rsym_pointer, rsym_target))
3690 if ((lsym_pointer && (rsym_pointer || rsym_target))
3691 || (rsym_pointer && (lsym_pointer || lsym_target)))
3693 if (gfc_compare_types (&lref->u.c.component->ts,
3694 &rref->u.c.sym->ts))
3696 if (gfc_compare_types (&lref->u.c.sym->ts,
3697 &rref->u.c.component->ts))
3699 if (gfc_compare_types (&lref->u.c.component->ts,
3700 &rref->u.c.component->ts))
/* NOTE(review): the same lsym_pointer/lsym_target pair is assigned
   twice below; the second pair was presumably meant to reset
   rsym_pointer/rsym_target before the final loop — verify.  */
3706 lsym_pointer = lsym->attr.pointer;
3707 lsym_target = lsym->attr.target;
3708 lsym_pointer = lsym->attr.pointer;
3709 lsym_target = lsym->attr.target;
3711 for (rref = rss->expr->ref; rref != rss->data.info.ref; rref = rref->next)
3713 if (rref->type != REF_COMPONENT)
3716 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
/* NOTE(review): same suspected 'lsym_target' copy-paste as above.  */
3717 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3719 if (symbols_could_alias (rref->u.c.sym, lsym,
3720 lsym_pointer, lsym_target,
3721 rsym_pointer, rsym_target))
3724 if ((lsym_pointer && (rsym_pointer || rsym_target))
3725 || (rsym_pointer && (lsym_pointer || lsym_target)))
3727 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
3736 /* Resolve array data dependencies.  Creates a temporary if required.  */
3737 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
3741 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
3750 loop->temp_ss = NULL;
/* Scan every rhs term for a possible dependency on the destination.  */
3752 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
3754 if (ss->type != GFC_SS_SECTION)
/* Different base symbols: still need the (conservative) alias and
   EQUIVALENCE checks before we can rule out overlap.  */
3757 if (dest->expr->symtree->n.sym != ss->expr->symtree->n.sym)
3759 if (gfc_could_be_alias (dest, ss)
3760 || gfc_are_equivalenced_arrays (dest->expr, ss->expr))
/* Same base symbol: let the dependency resolver analyze the
   reference chains in detail.  */
3768 lref = dest->expr->ref;
3769 rref = ss->expr->ref;
3771 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
3776 for (i = 0; i < dest->data.info.dimen; i++)
3777 for (j = 0; j < ss->data.info.dimen; j++)
3779 && dest->data.info.dim[i] == ss->data.info.dim[j])
3781 /* If we don't access array elements in the same order,
3782 there is a dependency.  */
3787 /* TODO : loop shifting.  */
3790 /* Mark the dimensions for LOOP SHIFTING */
3791 for (n = 0; n < loop->dimen; n++)
3793 int dim = dest->data.info.dim[n];
3795 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3797 else if (! gfc_is_same_range (&lref->u.ar,
3798 &rref->u.ar, dim, 0))
3802 /* Put all the dimensions with dependencies in the
3805 for (n = 0; n < loop->dimen; n++)
3807 gcc_assert (loop->order[n] == n)
3809 loop->order[dim++] = n;
3811 for (n = 0; n < loop->dimen; n++)
3814 loop->order[dim++] = n;
/* Every dimension must have been placed exactly once.  */
3817 gcc_assert (dim == loop->dimen);
/* A dependency was found: evaluate the rhs into a temporary whose
   element type matches the destination.  */
3828 tree base_type = gfc_typenode_for_spec (&dest->expr->ts);
3829 if (GFC_ARRAY_TYPE_P (base_type)
3830 || GFC_DESCRIPTOR_TYPE_P (base_type))
3831 base_type = gfc_get_element_type (base_type);
3832 loop->temp_ss = gfc_get_temp_ss (base_type, dest->string_length,
3834 gfc_add_ss_to_loop (loop, loop->temp_ss);
/* No dependency: no temporary needed.  */
3837 loop->temp_ss = NULL;
3841 /* Initialize the scalarization loop.  Creates the loop variables.  Determines
3842 the range of the loop variables.  Creates a temporary if required.
3843 Calculates how to transform from loop variables to array indices for each
3844 expression.  Also generates code for scalar expressions which have been
3845 moved outside the loop. */
3848 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
3850 int n, dim, spec_dim;
3852 gfc_ss_info *specinfo;
3855 gfc_ss *loopspec[GFC_MAX_DIMENSIONS];
3856 bool dynamic[GFC_MAX_DIMENSIONS];
/* Pick a "loop specifier" SS for each loop dimension.  */
3861 for (n = 0; n < loop->dimen; n++)
3865 /* We use one SS term, and use that to determine the bounds of the
3866 loop for this dimension.  We try to pick the simplest term.  */
3867 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3869 gfc_ss_type ss_type;
/* Scalar-like terms contribute no loop bounds.  */
3872 if (ss_type == GFC_SS_SCALAR
3873 || ss_type == GFC_SS_TEMP
3874 || ss_type == GFC_SS_REFERENCE)
3877 info = &ss->data.info;
3880 if (loopspec[n] != NULL)
3882 specinfo = &loopspec[n]->data.info;
3883 spec_dim = specinfo->dim[n];
3887 /* Silence uninitialized warnings.  */
3894 gcc_assert (ss->shape[dim]);
3895 /* The frontend has worked out the size for us.  */
3897 || !loopspec[n]->shape
3898 || !integer_zerop (specinfo->start[spec_dim]))
3899 /* Prefer zero-based descriptors if possible.  */
3904 if (ss->type == GFC_SS_CONSTRUCTOR)
3906 gfc_constructor_base base;
3907 /* An unknown size constructor will always be rank one.
3908 Higher rank constructors will either have known shape,
3909 or still be wrapped in a call to reshape.  */
3910 gcc_assert (loop->dimen == 1);
3912 /* Always prefer to use the constructor bounds if the size
3913 can be determined at compile time.  Prefer not to otherwise,
3914 since the general case involves realloc, and it's better to
3915 avoid that overhead if possible. */
3916 base = ss->expr->value.constructor;
3917 dynamic[n] = gfc_get_array_constructor_size (&i, base);
3918 if (!dynamic[n] || !loopspec[n])
3923 /* TODO: Pick the best bound if we have a choice between a
3924 function and something else.  */
3925 if (ss->type == GFC_SS_FUNCTION)
3931 /* Avoid using an allocatable lhs in an assignment, since
3932 there might be a reallocation coming.  */
3933 if (loopspec[n] && ss->is_alloc_lhs)
3936 if (ss->type != GFC_SS_SECTION)
3941 /* Criteria for choosing a loop specifier (most important first):
3942 doesn't need realloc
/* Prefer constant-stride/constant-start sections over less
   analyzable candidates.  */
3948 else if ((loopspec[n]->type == GFC_SS_CONSTRUCTOR && dynamic[n])
3949 || n >= loop->dimen)
3951 else if (integer_onep (info->stride[dim])
3952 && !integer_onep (specinfo->stride[spec_dim]))
3954 else if (INTEGER_CST_P (info->stride[dim])
3955 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
3957 else if (INTEGER_CST_P (info->start[dim])
3958 && !INTEGER_CST_P (specinfo->start[spec_dim]))
3960 /* We don't work out the upper bound.
3961 else if (INTEGER_CST_P (info->finish[n])
3962 && ! INTEGER_CST_P (specinfo->finish[n]))
3963 loopspec[n] = ss; */
3966 /* We should have found the scalarization loop specifier.  If not,
3968 gcc_assert (loopspec[n]);
3970 info = &loopspec[n]->data.info;
3973 /* Set the extents of this range.  */
3974 cshape = loopspec[n]->shape;
3975 if (cshape && INTEGER_CST_P (info->start[dim])
3976 && INTEGER_CST_P (info->stride[dim]))
3978 loop->from[n] = info->start[dim];
3979 mpz_set (i, cshape[get_array_ref_dim (info, n)]);
3980 mpz_sub_ui (i, i, 1);
3981 /* To = from + (size - 1) * stride.  */
3982 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
3983 if (!integer_onep (info->stride[dim]))
3984 tmp = fold_build2_loc (input_location, MULT_EXPR,
3985 gfc_array_index_type, tmp,
3987 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
3988 gfc_array_index_type,
3989 loop->from[n], tmp);
/* Shape unknown at compile time: bounds depend on the SS kind.  */
3993 loop->from[n] = info->start[dim];
3994 switch (loopspec[n]->type)
3996 case GFC_SS_CONSTRUCTOR:
3997 /* The upper bound is calculated when we expand the
3999 gcc_assert (loop->to[n] == NULL_TREE);
4002 case GFC_SS_SECTION:
4003 /* Use the end expression if it exists and is not constant,
4004 so that it is only evaluated once.  */
4005 loop->to[n] = info->end[dim];
4008 case GFC_SS_FUNCTION:
4009 /* The loop bound will be set when we generate the call.  */
4010 gcc_assert (loop->to[n] == NULL_TREE);
4018 /* Transform everything so we have a simple incrementing variable.  */
4019 if (n < loop->dimen && integer_onep (info->stride[dim]))
4020 info->delta[dim] = gfc_index_zero_node;
4021 else if (n < loop->dimen)
4023 /* Set the delta for this section.  */
4024 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
4025 /* Number of iterations is (end - start + step) / step.
4026 with start = 0, this simplifies to
4028 for (i = 0; i<=last; i++){...};  */
4029 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4030 gfc_array_index_type, loop->to[n],
4032 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4033 gfc_array_index_type, tmp, info->stride[dim]);
/* Clamp to -1 so an empty range yields a zero-trip loop.  */
4034 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4035 tmp, build_int_cst (gfc_array_index_type, -1));
4036 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
4037 /* Make the loop variable start at 0.  */
4038 loop->from[n] = gfc_index_zero_node;
4042 /* Add all the scalar code that can be taken out of the loops.
4043 This may include calculating the loop bounds, so do it before
4044 allocating the temporary.  */
4045 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4047 /* If we want a temporary then create it.  */
4048 if (loop->temp_ss != NULL)
4050 gcc_assert (loop->temp_ss->type == GFC_SS_TEMP);
4052 /* Make absolutely sure that this is a complete type.  */
4053 if (loop->temp_ss->string_length)
4054 loop->temp_ss->data.temp.type
4055 = gfc_get_character_type_len_for_eltype
4056 (TREE_TYPE (loop->temp_ss->data.temp.type),
4057 loop->temp_ss->string_length);
/* Convert the TEMP SS into a SECTION SS over the new temporary.  */
4059 tmp = loop->temp_ss->data.temp.type;
4060 n = loop->temp_ss->data.temp.dimen;
4061 memset (&loop->temp_ss->data.info, 0, sizeof (gfc_ss_info));
4062 loop->temp_ss->type = GFC_SS_SECTION;
4063 loop->temp_ss->data.info.dimen = n;
4065 gcc_assert (loop->temp_ss->data.info.dimen != 0);
4066 for (n = 0; n < loop->temp_ss->data.info.dimen; n++)
4067 loop->temp_ss->data.info.dim[n] = n;
4069 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
4070 loop->temp_ss, tmp, NULL_TREE,
4071 false, true, false, where);
4074 for (n = 0; n < loop->temp_dim; n++)
4075 loopspec[loop->order[n]] = NULL;
4079 /* For array parameters we don't have loop variables, so don't calculate the
4081 if (loop->array_parameter)
4084 /* Calculate the translation from loop variables to array indices.  */
4085 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4087 if (ss->type != GFC_SS_SECTION && ss->type != GFC_SS_COMPONENT
4088 && ss->type != GFC_SS_CONSTRUCTOR)
4092 info = &ss->data.info;
4094 for (n = 0; n < info->dimen; n++)
4096 /* If we are specifying the range the delta is already set.  */
4097 if (loopspec[n] != ss)
4099 dim = ss->data.info.dim[n];
4101 /* Calculate the offset relative to the loop variable.
4102 First multiply by the stride.  */
4103 tmp = loop->from[n];
4104 if (!integer_onep (info->stride[dim]))
4105 tmp = fold_build2_loc (input_location, MULT_EXPR,
4106 gfc_array_index_type,
4107 tmp, info->stride[dim]);
4109 /* Then subtract this from our starting value.  */
4110 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4111 gfc_array_index_type,
4112 info->start[dim], tmp);
4114 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4121 /* Calculate the size of a given array dimension from the bounds.  This
4122 is simply (ubound - lbound + 1) if this expression is positive
4123 or 0 if it is negative (pick either one if it is zero).  Optionally
4124 (if or_expr is present) OR the (expression != 0) condition to it.  */
4127 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4132 /* Calculate (ubound - lbound + 1).  */
4133 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4135 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4136 gfc_index_one_node);
4138 /* Check whether the size for this dimension is negative.  */
/* Note: the condition is <= 0, so a zero extent also selects the
   zero branch below (harmless, per the function comment).  */
4139 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4140 gfc_index_zero_node);
/* Clamp negative extents to zero.  */
4141 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4142 gfc_index_zero_node, res);
4144 /* Build OR expression.  */
4146 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4147 boolean_type_node, *or_expr, cond);
4153 /* For an array descriptor, get the total number of elements.  This is just
4154 the product of the extents along from_dim to to_dim.  */
4157 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
/* Start the product at one; an empty range yields size 1.  */
4162 res = gfc_index_one_node;
/* Multiply the (clamped, non-negative) extent of each dimension.  */
4164 for (dim = from_dim; dim < to_dim; ++dim)
4170 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4171 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4173 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4174 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4182 /* Full size of an array.  */
/* Convenience wrapper: product of extents over all RANK dimensions.  */
4185 gfc_conv_descriptor_size (tree desc, int rank)
4187 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4191 /* Size of a coarray for all dimensions but the last.  */
/* Cobounds occupy descriptor dims [rank, rank+corank); the last codimension
   is excluded (its extent is determined by the number of images).  */
4194 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4196 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4200 /* Fills in an array descriptor, and returns the size of the array.
4201 The size will be a simple_val, ie a variable or a constant.  Also
4202 calculates the offset of the base.  The pointer argument overflow,
4203 which should be of integer type, will increase in value if overflow
4204 occurs during the size calculation.  Returns the size of the array.
4208 for (n = 0; n < rank; n++)
4210 a.lbound[n] = specified_lower_bound;
4211 offset = offset + a.lbond[n] * stride;
4213 a.ubound[n] = specified_upper_bound;
4214 a.stride[n] = stride;
4215 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4216 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4217 stride = stride * size;
4219 for (n = rank; n < rank+corank; n++)
4220 (Set lcobound/ucobound as above.)
4221 element_size = sizeof (array element);
4224 stride = (size_t) stride;
4225 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4226 stride = stride * element_size;
4232 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4233 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4234 stmtblock_t * descriptor_block, tree * overflow)
4247 stmtblock_t thenblock;
4248 stmtblock_t elseblock;
4253 type = TREE_TYPE (descriptor);
4255 stride = gfc_index_one_node;
4256 offset = gfc_index_zero_node;
4258 /* Set the dtype.  */
4259 tmp = gfc_conv_descriptor_dtype (descriptor);
4260 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
/* or_expr accumulates "some dimension is empty" over all dims.  */
4262 or_expr = boolean_false_node;
4264 for (n = 0; n < rank; n++)
4269 /* We have 3 possibilities for determining the size of the array:
4270 lower == NULL => lbound = 1, ubound = upper[n]
4271 upper[n] = NULL => lbound = 1, ubound = lower[n]
4272 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4275 /* Set lower bound.  */
4276 gfc_init_se (&se, NULL);
4278 se.expr = gfc_index_one_node;
4281 gcc_assert (lower[n]);
4284 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4285 gfc_add_block_to_block (pblock, &se.pre);
4289 se.expr = gfc_index_one_node;
4293 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4294 gfc_rank_cst[n], se.expr);
4295 conv_lbound = se.expr;
4297 /* Work out the offset for this component.  */
4298 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4300 offset = fold_build2_loc (input_location, MINUS_EXPR,
4301 gfc_array_index_type, offset, tmp);
4303 /* Set upper bound.  */
4304 gfc_init_se (&se, NULL);
4305 gcc_assert (ubound);
4306 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4307 gfc_add_block_to_block (pblock, &se.pre);
4309 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4310 gfc_rank_cst[n], se.expr);
4311 conv_ubound = se.expr;
4313 /* Store the stride.  */
4314 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4315 gfc_rank_cst[n], stride);
4317 /* Calculate size and check whether extent is negative.  */
4318 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4319 size = gfc_evaluate_now (size, pblock);
4321 /* Check whether multiplying the stride by the number of
4322 elements in this dimension would overflow.  We must also check
4323 whether the current dimension has zero size in order to avoid
/* overflow test: MAX / size < stride  ==>  stride * size overflows.  */
4326 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4327 gfc_array_index_type,
4328 fold_convert (gfc_array_index_type,
4329 TYPE_MAX_VALUE (gfc_array_index_type)),
4331 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4332 boolean_type_node, tmp, stride));
4333 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4334 integer_one_node, integer_zero_node);
/* A zero-sized dimension can never overflow (and would divide by 0).  */
4335 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4336 boolean_type_node, size,
4337 gfc_index_zero_node));
4338 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4339 integer_zero_node, tmp);
4340 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4342 *overflow = gfc_evaluate_now (tmp, pblock);
4344 /* Multiply the stride by the number of elements in this dimension.  */
4345 stride = fold_build2_loc (input_location, MULT_EXPR,
4346 gfc_array_index_type, stride, size);
4347 stride = gfc_evaluate_now (stride, pblock);
/* Set the co-bounds; these do not contribute to the allocation size.  */
4350 for (n = rank; n < rank + corank; n++)
4354 /* Set lower bound.  */
4355 gfc_init_se (&se, NULL);
4356 if (lower == NULL || lower[n] == NULL)
4358 gcc_assert (n == rank + corank - 1);
4359 se.expr = gfc_index_one_node;
4363 if (ubound || n == rank + corank - 1)
4365 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4366 gfc_add_block_to_block (pblock, &se.pre);
4370 se.expr = gfc_index_one_node;
4374 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4375 gfc_rank_cst[n], se.expr);
/* The last codimension has no upper cobound.  */
4377 if (n < rank + corank - 1)
4379 gfc_init_se (&se, NULL);
4380 gcc_assert (ubound);
4381 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4382 gfc_add_block_to_block (pblock, &se.pre);
4383 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4384 gfc_rank_cst[n], se.expr);
4388 /* The stride is the number of elements in the array, so multiply by the
4389 size of an element to get the total size.  */
4390 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4391 /* Convert to size_t.  */
4392 element_size = fold_convert (size_type_node, tmp);
4395 return element_size;
4397 stride = fold_convert (size_type_node, stride);
4399 /* First check for overflow.  Since an array of type character can
4400 have zero element_size, we must check for that before
4402 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4404 TYPE_MAX_VALUE (size_type_node), element_size);
4405 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4406 boolean_type_node, tmp, stride));
4407 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4408 integer_one_node, integer_zero_node);
4409 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4410 boolean_type_node, element_size,
4411 build_int_cst (size_type_node, 0)));
4412 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4413 integer_zero_node, tmp);
4414 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4416 *overflow = gfc_evaluate_now (tmp, pblock);
/* Total byte size = element count * element size.  */
4418 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
4419 stride, element_size);
4421 if (poffset != NULL)
4423 offset = gfc_evaluate_now (offset, pblock);
/* If no dimension can be empty, the size expression is final.  */
4427 if (integer_zerop (or_expr))
4429 if (integer_onep (or_expr))
4430 return build_int_cst (size_type_node, 0);
/* Otherwise select 0 or the computed size at run time.  */
4432 var = gfc_create_var (TREE_TYPE (size), "size");
4433 gfc_start_block (&thenblock);
4434 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
4435 thencase = gfc_finish_block (&thenblock);
4437 gfc_start_block (&elseblock);
4438 gfc_add_modify (&elseblock, var, size);
4439 elsecase = gfc_finish_block (&elseblock);
4441 tmp = gfc_evaluate_now (or_expr, pblock);
4442 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
4443 gfc_add_expr_to_block (pblock, tmp);
4449 /* Initializes the descriptor and generates a call to _gfor_allocate.  Does
4450 the work for an ALLOCATE statement.  */
4454 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
4459 tree offset = NULL_TREE;
4460 tree token = NULL_TREE;
4463 tree error = NULL_TREE;
4464 tree overflow; /* Boolean storing whether size calculation overflows.  */
4465 tree var_overflow = NULL_TREE;
4467 tree set_descriptor;
4468 stmtblock_t set_descriptor_block;
4469 stmtblock_t elseblock;
4472 gfc_ref *ref, *prev_ref = NULL;
4473 bool allocatable, coarray, dimension;
4477 /* Find the last reference in the chain.  */
4478 while (ref && ref->next != NULL)
/* Intermediate array refs must be single elements (or pure coarray
   refs) — only the final ref describes the allocation shape.  */
4480 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
4481 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
4486 if (ref == NULL || ref->type != REF_ARRAY)
/* Attributes come from the base symbol, or from the component when the
   allocation target is a derived-type component (prev_ref set).  */
4491 allocatable = expr->symtree->n.sym->attr.allocatable;
4492 coarray = expr->symtree->n.sym->attr.codimension;
4493 dimension = expr->symtree->n.sym->attr.dimension;
4497 allocatable = prev_ref->u.c.component->attr.allocatable;
4498 coarray = prev_ref->u.c.component->attr.codimension;
4499 dimension = prev_ref->u.c.component->attr.dimension;
4503 gcc_assert (coarray);
4505 /* Figure out the size of the array.  */
4506 switch (ref->u.ar.type)
4512 upper = ref->u.ar.start;
4518 lower = ref->u.ar.start;
4519 upper = ref->u.ar.end;
4523 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
4525 lower = ref->u.ar.as->lower;
4526 upper = ref->u.ar.as->upper;
4534 overflow = integer_zero_node;
/* Descriptor stores are collected separately so they can be made
   conditional on a successful allocation below.  */
4536 gfc_init_block (&set_descriptor_block);
4537 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
4538 ref->u.ar.as->corank, &offset, lower, upper,
4539 &se->pre, &set_descriptor_block, &overflow);
4544 var_overflow = gfc_create_var (integer_type_node, "overflow");
4545 gfc_add_modify (&se->pre, var_overflow, overflow);
4547 /* Generate the block of code handling overflow.  */
4548 msg = gfc_build_addr_expr (pchar_type_node,
4549 gfc_build_localized_cstring_const
4550 ("Integer overflow when calculating the amount of "
4551 "memory to allocate"));
4552 error = build_call_expr_loc (input_location, gfor_fndecl_runtime_error,
/* With STAT= present, report overflow through the status variable
   instead of aborting.  */
4556 if (status != NULL_TREE)
4558 tree status_type = TREE_TYPE (status);
4559 stmtblock_t set_status_block;
4561 gfc_start_block (&set_status_block);
4562 gfc_add_modify (&set_status_block, status,
4563 build_int_cst (status_type, LIBERROR_ALLOCATION));
4564 error = gfc_finish_block (&set_status_block);
4567 gfc_start_block (&elseblock);
4569 /* Allocate memory to store the data.  */
4570 pointer = gfc_conv_descriptor_data_get (se->expr);
4571 STRIP_NOPS (pointer);
4573 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
4574 token = gfc_build_addr_expr (NULL_TREE,
4575 gfc_conv_descriptor_token (se->expr));
4577 /* The allocatable variant takes the old pointer as first argument.  */
4579 gfc_allocate_allocatable (&elseblock, pointer, size, token,
4580 status, errmsg, errlen, expr);
4582 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
/* Guard the allocation with the overflow check: allocate only when no
   overflow was detected.  */
4586 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
4587 boolean_type_node, var_overflow, integer_zero_node));
4588 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
4589 error, gfc_finish_block (&elseblock));
4592 tmp = gfc_finish_block (&elseblock);
4594 gfc_add_expr_to_block (&se->pre, tmp);
4596 /* Update the array descriptors.  */
4598 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
/* Only commit the descriptor updates when allocation succeeded
   (status == 0); otherwise leave the descriptor untouched.  */
4600 set_descriptor = gfc_finish_block (&set_descriptor_block);
4601 if (status != NULL_TREE)
4603 cond = fold_build2_loc (input_location, EQ_EXPR,
4604 boolean_type_node, status,
4605 build_int_cst (TREE_TYPE (status), 0));
4606 gfc_add_expr_to_block (&se->pre,
4607 fold_build3_loc (input_location, COND_EXPR, void_type_node,
4608 gfc_likely (cond), set_descriptor,
4609 build_empty_stmt (input_location)));
4612 gfc_add_expr_to_block (&se->pre, set_descriptor);
/* Nullify allocatable components of freshly allocated derived-type
   objects so later (re)allocation/deallocation is well-defined.  */
4614 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
4615 && expr->ts.u.derived->attr.alloc_comp)
4617 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
4618 ref->u.ar.as->rank);
4619 gfc_add_expr_to_block (&se->pre, tmp);
4626 /* Deallocate an array variable.  Also used when an allocated variable goes
4631 gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
4637 gfc_start_block (&block);
4638 /* Get a pointer to the data.  */
4639 var = gfc_conv_descriptor_data_get (descriptor);
4642 /* Parameter is the address of the data component.  */
4643 tmp = gfc_deallocate_with_status (var, pstat, false, expr);
4644 gfc_add_expr_to_block (&block, tmp);
4646 /* Zero the data pointer.  */
/* Marks the array as unallocated so a subsequent ALLOCATED() inquiry
   or re-allocation behaves correctly.  */
4647 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4648 var, build_int_cst (TREE_TYPE (var), 0));
4649 gfc_add_expr_to_block (&block, tmp);
4651 return gfc_finish_block (&block);
4655 /* Create an array constructor from an initialization expression.
4656 We assume the frontend already did any expansions and conversions.  */
4659 gfc_conv_array_initializer (tree type, gfc_expr * expr)
4665 unsigned HOST_WIDE_INT lo;
4667 VEC(constructor_elt,gc) *v = NULL;
4669 switch (expr->expr_type)
4672 case EXPR_STRUCTURE:
4673 /* A single scalar or derived type value.  Create an array with all
4674 elements equal to that value.  */
4675 gfc_init_se (&se, NULL);
4677 if (expr->expr_type == EXPR_CONSTANT)
4678 gfc_conv_constant (&se, expr);
4680 gfc_conv_structure (&se, expr, 1);
/* Element count comes from the array type's index domain.  */
4682 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4683 gcc_assert (tmp && INTEGER_CST_P (tmp));
4684 hi = TREE_INT_CST_HIGH (tmp);
4685 lo = TREE_INT_CST_LOW (tmp);
4689 /* This will probably eat buckets of memory for large arrays.  */
4690 while (hi != 0 || lo != 0)
4692 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
4700 /* Create a vector of all the elements.  */
4701 for (c = gfc_constructor_first (expr->value.constructor);
4702 c; c = gfc_constructor_next (c))
4706 /* Problems occur when we get something like
4707 integer :: a(lots) = (/(i, i=1, lots)/) */
4708 gfc_fatal_error ("The number of elements in the array constructor "
4709 "at %L requires an increase of the allowed %d "
4710 "upper limit.   See -fmax-array-constructor "
4711 "option", &expr->where,
4712 gfc_option.flag_max_array_constructor);
4715 if (mpz_cmp_si (c->offset, 0) != 0)
4716 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
/* Repeated values are represented as a RANGE_EXPR index
   [offset, offset + repeat - 1].  */
4720 if (mpz_cmp_si (c->repeat, 1) > 0)
4726 mpz_add (maxval, c->offset, c->repeat);
4727 mpz_sub_ui (maxval, maxval, 1);
4728 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4729 if (mpz_cmp_si (c->offset, 0) != 0)
4731 mpz_add_ui (maxval, c->offset, 1);
4732 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4735 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4737 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
4743 gfc_init_se (&se, NULL);
4744 switch (c->expr->expr_type)
4747 gfc_conv_constant (&se, c->expr);
4750 case EXPR_STRUCTURE:
4751 gfc_conv_structure (&se, c->expr, 1);
4755 /* Catch those occasional beasts that do not simplify
4756 for one reason or another, assuming that if they are
4757 standard defying the frontend will catch them.  */
4758 gfc_conv_expr (&se, c->expr);
4762 if (range == NULL_TREE)
4763 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4766 if (index != NULL_TREE)
4767 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4768 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
/* EXPR_NULL: an unallocated/null initializer yields a null descriptor.  */
4774 return gfc_build_null_descriptor (type);
4780 /* Create a constructor from the list of elements.  */
4781 tmp = build_constructor (type, v);
4782 TREE_CONSTANT (tmp) = 1;
4787 /* Generate code to evaluate non-constant coarray cobounds.  */
4790 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
4791 const gfc_symbol *sym)
/* Cobound slots in the backend type occupy dims [rank, rank+corank).  */
4801 for (dim = as->rank; dim < as->rank + as->corank; dim++)
4803 /* Evaluate non-constant array bound expressions.  */
4804 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4805 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4807 gfc_init_se (&se, NULL);
4808 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4809 gfc_add_block_to_block (pblock, &se.pre);
4810 gfc_add_modify (pblock, lbound, se.expr);
4812 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4813 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4815 gfc_init_se (&se, NULL);
4816 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4817 gfc_add_block_to_block (pblock, &se.pre);
4818 gfc_add_modify (pblock, ubound, se.expr);
4824 /* Generate code to evaluate non-constant array bounds.  Sets *poffset and
4825 returns the size (in elements) of the array.  */
4828 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
4829 stmtblock_t * pblock)
/* Running element count (product of extents) and base offset.  */
4844 size = gfc_index_one_node;
4845 offset = gfc_index_zero_node;
4846 for (dim = 0; dim < as->rank; dim++)
4848 /* Evaluate non-constant array bound expressions.  */
4849 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4850 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4852 gfc_init_se (&se, NULL);
4853 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4854 gfc_add_block_to_block (pblock, &se.pre);
4855 gfc_add_modify (pblock, lbound, se.expr);
4857 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4858 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4860 gfc_init_se (&se, NULL);
4861 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4862 gfc_add_block_to_block (pblock, &se.pre);
4863 gfc_add_modify (pblock, ubound, se.expr);
4865 /* The offset of this dimension.  offset = offset - lbound * stride.  */
4866 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4868 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4871 /* The size of this dimension, and the stride of the next.  */
4872 if (dim + 1 < as->rank)
4873 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
/* For the last dimension the running product is the total size.  */
4875 stride = GFC_TYPE_ARRAY_SIZE (type);
4877 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
4879 /* Calculate stride = size * (ubound + 1 - lbound).  */
4880 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4881 gfc_array_index_type,
4882 gfc_index_one_node, lbound);
4883 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4884 gfc_array_index_type, ubound, tmp);
4885 tmp = fold_build2_loc (input_location, MULT_EXPR,
4886 gfc_array_index_type, size, tmp);
4888 gfc_add_modify (pblock, stride, tmp);
4890 stride = gfc_evaluate_now (tmp, pblock);
4892 /* Make sure that negative size arrays are translated
4893 to being zero size.  */
4894 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4895 stride, gfc_index_zero_node);
4896 tmp = fold_build3_loc (input_location, COND_EXPR,
4897 gfc_array_index_type, tmp,
4898 stride, gfc_index_zero_node);
4899 gfc_add_modify (pblock, stride, tmp);
/* Also emit cobound evaluation and VLA type-size fixups.  */
4905 gfc_trans_array_cobounds (type, pblock, sym);
4906 gfc_trans_vla_type_sizes (sym, pblock);
4913 /* Generate code to initialize/allocate an array variable.  */
4916 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
4917 gfc_wrapped_block * block)
4921 tree tmp = NULL_TREE;
/* Pointers and allocatables are handled elsewhere.  */
4928 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
4930 /* Do nothing for USEd variables.  */
4931 if (sym->attr.use_assoc)
4934 type = TREE_TYPE (decl);
4935 gcc_assert (GFC_ARRAY_TYPE_P (type));
/* POINTER_TYPE here means storage was not given to the decl directly,
   i.e. the array data lives off-stack.  */
4936 onstack = TREE_CODE (type) != POINTER_TYPE;
4938 gfc_init_block (&init);
4940 /* Evaluate character string length.  */
4941 if (sym->ts.type == BT_CHARACTER
4942 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl)
4944 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4946 gfc_trans_vla_type_sizes (sym, &init);
4948 /* Emit a DECL_EXPR for this variable, which will cause the
4949 gimplifier to allocate storage, and all that good stuff.  */
4950 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
4951 gfc_add_expr_to_block (&init, tmp);
4956 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* Heap/pointer case: strip the POINTER_TYPE to get the array type.  */
4960 type = TREE_TYPE (type);
4962 gcc_assert (!sym->attr.use_assoc);
4963 gcc_assert (!TREE_STATIC (decl));
4964 gcc_assert (!sym->module);
4966 if (sym->ts.type == BT_CHARACTER
4967 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4968 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4970 size = gfc_trans_array_bounds (type, sym, &offset, &init);
4972 /* Don't actually allocate space for Cray Pointees.  */
4973 if (sym->attr.cray_pointee)
4975 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4976 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4978 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* -fstack-arrays: put the data in a stack VLA instead of the heap.  */
4982 if (gfc_option.flag_stack_arrays)
4984 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
4985 space = build_decl (sym->declared_at.lb->location,
4986 VAR_DECL, create_tmp_var_name ("A"),
4987 TREE_TYPE (TREE_TYPE (decl)));
4988 gfc_trans_vla_type_sizes (sym, &init);
4992 /* The size is the number of elements in the array, so multiply by the
4993 size of an element to get the total size.  */
4994 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4995 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4996 size, fold_convert (gfc_array_index_type, tmp));
4998 /* Allocate memory to hold the data.  */
4999 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5000 gfc_add_modify (&init, decl, tmp);
5002 /* Free the temporary.  */
/* The matching free becomes the cleanup half of the wrapped block.  */
5003 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5007 /* Set offset of the array.  */
5008 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5009 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5011 /* Automatic arrays should not have initializers.  */
5012 gcc_assert (!sym->value);
5014 inittree = gfc_finish_block (&init);
5021 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5022 where also space is located.  */
5023 gfc_init_block (&init);
5024 tmp = fold_build1_loc (input_location, DECL_EXPR,
5025 TREE_TYPE (space), space);
5026 gfc_add_expr_to_block (&init, tmp);
/* Point the decl at the stack space.  */
5027 addr = fold_build1_loc (sym->declared_at.lb->location,
5028 ADDR_EXPR, TREE_TYPE (decl), space);
5029 gfc_add_modify (&init, decl, addr);
5030 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5033 gfc_add_init_cleanup (block, inittree, tmp);
5037 /* Generate entry and exit code for g77 calling convention arrays. */
/* Emit entry code for a g77 (descriptorless) calling-convention dummy
   array SYM: evaluate character length, array bounds and offset, and
   set the local pointer when the PARM_DECL isn't used directly.  The
   whole init block is guarded on presence for OPTIONAL dummies.
   NOTE(review): source lines are elided in this view; braces/returns
   between statements are missing.  */
5040 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5050 gfc_save_backend_locus (&loc);
5051 gfc_set_backend_locus (&sym->declared_at);
5053 /* Descriptor type. */
5054 parm = sym->backend_decl;
5055 type = TREE_TYPE (parm);
5056 gcc_assert (GFC_ARRAY_TYPE_P (type));
5058 gfc_start_block (&init);
/* A VAR_DECL length means it is non-constant and must be evaluated.  */
5060 if (sym->ts.type == BT_CHARACTER
5061 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5062 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5064 /* Evaluate the bounds of the array. */
5065 gfc_trans_array_bounds (type, sym, &offset, &init);
5067 /* Set the offset. */
5068 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5069 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5071 /* Set the pointer itself if we aren't using the parameter directly. */
5072 if (TREE_CODE (parm) != PARM_DECL)
5074 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5075 gfc_add_modify (&init, parm, tmp);
5077 stmt = gfc_finish_block (&init);
5079 gfc_restore_backend_locus (&loc);
5081 /* Add the initialization code to the start of the function. */
/* For optional/not-always-present dummies, only run the init code when
   the argument is actually present.  */
5083 if (sym->attr.optional || sym->attr.not_always_present)
5085 tmp = gfc_conv_expr_present (sym);
5086 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5089 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5093 /* Modify the descriptor of an array parameter so that it has the
5094 correct lower bound. Also move the upper bound accordingly.
5095 If the array is not packed, it will be copied into a temporary.
5096 For each dimension we set the new lower and upper bounds. Then we copy the
5097 stride and calculate the offset for this dimension. We also work out
5098 what the stride of a packed array would be, and see if the two match.
5099 If the array need repacking, we set the stride to the values we just
5100 calculated, recalculate the offset and copy the array data.
5101 Code is also added to copy the data back at the end of the function.
/* Rebias a dummy array TMPDESC for symbol SYM: adjust lower/upper
   bounds, compute strides and the descriptor offset, and optionally
   repack the data through _gfortran_internal_pack (with matching
   unpack/free cleanup).  Init and cleanup code are added to BLOCK.
   NOTE(review): this view of the function is elided -- braces, else
   branches and some operands are missing; comments below only describe
   the statements that are visible.  */
5105 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5106 gfc_wrapped_block * block)
5113 tree stmtInit, stmtCleanup;
5120 tree stride, stride2;
5130 /* Do nothing for pointer and allocatable arrays. */
5131 if (sym->attr.pointer || sym->attr.allocatable)
/* Descriptorless dummies take the simpler g77 path.  */
5134 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5136 gfc_trans_g77_array (sym, block);
5140 gfc_save_backend_locus (&loc);
5141 gfc_set_backend_locus (&sym->declared_at);
5143 /* Descriptor type. */
5144 type = TREE_TYPE (tmpdesc);
5145 gcc_assert (GFC_ARRAY_TYPE_P (type));
5146 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5147 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5148 gfc_start_block (&init);
5150 if (sym->ts.type == BT_CHARACTER
5151 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5152 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
/* Runtime extent checks only for explicit-shape dummies with
   -fcheck=bounds.  */
5154 checkparm = (sym->as->type == AS_EXPLICIT
5155 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5157 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5158 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5160 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5162 /* For non-constant shape arrays we only check if the first dimension
5163 is contiguous. Repacking higher dimensions wouldn't gain us
5164 anything as we still don't know the array stride. */
5165 partial = gfc_create_var (boolean_type_node, "partial");
5166 TREE_USED (partial) = 1;
5167 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5168 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5169 gfc_index_one_node);
5170 gfc_add_modify (&init, partial, tmp);
5173 partial = NULL_TREE;
5175 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5176 here, however I think it does the right thing. */
5179 /* Set the first stride. */
5180 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5181 stride = gfc_evaluate_now (stride, &init);
/* Treat a zero incoming stride as one (guards against degenerate
   descriptors).  */
5183 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5184 stride, gfc_index_zero_node);
5185 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5186 tmp, gfc_index_one_node, stride);
5187 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5188 gfc_add_modify (&init, stride, tmp);
5190 /* Allow the user to disable array repacking. */
5191 stmt_unpacked = NULL_TREE;
5195 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5196 /* A library call to repack the array if necessary. */
5197 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5198 stmt_unpacked = build_call_expr_loc (input_location,
5199 gfor_fndecl_in_pack, 1, tmp);
5201 stride = gfc_index_one_node;
5203 if (gfc_option.warn_array_temp)
5204 gfc_warning ("Creating array temporary at %L", &loc);
5207 /* This is for the case where the array data is used directly without
5208 calling the repack function. */
5209 if (no_repack || partial != NULL_TREE)
5210 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5212 stmt_packed = NULL_TREE;
5214 /* Assign the data pointer. */
5215 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5217 /* Don't repack unknown shape arrays when the first stride is 1. */
5218 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5219 partial, stmt_packed, stmt_unpacked);
5222 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5223 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5225 offset = gfc_index_zero_node;
5226 size = gfc_index_one_node;
5228 /* Evaluate the bounds of the array. */
5229 for (n = 0; n < sym->as->rank; n++)
5231 if (checkparm || !sym->as->upper[n])
5233 /* Get the bounds of the actual parameter. */
5234 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5235 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5239 dubound = NULL_TREE;
5240 dlbound = NULL_TREE;
5243 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5244 if (!INTEGER_CST_P (lbound))
5246 gfc_init_se (&se, NULL);
5247 gfc_conv_expr_type (&se, sym->as->lower[n],
5248 gfc_array_index_type);
5249 gfc_add_block_to_block (&init, &se.pre);
5250 gfc_add_modify (&init, lbound, se.expr);
5253 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5254 /* Set the desired upper bound. */
5255 if (sym->as->upper[n])
5257 /* We know what we want the upper bound to be. */
5258 if (!INTEGER_CST_P (ubound))
5260 gfc_init_se (&se, NULL);
5261 gfc_conv_expr_type (&se, sym->as->upper[n],
5262 gfc_array_index_type);
5263 gfc_add_block_to_block (&init, &se.pre);
5264 gfc_add_modify (&init, ubound, se.expr);
5267 /* Check the sizes match. */
5270 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5274 temp = fold_build2_loc (input_location, MINUS_EXPR,
5275 gfc_array_index_type, ubound, lbound);
5276 temp = fold_build2_loc (input_location, PLUS_EXPR,
5277 gfc_array_index_type,
5278 gfc_index_one_node, temp);
5279 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5280 gfc_array_index_type, dubound,
5282 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5283 gfc_array_index_type,
5284 gfc_index_one_node, stride2);
5285 tmp = fold_build2_loc (input_location, NE_EXPR,
5286 gfc_array_index_type, temp, stride2);
/* %%ld survives asprintf as %ld for the runtime-check format.  */
5287 asprintf (&msg, "Dimension %d of array '%s' has extent "
5288 "%%ld instead of %%ld", n+1, sym->name);
5290 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5291 fold_convert (long_integer_type_node, temp),
5292 fold_convert (long_integer_type_node, stride2));
5299 /* For assumed shape arrays move the upper bound by the same amount
5300 as the lower bound. */
5301 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5302 gfc_array_index_type, dubound, dlbound);
5303 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5304 gfc_array_index_type, tmp, lbound);
5305 gfc_add_modify (&init, ubound, tmp);
5307 /* The offset of this dimension. offset = offset - lbound * stride. */
5308 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5310 offset = fold_build2_loc (input_location, MINUS_EXPR,
5311 gfc_array_index_type, offset, tmp);
5313 /* The size of this dimension, and the stride of the next. */
5314 if (n + 1 < sym->as->rank)
5316 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5318 if (no_repack || partial != NULL_TREE)
5320 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5322 /* Figure out the stride if not a known constant. */
5323 if (!INTEGER_CST_P (stride))
5326 stmt_packed = NULL_TREE;
5329 /* Calculate stride = size * (ubound + 1 - lbound). */
5330 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5331 gfc_array_index_type,
5332 gfc_index_one_node, lbound);
5333 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5334 gfc_array_index_type, ubound, tmp);
5335 size = fold_build2_loc (input_location, MULT_EXPR,
5336 gfc_array_index_type, size, tmp);
5340 /* Assign the stride. */
5341 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5342 tmp = fold_build3_loc (input_location, COND_EXPR,
5343 gfc_array_index_type, partial,
5344 stmt_unpacked, stmt_packed);
5346 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5347 gfc_add_modify (&init, stride, tmp);
/* Last dimension: update the total array size instead of a stride.  */
5352 stride = GFC_TYPE_ARRAY_SIZE (type);
5354 if (stride && !INTEGER_CST_P (stride))
5356 /* Calculate size = stride * (ubound + 1 - lbound). */
5357 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5358 gfc_array_index_type,
5359 gfc_index_one_node, lbound);
5360 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5361 gfc_array_index_type,
5363 tmp = fold_build2_loc (input_location, MULT_EXPR,
5364 gfc_array_index_type,
5365 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5366 gfc_add_modify (&init, stride, tmp);
5371 gfc_trans_array_cobounds (type, &init, sym);
5373 /* Set the offset. */
5374 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5375 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5377 gfc_trans_vla_type_sizes (sym, &init);
5379 stmtInit = gfc_finish_block (&init);
5381 /* Only do the entry/initialization code if the arg is present. */
5382 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
/* Entry-master dummies may also be absent, like OPTIONAL ones.  */
5383 optional_arg = (sym->attr.optional
5384 || (sym->ns->proc_name->attr.entry_master
5385 && sym->attr.dummy));
5388 tmp = gfc_conv_expr_present (sym);
5389 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
5390 build_empty_stmt (input_location));
5395 stmtCleanup = NULL_TREE;
5398 stmtblock_t cleanup;
5399 gfc_start_block (&cleanup);
5401 if (sym->attr.intent != INTENT_IN)
5403 /* Copy the data back. */
5404 tmp = build_call_expr_loc (input_location,
5405 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
5406 gfc_add_expr_to_block (&cleanup, tmp);
5409 /* Free the temporary. */
5410 tmp = gfc_call_free (tmpdesc);
5411 gfc_add_expr_to_block (&cleanup, tmp);
5413 stmtCleanup = gfc_finish_block (&cleanup);
5415 /* Only do the cleanup if the array was repacked. */
5416 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
5417 tmp = gfc_conv_descriptor_data_get (tmp);
5418 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
5420 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5421 build_empty_stmt (input_location));
5425 tmp = gfc_conv_expr_present (sym);
5426 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5427 build_empty_stmt (input_location));
5431 /* We don't need to free any memory allocated by internal_pack as it will
5432 be freed at the end of the function by pop_context. */
5433 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
5435 gfc_restore_backend_locus (&loc);
5439 /* Calculate the overall offset, including subreferences. */
/* Compute the data pointer for descriptor PARM: take DESC's data,
   apply OFFSET, and -- when SUBREF is set -- walk EXPR's reference
   chain (components, substrings, element refs) to point at the
   addressed subobject; finally store the address into PARM's data
   field.  NOTE(review): lines are elided in this view (e.g. the early
   return after the NULL-offset comment and some switch labels are
   missing).  */
5441 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
5442 bool subref, gfc_expr *expr)
5452 /* If offset is NULL and this is not a subreferenced array, there is
5454 if (offset == NULL_TREE)
5457 offset = gfc_index_zero_node;
/* Index the raw data with the flattened offset.  */
5462 tmp = gfc_conv_array_data (desc);
5463 tmp = build_fold_indirect_ref_loc (input_location,
5465 tmp = gfc_build_array_ref (tmp, offset, NULL);
5467 /* Offset the data pointer for pointer assignments from arrays with
5468 subreferences; e.g. my_integer => my_type(:)%integer_component. */
5471 /* Go past the array reference. */
5472 for (ref = expr->ref; ref; ref = ref->next)
5473 if (ref->type == REF_ARRAY &&
5474 ref->u.ar.type != AR_ELEMENT)
5480 /* Calculate the offset for each subsequent subreference. */
5481 for (; ref; ref = ref->next)
/* Component reference: build a COMPONENT_REF on the field decl.  */
5486 field = ref->u.c.component->backend_decl;
5487 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
5488 tmp = fold_build3_loc (input_location, COMPONENT_REF,
5490 tmp, field, NULL_TREE);
/* Substring reference: index by the (character) start position.  */
5494 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
5495 gfc_init_se (&start, NULL);
5496 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
5497 gfc_add_block_to_block (block, &start.pre);
5498 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
5502 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
5503 && ref->u.ar.type == AR_ELEMENT);
5505 /* TODO - Add bounds checking. */
/* Element reference: accumulate index = sum((start - lbound) * stride)
   over the dimensions, in column-major order.  */
5506 stride = gfc_index_one_node;
5507 index = gfc_index_zero_node;
5508 for (n = 0; n < ref->u.ar.dimen; n++)
5513 /* Update the index. */
5514 gfc_init_se (&start, NULL);
5515 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
5516 itmp = gfc_evaluate_now (start.expr, block);
5517 gfc_init_se (&start, NULL);
5518 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
5519 jtmp = gfc_evaluate_now (start.expr, block);
5520 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5521 gfc_array_index_type, itmp, jtmp);
5522 itmp = fold_build2_loc (input_location, MULT_EXPR,
5523 gfc_array_index_type, itmp, stride);
5524 index = fold_build2_loc (input_location, PLUS_EXPR,
5525 gfc_array_index_type, itmp, index);
5526 index = gfc_evaluate_now (index, block);
5528 /* Update the stride. */
5529 gfc_init_se (&start, NULL);
5530 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
5531 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5532 gfc_array_index_type, start.expr,
5534 itmp = fold_build2_loc (input_location, PLUS_EXPR,
5535 gfc_array_index_type,
5536 gfc_index_one_node, itmp);
5537 stride = fold_build2_loc (input_location, MULT_EXPR,
5538 gfc_array_index_type, stride, itmp);
5539 stride = gfc_evaluate_now (stride, block);
5542 /* Apply the index to obtain the array element. */
5543 tmp = gfc_build_array_ref (tmp, index, NULL);
5553 /* Set the target data pointer. */
5554 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
5555 gfc_conv_descriptor_data_set (block, parm, offset);
5559 /* gfc_conv_expr_descriptor needs the string length an expression
5560 so that the size of the temporary can be obtained. This is done
5561 by adding up the string lengths of all the elements in the
5562 expression. Functions with non-constant expressions have their
5563 string lengths mapped onto the actual arguments using the
5564 interface mapping machinery in trans-expr.c. */
/* Determine the string length of array expression EXPR so a temporary
   can be sized, storing it into expr->ts.u.cl->backend_decl and adding
   any setup code to SE->pre.  Handles operators (concat adds lengths,
   parentheses copy them), function calls (mapping formal->actual
   arguments through the interface-mapping machinery), and a fallback
   gfc_conv_string_length call.  NOTE(review): switch labels, braces and
   returns are elided in this view of the file.  */
5566 get_array_charlen (gfc_expr *expr, gfc_se *se)
5568 gfc_interface_mapping mapping;
5569 gfc_formal_arglist *formal;
5570 gfc_actual_arglist *arg;
/* Constant-length case: nothing to compute, just materialize it.  */
5573 if (expr->ts.u.cl->length
5574 && gfc_is_constant_expr (expr->ts.u.cl->length))
5576 if (!expr->ts.u.cl->backend_decl)
5577 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5581 switch (expr->expr_type)
5584 get_array_charlen (expr->value.op.op1, se);
5586 /* For parentheses the expression ts.u.cl is identical. */
5587 if (expr->value.op.op == INTRINSIC_PARENTHESES)
5590 expr->ts.u.cl->backend_decl =
5591 gfc_create_var (gfc_charlen_type_node, "sln");
5593 if (expr->value.op.op2)
5595 get_array_charlen (expr->value.op.op2, se);
/* The only binary character operator expected here is //.  */
5597 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
5599 /* Add the string lengths and assign them to the expression
5600 string length backend declaration. */
5601 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5602 fold_build2_loc (input_location, PLUS_EXPR,
5603 gfc_charlen_type_node,
5604 expr->value.op.op1->ts.u.cl->backend_decl,
5605 expr->value.op.op2->ts.u.cl->backend_decl));
/* Unary operator: length is that of the single operand.  */
5608 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5609 expr->value.op.op1->ts.u.cl->backend_decl);
5613 if (expr->value.function.esym == NULL
5614 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
5616 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5620 /* Map expressions involving the dummy arguments onto the actual
5621 argument expressions. */
5622 gfc_init_interface_mapping (&mapping);
5623 formal = expr->symtree->n.sym->formal;
5624 arg = expr->value.function.actual;
5626 /* Set se = NULL in the calls to the interface mapping, to suppress any
5628 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
5633 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
5636 gfc_init_se (&tse, NULL);
5638 /* Build the expression for the character length and convert it. */
5639 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
5641 gfc_add_block_to_block (&se->pre, &tse.pre);
5642 gfc_add_block_to_block (&se->post, &tse.post);
5643 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
/* Clamp to zero: a negative length means a zero-length string.  */
5644 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
5645 gfc_charlen_type_node, tse.expr,
5646 build_int_cst (gfc_charlen_type_node, 0));
5647 expr->ts.u.cl->backend_decl = tse.expr;
5648 gfc_free_interface_mapping (&mapping);
5652 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5657 /* Helper function to check dimensions. */
/* Return whether INFO's scalarizer dimensions are the identity
   permutation (dim[n] == n for all n), i.e. no transposed/reordered
   dimensions.  NOTE(review): the return statements are elided in this
   view -- presumably false on a mismatch, true otherwise; confirm
   against the full source.  */
5659 dim_ok (gfc_ss_info *info)
5662 for (n = 0; n < info->dimen; n++)
5663 if (info->dim[n] != n)
5668 /* Convert an array for passing as an actual argument. Expressions and
5669 vector subscripts are evaluated and stored in a temporary, which is then
5670 passed. For whole arrays the descriptor is passed. For array sections
5671 a modified copy of the descriptor is passed, but using the original data.
5673 This function is also used for array pointer assignments, and there
5676 - se->want_pointer && !se->direct_byref
5677 EXPR is an actual argument. On exit, se->expr contains a
5678 pointer to the array descriptor.
5680 - !se->want_pointer && !se->direct_byref
5681 EXPR is an actual argument to an intrinsic function or the
5682 left-hand side of a pointer assignment. On exit, se->expr
5683 contains the descriptor for EXPR.
5685 - !se->want_pointer && se->direct_byref
5686 EXPR is the right-hand side of a pointer assignment and
5687 se->expr is the descriptor for the previously-evaluated
5688 left-hand side. The function creates an assignment from
5692 The se->force_tmp flag disables the non-copying descriptor optimization
5693 that is used for transpose. It may be used in cases where there is an
5694 alias between the transpose argument and another argument in the same
/* Build an array descriptor for EXPR (see the block comment above for
   the se->want_pointer / se->direct_byref contract).  Full arrays pass
   their existing descriptor; sections get a fresh descriptor aliasing
   the original data; everything else is copied into a temporary via
   the scalarizer.  NOTE(review): many lines (braces, case labels,
   returns, some operands) are elided in this view; comments below
   describe only the visible statements.  */
5698 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
5710 bool subref_array_target = false;
5713 gcc_assert (ss != NULL);
5714 gcc_assert (ss != gfc_ss_terminator);
5716 /* Special case things we know we can pass easily. */
5717 switch (expr->expr_type)
5720 /* If we have a linear array section, we can pass it directly.
5721 Otherwise we need to copy it into a temporary. */
5723 gcc_assert (ss->type == GFC_SS_SECTION);
5724 gcc_assert (ss->expr == expr);
5725 info = &ss->data.info;
5727 /* Get the descriptor for the array. */
5728 gfc_conv_ss_descriptor (&se->pre, ss, 0);
5729 desc = info->descriptor;
/* A subreference target (e.g. p => a(:)%comp) needs its data pointer
   offset later rather than a temporary.  */
5731 subref_array_target = se->direct_byref && is_subref_array (expr);
5732 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
5733 && !subref_array_target;
5740 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5742 /* Create a new descriptor if the array doesn't have one. */
5745 else if (info->ref->u.ar.type == AR_FULL)
5747 else if (se->direct_byref)
5750 full = gfc_full_array_ref_p (info->ref, NULL);
5752 if (full && dim_ok (info))
5754 if (se->direct_byref && !se->byref_noassign)
5756 /* Copy the descriptor for pointer assignments. */
5757 gfc_add_modify (&se->pre, se->expr, desc);
5759 /* Add any offsets from subreferences. */
5760 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
5761 subref_array_target, expr);
5763 else if (se->want_pointer)
5765 /* We pass full arrays directly. This means that pointers and
5766 allocatable arrays should also work. */
5767 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5774 if (expr->ts.type == BT_CHARACTER)
5775 se->string_length = gfc_get_expr_charlen (expr);
5783 /* We don't need to copy data in some cases. */
5784 arg = gfc_get_noncopying_intrinsic_argument (expr);
5787 /* This is a call to transpose... */
5788 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
5789 /* ... which has already been handled by the scalarizer, so
5790 that we just need to get its argument's descriptor. */
5791 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
5795 /* A transformational function return value will be a temporary
5796 array descriptor. We still need to go through the scalarizer
5797 to create the descriptor. Elemental functions are handled as
5798 arbitrary expressions, i.e. copy to a temporary. */
5800 if (se->direct_byref)
5802 gcc_assert (ss->type == GFC_SS_FUNCTION && ss->expr == expr);
5804 /* For pointer assignments pass the descriptor directly. */
5808 gcc_assert (se->ss == ss);
5809 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
5810 gfc_conv_expr (se, expr);
5814 if (ss->expr != expr || ss->type != GFC_SS_FUNCTION)
5816 if (ss->expr != expr)
5817 /* Elemental function. */
5818 gcc_assert ((expr->value.function.esym != NULL
5819 && expr->value.function.esym->attr.elemental)
5820 || (expr->value.function.isym != NULL
5821 && expr->value.function.isym->elemental));
5823 gcc_assert (ss->type == GFC_SS_INTRINSIC);
5826 if (expr->ts.type == BT_CHARACTER
5827 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
5828 get_array_charlen (expr, se);
5834 /* Transformational function. */
5835 info = &ss->data.info;
5841 /* Constant array constructors don't need a temporary. */
5842 if (ss->type == GFC_SS_CONSTRUCTOR
5843 && expr->ts.type != BT_CHARACTER
5844 && gfc_constant_array_constructor_p (expr->value.constructor))
5847 info = &ss->data.info;
5857 /* Something complicated. Copy it into a temporary. */
5863 /* If we are creating a temporary, we don't need to bother about aliases
5868 gfc_init_loopinfo (&loop);
5870 /* Associate the SS with the loop. */
5871 gfc_add_ss_to_loop (&loop, ss);
5873 /* Tell the scalarizer not to bother creating loop variables, etc. */
5875 loop.array_parameter = 1;
5877 /* The right-hand side of a pointer assignment mustn't use a temporary. */
5878 gcc_assert (!se->direct_byref);
5880 /* Setup the scalarizing loops and bounds. */
5881 gfc_conv_ss_startstride (&loop);
5885 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
5886 get_array_charlen (expr, se);
5888 /* Tell the scalarizer to make a temporary. */
5889 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
5890 ((expr->ts.type == BT_CHARACTER)
5891 ? expr->ts.u.cl->backend_decl
5895 se->string_length = loop.temp_ss->string_length;
5896 gcc_assert (loop.temp_ss->data.temp.dimen == loop.dimen);
5897 gfc_add_ss_to_loop (&loop, loop.temp_ss);
5900 gfc_conv_loop_setup (&loop, & expr->where);
5904 /* Copy into a temporary and pass that. We don't need to copy the data
5905 back because expressions and vector subscripts must be INTENT_IN. */
5906 /* TODO: Optimize passing function return values. */
5910 /* Start the copying loops. */
5911 gfc_mark_ss_chain_used (loop.temp_ss, 1);
5912 gfc_mark_ss_chain_used (ss, 1);
5913 gfc_start_scalarized_body (&loop, &block);
5915 /* Copy each data element. */
5916 gfc_init_se (&lse, NULL);
5917 gfc_copy_loopinfo_to_se (&lse, &loop);
5918 gfc_init_se (&rse, NULL);
5919 gfc_copy_loopinfo_to_se (&rse, &loop);
5921 lse.ss = loop.temp_ss;
5924 gfc_conv_scalarized_array_ref (&lse, NULL);
/* Character data may come back as a pointer; dereference it.  */
5925 if (expr->ts.type == BT_CHARACTER)
5927 gfc_conv_expr (&rse, expr);
5928 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
5929 rse.expr = build_fold_indirect_ref_loc (input_location,
5933 gfc_conv_expr_val (&rse, expr);
5935 gfc_add_block_to_block (&block, &rse.pre);
5936 gfc_add_block_to_block (&block, &lse.pre);
5938 lse.string_length = rse.string_length;
5939 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
5940 expr->expr_type == EXPR_VARIABLE
5941 || expr->expr_type == EXPR_ARRAY, true);
5942 gfc_add_expr_to_block (&block, tmp);
5944 /* Finish the copying loops. */
5945 gfc_trans_scalarizing_loops (&loop, &block);
5947 desc = loop.temp_ss->data.info.descriptor;
5949 else if (expr->expr_type == EXPR_FUNCTION && dim_ok (info))
5951 desc = info->descriptor;
5952 se->string_length = ss->string_length;
5956 /* We pass sections without copying to a temporary. Make a new
5957 descriptor and point it at the section we want. The loop variable
5958 limits will be the limits of the section.
5959 A function may decide to repack the array to speed up access, but
5960 we're not bothered about that here. */
5961 int dim, ndim, codim;
5969 ndim = info->ref ? info->ref->u.ar.dimen : info->dimen;
5971 if (se->want_coarray)
5973 gfc_array_ref *ar = &info->ref->u.ar;
5975 codim = gfc_get_corank (expr);
5976 for (n = 0; n < codim - 1; n++)
5978 /* Make sure we are not lost somehow. */
5979 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
5981 /* Make sure the call to gfc_conv_section_startstride won't
5982 generate unnecessary code to calculate stride. */
5983 gcc_assert (ar->stride[n + ndim] == NULL);
5985 gfc_conv_section_startstride (&loop, ss, n + ndim);
5986 loop.from[n + loop.dimen] = info->start[n + ndim];
5987 loop.to[n + loop.dimen] = info->end[n + ndim];
5990 gcc_assert (n == codim - 1);
5991 evaluate_bound (&loop.pre, info->start, ar->start,
5992 info->descriptor, n + ndim, true);
5993 loop.from[n + loop.dimen] = info->start[n + ndim];
5998 /* Set the string_length for a character array. */
5999 if (expr->ts.type == BT_CHARACTER)
6000 se->string_length = gfc_get_expr_charlen (expr);
6002 desc = info->descriptor;
6003 if (se->direct_byref && !se->byref_noassign)
6005 /* For pointer assignments we fill in the destination. */
6007 parmtype = TREE_TYPE (parm);
6011 /* Otherwise make a new one. */
6012 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6013 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6014 loop.from, loop.to, 0,
6015 GFC_ARRAY_UNKNOWN, false);
6016 parm = gfc_create_var (parmtype, "parm");
6019 offset = gfc_index_zero_node;
6021 /* The following can be somewhat confusing. We have two
6022 descriptors, a new one and the original array.
6023 {parm, parmtype, dim} refer to the new one.
6024 {desc, type, n, loop} refer to the original, which maybe
6025 a descriptorless array.
6026 The bounds of the scalarization are the bounds of the section.
6027 We don't have to worry about numeric overflows when calculating
6028 the offsets because all elements are within the array data. */
6030 /* Set the dtype. */
6031 tmp = gfc_conv_descriptor_dtype (parm);
6032 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6034 /* Set offset for assignments to pointer only to zero if it is not
6036 if (se->direct_byref
6037 && info->ref && info->ref->u.ar.type != AR_FULL)
6038 base = gfc_index_zero_node;
6039 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6040 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
/* Per-dimension: compute the section's contribution to offset, the
   new bounds and the new stride of the aliasing descriptor.  */
6044 for (n = 0; n < ndim; n++)
6046 stride = gfc_conv_array_stride (desc, n);
6048 /* Work out the offset. */
6050 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6052 gcc_assert (info->subscript[n]
6053 && info->subscript[n]->type == GFC_SS_SCALAR);
6054 start = info->subscript[n]->data.scalar.expr;
6058 /* Evaluate and remember the start of the section. */
6059 start = info->start[n];
6060 stride = gfc_evaluate_now (stride, &loop.pre);
6063 tmp = gfc_conv_array_lbound (desc, n);
6064 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6066 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6068 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6072 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6074 /* For elemental dimensions, we only need the offset. */
6078 /* Vector subscripts need copying and are handled elsewhere. */
6080 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6082 /* look for the corresponding scalarizer dimension: dim. */
6083 for (dim = 0; dim < ndim; dim++)
6084 if (info->dim[dim] == n)
6087 /* loop exited early: the DIM being looked for has been found. */
6088 gcc_assert (dim < ndim);
6090 /* Set the new lower bound. */
6091 from = loop.from[dim];
6094 /* If we have an array section or are assigning make sure that
6095 the lower bound is 1. References to the full
6096 array should otherwise keep the original bounds. */
6098 || info->ref->u.ar.type != AR_FULL)
6099 && !integer_onep (from))
6101 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6102 gfc_array_index_type, gfc_index_one_node,
6104 to = fold_build2_loc (input_location, PLUS_EXPR,
6105 gfc_array_index_type, to, tmp);
6106 from = gfc_index_one_node;
6108 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6109 gfc_rank_cst[dim], from);
6111 /* Set the new upper bound. */
6112 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6113 gfc_rank_cst[dim], to);
6115 /* Multiply the stride by the section stride to get the
6117 stride = fold_build2_loc (input_location, MULT_EXPR,
6118 gfc_array_index_type,
6119 stride, info->stride[n]);
6121 if (se->direct_byref
6123 && info->ref->u.ar.type != AR_FULL)
6125 base = fold_build2_loc (input_location, MINUS_EXPR,
6126 TREE_TYPE (base), base, stride);
6128 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6130 tmp = gfc_conv_array_lbound (desc, n);
6131 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6132 TREE_TYPE (base), tmp, loop.from[dim]);
6133 tmp = fold_build2_loc (input_location, MULT_EXPR,
6134 TREE_TYPE (base), tmp,
6135 gfc_conv_array_stride (desc, n));
6136 base = fold_build2_loc (input_location, PLUS_EXPR,
6137 TREE_TYPE (base), tmp, base);
6140 /* Store the new stride. */
6141 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6142 gfc_rank_cst[dim], stride);
/* Copy codimension bounds into the new descriptor.  */
6145 for (n = loop.dimen; n < loop.dimen + codim; n++)
6147 from = loop.from[n];
6149 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6150 gfc_rank_cst[n], from);
6151 if (n < loop.dimen + codim - 1)
6152 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6153 gfc_rank_cst[n], to);
6156 if (se->data_not_needed)
6157 gfc_conv_descriptor_data_set (&loop.pre, parm,
6158 gfc_index_zero_node);
6160 /* Point the data pointer at the 1st element in the section. */
6161 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6162 subref_array_target, expr);
6164 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6165 && !se->data_not_needed)
6167 /* Set the offset. */
6168 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6172 /* Only the callee knows what the correct offset is, so just set
6174 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6179 if (!se->direct_byref || se->byref_noassign)
6181 /* Get a pointer to the new descriptor. */
6182 if (se->want_pointer)
6183 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6188 gfc_add_block_to_block (&se->pre, &loop.pre);
6189 gfc_add_block_to_block (&se->post, &loop.post);
6191 /* Cleanup the scalarizer. */
6192 gfc_cleanup_loop (&loop);
6195 /* Helper function for gfc_conv_array_parameter if array size needs to be
/* Computes the size in bytes of the array being passed and stores it in
   *SIZE.  Three cases:
     - explicit-size array types carry their size in the type;
     - rank > 1 descriptors call the library size0 helper on the descriptor;
     - otherwise (rank-1 descriptor) the size is computed from the bounds.
   NOTE(review): some source lines are elided in this extract (numbering
   gaps); the else-branch structure is partly out of view.  */
6199 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6202 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6203 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6204 else if (expr->rank > 1)
6205 *size = build_call_expr_loc (input_location,
6206 gfor_fndecl_size0, 1,
6207 gfc_build_addr_expr (NULL, desc));
/* Rank-1 descriptor: extent = max (ubound - lbound + 1, 0).  */
6210 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6211 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6213 *size = fold_build2_loc (input_location, MINUS_EXPR,
6214 gfc_array_index_type, ubound, lbound);
6215 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6216 *size, gfc_index_one_node);
6217 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6218 *size, gfc_index_zero_node);
/* Scale the element count by the element size to get bytes.  */
6220 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6221 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6222 *size, fold_convert (gfc_array_index_type, elem));
6225 /* Convert an array for passing as an actual parameter. */
6226 /* TODO: Optimize passing g77 arrays. */
/* Emits code into SE to pass EXPR as an actual array argument.  G77 set
   means the f77-style convention (pass the base address of contiguous
   data) is wanted; otherwise a descriptor is passed.  Noncontiguous
   arguments are packed via the library pack routine into a temporary
   and unpacked back after the call (see the block near the end).
   FSYM/PROC_NAME are the dummy symbol and procedure name, used for
   warnings and runtime diagnostics.
   NOTE(review): this extract elides source lines (numbering gaps), so
   some declarations/braces are out of view.  */
6229 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6230 const gfc_symbol *fsym, const char *proc_name,
6235 tree tmp = NULL_TREE;
6237 tree parent = DECL_CONTEXT (current_function_decl);
6238 bool full_array_var;
6239 bool this_array_result;
6242 bool array_constructor;
6243 bool good_allocatable;
6244 bool ultimate_ptr_comp;
6245 bool ultimate_alloc_comp;
6250 ultimate_ptr_comp = false;
6251 ultimate_alloc_comp = false;
/* Walk to the last reference and record whether the ultimate component
   is a pointer or an allocatable.  */
6253 for (ref = expr->ref; ref; ref = ref->next)
6255 if (ref->next == NULL)
6258 if (ref->type == REF_COMPONENT)
6260 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6261 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6265 full_array_var = false;
6268 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6269 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6271 sym = full_array_var ? expr->symtree->n.sym : NULL;
6273 /* The symbol should have an array specification. */
6274 gcc_assert (!sym || sym->as || ref->u.ar.as);
/* Character array constructors: compute the string length up front.  */
6276 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6278 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6279 expr->ts.u.cl->backend_decl = tmp;
6280 se->string_length = tmp;
6283 /* Is this the result of the enclosing procedure? */
6284 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6285 if (this_array_result
6286 && (sym->backend_decl != current_function_decl)
6287 && (sym->backend_decl != parent))
6288 this_array_result = false;
6290 /* Passing address of the array if it is not pointer or assumed-shape. */
6291 if (full_array_var && g77 && !this_array_result)
6293 tmp = gfc_get_symbol_decl (sym);
6295 if (sym->ts.type == BT_CHARACTER)
6296 se->string_length = sym->ts.u.cl->backend_decl;
6298 if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6300 gfc_conv_expr_descriptor (se, expr, ss);
6301 se->expr = gfc_conv_array_data (se->expr);
6305 if (!sym->attr.pointer
6307 && sym->as->type != AS_ASSUMED_SHAPE
6308 && !sym->attr.allocatable)
6310 /* Some variables are declared directly, others are declared as
6311 pointers and allocated on the heap. */
6312 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6315 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6317 array_parameter_size (tmp, expr, size);
6321 if (sym->attr.allocatable)
6323 if (sym->attr.dummy || sym->attr.result)
6325 gfc_conv_expr_descriptor (se, expr, ss);
6329 array_parameter_size (tmp, expr, size);
6330 se->expr = gfc_conv_array_data (tmp);
6335 /* A convenient reduction in scope. */
6336 contiguous = g77 && !this_array_result && contiguous;
6338 /* There is no need to pack and unpack the array, if it is contiguous
6339 and not a deferred- or assumed-shape array, or if it is simply
6341 no_pack = ((sym && sym->as
6342 && !sym->attr.pointer
6343 && sym->as->type != AS_DEFERRED
6344 && sym->as->type != AS_ASSUMED_SHAPE)
6346 (ref && ref->u.ar.as
6347 && ref->u.ar.as->type != AS_DEFERRED
6348 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6350 gfc_is_simply_contiguous (expr, false));
6352 no_pack = contiguous && no_pack;
6354 /* Array constructors are always contiguous and do not need packing. */
6355 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6357 /* Same is true of contiguous sections from allocatable variables. */
6358 good_allocatable = contiguous
6360 && expr->symtree->n.sym->attr.allocatable;
6362 /* Or ultimate allocatable components. */
6363 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
/* Contiguous-enough cases: pass the raw data pointer, no temporary.  */
6365 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6367 gfc_conv_expr_descriptor (se, expr, ss);
6368 if (expr->ts.type == BT_CHARACTER)
6369 se->string_length = expr->ts.u.cl->backend_decl;
6371 array_parameter_size (se->expr, expr, size);
6372 se->expr = gfc_conv_array_data (se->expr);
6376 if (this_array_result)
6378 /* Result of the enclosing function. */
6379 gfc_conv_expr_descriptor (se, expr, ss);
6381 array_parameter_size (se->expr, expr, size);
6382 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6384 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
6385 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
6386 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
6393 /* Every other type of array. */
6394 se->want_pointer = 1;
6395 gfc_conv_expr_descriptor (se, expr, ss);
6397 array_parameter_size (build_fold_indirect_ref_loc (input_location,
6402 /* Deallocate the allocatable components of structures that are
6404 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
6405 && expr->ts.u.derived->attr.alloc_comp
6406 && expr->expr_type != EXPR_VARIABLE)
6408 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
6409 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
6411 /* The components shall be deallocated before their containing entity. */
6412 gfc_prepend_expr_to_block (&se->post, tmp);
/* Packing path: needed for the f77 convention, or when the dummy is
   declared CONTIGUOUS but the actual is not known to be contiguous.  */
6415 if (g77 || (fsym && fsym->attr.contiguous
6416 && !gfc_is_simply_contiguous (expr, false)))
6418 tree origptr = NULL_TREE;
6422 /* For contiguous arrays, save the original value of the descriptor. */
6425 origptr = gfc_create_var (pvoid_type_node, "origptr");
6426 tmp = build_fold_indirect_ref_loc (input_location, desc);
6427 tmp = gfc_conv_array_data (tmp);
6428 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6429 TREE_TYPE (origptr), origptr,
6430 fold_convert (TREE_TYPE (origptr), tmp));
6431 gfc_add_expr_to_block (&se->pre, tmp);
6434 /* Repack the array. */
6435 if (gfc_option.warn_array_temp)
6438 gfc_warning ("Creating array temporary at %L for argument '%s'",
6439 &expr->where, fsym->name);
6441 gfc_warning ("Creating array temporary at %L", &expr->where);
6444 ptr = build_call_expr_loc (input_location,
6445 gfor_fndecl_in_pack, 1, desc);
/* An absent OPTIONAL actual must be forwarded as a null pointer,
   not packed.  */
6447 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6449 tmp = gfc_conv_expr_present (sym);
6450 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
6451 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
6452 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
6455 ptr = gfc_evaluate_now (ptr, &se->pre);
6457 /* Use the packed data for the actual argument, except for contiguous arrays,
6458 where the descriptor's data component is set. */
6463 tmp = build_fold_indirect_ref_loc (input_location, desc);
6464 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
/* -fcheck=array-temps: emit a runtime notification when a temporary
   was actually created (packed pointer differs from the original).  */
6467 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
6471 if (fsym && proc_name)
6472 asprintf (&msg, "An array temporary was created for argument "
6473 "'%s' of procedure '%s'", fsym->name, proc_name);
6475 asprintf (&msg, "An array temporary was created");
6477 tmp = build_fold_indirect_ref_loc (input_location,
6479 tmp = gfc_conv_array_data (tmp);
6480 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6481 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6483 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6484 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6486 gfc_conv_expr_present (sym), tmp);
6488 gfc_trans_runtime_check (false, true, tmp, &se->pre,
/* Post-call block: copy the data back and free the temporary.  */
6493 gfc_start_block (&block);
6495 /* Copy the data back. */
6496 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
6498 tmp = build_call_expr_loc (input_location,
6499 gfor_fndecl_in_unpack, 2, desc, ptr);
6500 gfc_add_expr_to_block (&block, tmp);
6503 /* Free the temporary. */
6504 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
6505 gfc_add_expr_to_block (&block, tmp);
6507 stmt = gfc_finish_block (&block);
6509 gfc_init_block (&block);
6510 /* Only if it was repacked. This code needs to be executed before the
6511 loop cleanup code. */
6512 tmp = build_fold_indirect_ref_loc (input_location,
6514 tmp = gfc_conv_array_data (tmp);
6515 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6516 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6518 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6519 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6521 gfc_conv_expr_present (sym), tmp);
6523 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6525 gfc_add_expr_to_block (&block, tmp);
6526 gfc_add_block_to_block (&block, &se->post);
6528 gfc_init_block (&se->post);
6530 /* Reset the descriptor pointer. */
6533 tmp = build_fold_indirect_ref_loc (input_location, desc);
6534 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
6537 gfc_add_block_to_block (&se->post, &block);
6542 /* Generate code to deallocate an array, if it is allocated. */
/* DESCRIPTOR is an array descriptor tree.  Returns a statement tree that
   deallocates the descriptor's data (the library status argument makes
   deallocation of an already-deallocated array a no-op) and then zeroes
   the data pointer so later allocation-status tests see "unallocated".  */
6545 gfc_trans_dealloc_allocated (tree descriptor)
6551 gfc_start_block (&block);
6553 var = gfc_conv_descriptor_data_get (descriptor);
6556 /* Call array_deallocate with an int * present in the second argument.
6557 Although it is ignored here, it's presence ensures that arrays that
6558 are already deallocated are ignored. */
6559 tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
6560 gfc_add_expr_to_block (&block, tmp);
6562 /* Zero the data pointer. */
6563 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6564 var, build_int_cst (TREE_TYPE (var), 0));
6565 gfc_add_expr_to_block (&block, tmp);
6567 return gfc_finish_block (&block);
6571 /* This helper function calculates the size in words of a full array. */
/* DECL is an array descriptor and RANK its rank.  Computes the total
   element count as (ubound - lbound + 1) of the last dimension times
   that dimension's stride, evaluating intermediates into BLOCK.
   Returns the count as a temporary of gfc_array_index_type.  */
6574 get_full_array_size (stmtblock_t *block, tree decl, int rank)
6579 idx = gfc_rank_cst[rank - 1];
6580 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
6581 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
/* Extent of the last dimension: ubound - lbound + 1.  */
6582 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
6584 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6585 tmp, gfc_index_one_node);
6586 tmp = gfc_evaluate_now (tmp, block);
/* Multiply by the stride of the last dimension to cover all
   lower dimensions.  */
6588 nelems = gfc_conv_descriptor_stride_get (decl, idx);
6589 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6591 return gfc_evaluate_now (tmp, block);
6595 /* Allocate dest to the same size as src, and copy src -> dest.
6596 If no_malloc is set, only the copy is done. */
/* Work horse shared by gfc_duplicate_allocatable and
   gfc_copy_allocatable_data.  Builds two alternative statement lists:
   NULL_DATA (taken when SRC's data pointer is null) and the
   allocate-and-memcpy path, then guards them with a COND_EXPR on
   "src data != NULL".  Handles both scalar allocatables (non-descriptor
   TYPE) and array descriptors.
   NOTE(review): several source lines are elided here (numbering gaps),
   including the scalar/descriptor branch structure.  */
6599 duplicate_allocatable (tree dest, tree src, tree type, int rank,
6609 /* If the source is null, set the destination to null. Then,
6610 allocate memory to the destination. */
6611 gfc_init_block (&block);
6615 tmp = null_pointer_node;
6616 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
6617 gfc_add_expr_to_block (&block, tmp);
6618 null_data = gfc_finish_block (&block);
6620 gfc_init_block (&block);
6621 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
6624 tmp = gfc_call_malloc (&block, type, size);
6625 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6626 dest, fold_convert (type, tmp));
6627 gfc_add_expr_to_block (&block, tmp);
6630 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6631 tmp = build_call_expr_loc (input_location, tmp, 3,
/* Descriptor case: null branch clears DEST's data pointer.  */
6636 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
6637 null_data = gfc_finish_block (&block);
6639 gfc_init_block (&block);
6640 nelems = get_full_array_size (&block, src, rank);
6641 tmp = fold_convert (gfc_array_index_type,
6642 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
6643 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6647 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
6648 tmp = gfc_call_malloc (&block, tmp, size);
6649 gfc_conv_descriptor_data_set (&block, dest, tmp);
6652 /* We know the temporary and the value will be the same length,
6653 so can use memcpy. */
6654 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6655 tmp = build_call_expr_loc (input_location,
6656 tmp, 3, gfc_conv_descriptor_data_get (dest),
6657 gfc_conv_descriptor_data_get (src), size);
6660 gfc_add_expr_to_block (&block, tmp);
6661 tmp = gfc_finish_block (&block);
6663 /* Null the destination if the source is null; otherwise do
6664 the allocate and copy. */
6668 null_cond = gfc_conv_descriptor_data_get (src);
6670 null_cond = convert (pvoid_type_node, null_cond);
6671 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6672 null_cond, null_pointer_node);
6673 return build3_v (COND_EXPR, null_cond, tmp, null_data);
6677 /* Allocate dest to the same size as src, and copy data src -> dest. */
/* Thin wrapper over duplicate_allocatable with no_malloc == false:
   allocates DEST and copies; returns the generated statement tree.  */
6680 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
6682 return duplicate_allocatable (dest, src, type, rank, false);
6686 /* Copy data src -> dest. */
/* Thin wrapper over duplicate_allocatable with no_malloc == true:
   DEST is assumed already allocated, only the copy is generated.  */
6689 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
6691 return duplicate_allocatable (dest, src, type, rank, true);
6695 /* Recursively traverse an object of derived type, generating code to
6696 deallocate, nullify or copy allocatable components. This is the work horse
6697 function for the functions named in this enum. */
6699 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
6700 COPY_ONLY_ALLOC_COMP};
/* DER_TYPE describes the derived type of the object DECL; DEST is the
   destination for the COPY_* purposes (NULL_TREE otherwise); RANK is the
   array rank of DECL (0 for scalars); PURPOSE selects the action from
   the enum above.  Returns the generated statement tree.
   For arrays it builds a scalarizer loop over the elements and recurses
   on each element; for a scalar it switches on PURPOSE per component.
   NOTE(review): this extract elides source lines (numbering gaps), so
   some declarations, braces and switch scaffolding are out of view.  */
6703 structure_alloc_comps (gfc_symbol * der_type, tree decl,
6704 tree dest, int rank, int purpose)
6708 stmtblock_t fnblock;
6709 stmtblock_t loopbody;
6720 tree null_cond = NULL_TREE;
6722 gfc_init_block (&fnblock);
6724 decl_type = TREE_TYPE (decl);
6726 if ((POINTER_TYPE_P (decl_type) && rank != 0)
6727 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
6729 decl = build_fold_indirect_ref_loc (input_location,
6732 /* Just in case in gets dereferenced. */
6733 decl_type = TREE_TYPE (decl);
6735 /* If this an array of derived types with allocatable components
6736 build a loop and recursively call this function. */
6737 if (TREE_CODE (decl_type) == ARRAY_TYPE
6738 || GFC_DESCRIPTOR_TYPE_P (decl_type))
6740 tmp = gfc_conv_array_data (decl);
6741 var = build_fold_indirect_ref_loc (input_location,
6744 /* Get the number of elements - 1 and set the counter. */
6745 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
6747 /* Use the descriptor for an allocatable array. Since this
6748 is a full array reference, we only need the descriptor
6749 information from dimension = rank. */
6750 tmp = get_full_array_size (&fnblock, decl, rank);
6751 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6752 gfc_array_index_type, tmp,
6753 gfc_index_one_node);
/* Guard the whole loop on the data pointer being non-null.  */
6755 null_cond = gfc_conv_descriptor_data_get (decl);
6756 null_cond = fold_build2_loc (input_location, NE_EXPR,
6757 boolean_type_node, null_cond,
6758 build_int_cst (TREE_TYPE (null_cond), 0));
6762 /* Otherwise use the TYPE_DOMAIN information. */
6763 tmp = array_type_nelts (decl_type);
6764 tmp = fold_convert (gfc_array_index_type, tmp);
6767 /* Remember that this is, in fact, the no. of elements - 1. */
6768 nelems = gfc_evaluate_now (tmp, &fnblock);
6769 index = gfc_create_var (gfc_array_index_type, "S");
6771 /* Build the body of the loop. */
6772 gfc_init_block (&loopbody);
6774 vref = gfc_build_array_ref (var, index, NULL);
6776 if (purpose == COPY_ALLOC_COMP)
6778 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
/* Duplicate the outer allocatable itself once, before the
   per-element recursion below.  */
6780 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
6781 gfc_add_expr_to_block (&fnblock, tmp);
6783 tmp = build_fold_indirect_ref_loc (input_location,
6784 gfc_conv_array_data (dest));
6785 dref = gfc_build_array_ref (tmp, index, NULL);
6786 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
6788 else if (purpose == COPY_ONLY_ALLOC_COMP)
6790 tmp = build_fold_indirect_ref_loc (input_location,
6791 gfc_conv_array_data (dest));
6792 dref = gfc_build_array_ref (tmp, index, NULL);
6793 tmp = structure_alloc_comps (der_type, vref, dref, rank,
6797 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
6799 gfc_add_expr_to_block (&loopbody, tmp);
6801 /* Build the loop and return. */
6802 gfc_init_loopinfo (&loop);
6804 loop.from[0] = gfc_index_zero_node;
6805 loop.loopvar[0] = index;
6806 loop.to[0] = nelems;
6807 gfc_trans_scalarizing_loops (&loop, &loopbody);
6808 gfc_add_block_to_block (&fnblock, &loop.pre);
6810 tmp = gfc_finish_block (&fnblock);
6811 if (null_cond != NULL_TREE)
6812 tmp = build3_v (COND_EXPR, null_cond, tmp,
6813 build_empty_stmt (input_location));
6818 /* Otherwise, act on the components or recursively call self to
6819 act on a chain of components. */
6820 for (c = der_type->components; c; c = c->next)
6822 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
6823 || c->ts.type == BT_CLASS)
6824 && c->ts.u.derived->attr.alloc_comp;
6825 cdecl = c->backend_decl;
6826 ctype = TREE_TYPE (cdecl);
6830 case DEALLOCATE_ALLOC_COMP:
6831 if (cmp_has_alloc_comps && !c->attr.pointer)
6833 /* Do not deallocate the components of ultimate pointer
6835 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6836 decl, cdecl, NULL_TREE);
6837 rank = c->as ? c->as->rank : 0;
6838 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6840 gfc_add_expr_to_block (&fnblock, tmp);
6843 if (c->attr.allocatable
6844 && (c->attr.dimension || c->attr.codimension))
6846 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6847 decl, cdecl, NULL_TREE);
6848 tmp = gfc_trans_dealloc_allocated (comp);
6849 gfc_add_expr_to_block (&fnblock, tmp);
6851 else if (c->attr.allocatable
6853 /* Allocatable scalar components. */
6854 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6855 decl, cdecl, NULL_TREE);
6857 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6859 gfc_add_expr_to_block (&fnblock, tmp);
/* Null the component pointer after deallocation.  */
6861 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6862 void_type_node, comp,
6863 build_int_cst (TREE_TYPE (comp), 0));
6864 gfc_add_expr_to_block (&fnblock, tmp);
6866 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6868 /* Allocatable scalar CLASS components. */
6869 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6870 decl, cdecl, NULL_TREE);
6872 /* Add reference to '_data' component. */
6873 tmp = CLASS_DATA (c)->backend_decl;
6874 comp = fold_build3_loc (input_location, COMPONENT_REF,
6875 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6877 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6878 CLASS_DATA (c)->ts);
6879 gfc_add_expr_to_block (&fnblock, tmp);
6881 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6882 void_type_node, comp,
6883 build_int_cst (TREE_TYPE (comp), 0));
6884 gfc_add_expr_to_block (&fnblock, tmp);
6888 case NULLIFY_ALLOC_COMP:
6889 if (c->attr.pointer)
6891 else if (c->attr.allocatable
6892 && (c->attr.dimension|| c->attr.codimension))
6894 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6895 decl, cdecl, NULL_TREE);
6896 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
6898 else if (c->attr.allocatable)
6900 /* Allocatable scalar components. */
6901 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6902 decl, cdecl, NULL_TREE);
6903 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6904 void_type_node, comp,
6905 build_int_cst (TREE_TYPE (comp), 0));
6906 gfc_add_expr_to_block (&fnblock, tmp);
6908 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6910 /* Allocatable scalar CLASS components. */
6911 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6912 decl, cdecl, NULL_TREE);
6913 /* Add reference to '_data' component. */
6914 tmp = CLASS_DATA (c)->backend_decl;
6915 comp = fold_build3_loc (input_location, COMPONENT_REF,
6916 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6917 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6918 void_type_node, comp,
6919 build_int_cst (TREE_TYPE (comp), 0));
6920 gfc_add_expr_to_block (&fnblock, tmp);
6922 else if (cmp_has_alloc_comps)
6924 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6925 decl, cdecl, NULL_TREE);
6926 rank = c->as ? c->as->rank : 0;
6927 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6929 gfc_add_expr_to_block (&fnblock, tmp);
6933 case COPY_ALLOC_COMP:
6934 if (c->attr.pointer)
6937 /* We need source and destination components. */
6938 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
6940 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
6942 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
6944 if (c->attr.allocatable && !cmp_has_alloc_comps)
6946 rank = c->as ? c->as->rank : 0;
6947 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
6948 gfc_add_expr_to_block (&fnblock, tmp);
6951 if (cmp_has_alloc_comps)
6953 rank = c->as ? c->as->rank : 0;
6954 tmp = fold_convert (TREE_TYPE (dcmp), comp);
/* Shallow-copy first, then recurse to deep-copy the
   allocatable components.  */
6955 gfc_add_modify (&fnblock, dcmp, tmp);
6956 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
6958 gfc_add_expr_to_block (&fnblock, tmp);
6968 return gfc_finish_block (&fnblock);
6971 /* Recursively traverse an object of derived type, generating code to
6972 nullify allocatable components. */
/* Public wrapper over structure_alloc_comps; returns the statement tree.  */
6975 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6977 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6978 NULLIFY_ALLOC_COMP);
6982 /* Recursively traverse an object of derived type, generating code to
6983 deallocate allocatable components. */
/* Public wrapper over structure_alloc_comps; returns the statement tree.  */
6986 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6988 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6989 DEALLOCATE_ALLOC_COMP);
6993 /* Recursively traverse an object of derived type, generating code to
6994 copy it and its allocatable components. */
/* Public wrapper over structure_alloc_comps (deep copy DECL -> DEST).  */
6997 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
6999 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
7003 /* Recursively traverse an object of derived type, generating code to
7004 copy only its allocatable components. */
/* Public wrapper over structure_alloc_comps (components only, not the
   containing object itself).  */
7007 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7009 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
7013 /* Returns the value of LBOUND for an expression. This could be broken out
7014 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
7015 called by gfc_alloc_allocatable_for_assignment. */
/* EXPR/DESC describe the array; DIM is the zero-based dimension;
   ASSUMED_SIZE flags an assumed-size spec.  Returns a tree for the
   standard-conforming LBOUND value: the descriptor lbound where the
   dimension is known non-empty (or the last dim of an assumed-size
   array), else 1.
   NOTE(review): lines are elided in this extract (numbering gaps);
   e.g. the second operand of the GE_EXPR at 7032 is out of view.  */
7017 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
7022 tree cond, cond1, cond3, cond4;
7026 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
7028 tmp = gfc_rank_cst[dim];
7029 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
7030 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
7031 stride = gfc_conv_descriptor_stride_get (desc, tmp);
7032 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7034 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7035 stride, gfc_index_zero_node);
7036 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7037 boolean_type_node, cond3, cond1);
7038 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
7039 stride, gfc_index_zero_node);
7041 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7042 tmp, build_int_cst (gfc_array_index_type,
7045 cond = boolean_false_node;
7047 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7048 boolean_type_node, cond3, cond4);
7049 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7050 boolean_type_node, cond, cond1);
/* cond ? lbound : 1.  */
7052 return fold_build3_loc (input_location, COND_EXPR,
7053 gfc_array_index_type, cond,
7054 lbound, gfc_index_one_node);
7056 else if (expr->expr_type == EXPR_VARIABLE)
7058 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
/* Drill down to the component type for full-array component refs.  */
7059 for (ref = expr->ref; ref; ref = ref->next)
7061 if (ref->type == REF_COMPONENT
7062 && ref->u.c.component->as
7064 && ref->next->u.ar.type == AR_FULL)
7065 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
7067 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
7069 else if (expr->expr_type == EXPR_FUNCTION)
7071 /* A conversion function, so use the argument. */
7072 expr = expr->value.function.actual->expr;
7073 if (expr->expr_type != EXPR_VARIABLE)
7074 return gfc_index_one_node;
7075 desc = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7076 return get_std_lbound (expr, desc, dim, assumed_size);
7079 return gfc_index_one_node;
7083 /* Returns true if an expression represents an lhs that can be reallocated
/* Predicate used by (re)allocation-on-assignment: true for a full
   reference to an allocatable variable, or a full-array reference to an
   ultimate allocatable component.
   NOTE(review): lines are elided in this extract (numbering gaps), so
   some early-return statements are out of view.  */
7087 gfc_is_reallocatable_lhs (gfc_expr *expr)
7094 /* An allocatable variable. */
7095 if (expr->symtree->n.sym->attr.allocatable
7097 && expr->ref->type == REF_ARRAY
7098 && expr->ref->u.ar.type == AR_FULL)
7101 /* All that can be left are allocatable components. */
7102 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
7103 && expr->symtree->n.sym->ts.type != BT_CLASS)
7104 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
7107 /* Find a component ref followed by an array reference. */
7108 for (ref = expr->ref; ref; ref = ref->next)
7110 && ref->type == REF_COMPONENT
7111 && ref->next->type == REF_ARRAY
7112 && !ref->next->next)
7118 /* Return true if valid reallocatable lhs. */
7119 if (ref->u.c.component->attr.allocatable
7120 && ref->next->u.ar.type == AR_FULL)
7127 /* Allocate the lhs of an assignment to an allocatable array, otherwise
7131 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
7135 stmtblock_t realloc_block;
7136 stmtblock_t alloc_block;
7159 gfc_array_spec * as;
7161 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
7162 Find the lhs expression in the loop chain and set expr1 and
7163 expr2 accordingly. */
7164 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
7167 /* Find the ss for the lhs. */
7169 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7170 if (lss->expr && lss->expr->expr_type == EXPR_VARIABLE)
7172 if (lss == gfc_ss_terminator)
7177 /* Bail out if this is not a valid allocate on assignment. */
7178 if (!gfc_is_reallocatable_lhs (expr1)
7179 || (expr2 && !expr2->rank))
7182 /* Find the ss for the lhs. */
7184 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7185 if (lss->expr == expr1)
7188 if (lss == gfc_ss_terminator)
7191 /* Find an ss for the rhs. For operator expressions, we see the
7192 ss's for the operands. Any one of these will do. */
7194 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7195 if (rss->expr != expr1 && rss != loop->temp_ss)
7198 if (expr2 && rss == gfc_ss_terminator)
7201 gfc_start_block (&fblock);
7203 /* Since the lhs is allocatable, this must be a descriptor type.
7204 Get the data and array size. */
7205 desc = lss->data.info.descriptor;
7206 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
7207 array1 = gfc_conv_descriptor_data_get (desc);
7209 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
7210 deallocated if expr is an array of different shape or any of the
7211 corresponding length type parameter values of variable and expr
7212 differ." This assures F95 compatibility. */
7213 jump_label1 = gfc_build_label_decl (NULL_TREE);
7214 jump_label2 = gfc_build_label_decl (NULL_TREE);
7216 /* Allocate if data is NULL. */
7217 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7218 array1, build_int_cst (TREE_TYPE (array1), 0));
7219 tmp = build3_v (COND_EXPR, cond,
7220 build1_v (GOTO_EXPR, jump_label1),
7221 build_empty_stmt (input_location));
7222 gfc_add_expr_to_block (&fblock, tmp);
7224 /* Get arrayspec if expr is a full array. */
7225 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7226 && expr2->value.function.isym
7227 && expr2->value.function.isym->conversion)
7229 /* For conversion functions, take the arg. */
7230 gfc_expr *arg = expr2->value.function.actual->expr;
7231 as = gfc_get_full_arrayspec_from_expr (arg);
7234 as = gfc_get_full_arrayspec_from_expr (expr2);
7238 /* If the lhs shape is not the same as the rhs jump to setting the
7239 bounds and doing the reallocation....... */
7240 for (n = 0; n < expr1->rank; n++)
7242 /* Check the shape. */
7243 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7244 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7245 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7246 gfc_array_index_type,
7247 loop->to[n], loop->from[n]);
7248 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7249 gfc_array_index_type,
7251 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7252 gfc_array_index_type,
7254 cond = fold_build2_loc (input_location, NE_EXPR,
7256 tmp, gfc_index_zero_node);
7257 tmp = build3_v (COND_EXPR, cond,
7258 build1_v (GOTO_EXPR, jump_label1),
7259 build_empty_stmt (input_location));
7260 gfc_add_expr_to_block (&fblock, tmp);
7263 /* ....else jump past the (re)alloc code. */
7264 tmp = build1_v (GOTO_EXPR, jump_label2);
7265 gfc_add_expr_to_block (&fblock, tmp);
7267 /* Add the label to start automatic (re)allocation. */
7268 tmp = build1_v (LABEL_EXPR, jump_label1);
7269 gfc_add_expr_to_block (&fblock, tmp);
7271 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7273 /* Get the rhs size. Fix both sizes. */
7275 desc2 = rss->data.info.descriptor;
7278 size2 = gfc_index_one_node;
7279 for (n = 0; n < expr2->rank; n++)
7281 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7282 gfc_array_index_type,
7283 loop->to[n], loop->from[n]);
7284 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7285 gfc_array_index_type,
7286 tmp, gfc_index_one_node);
7287 size2 = fold_build2_loc (input_location, MULT_EXPR,
7288 gfc_array_index_type,
7292 size1 = gfc_evaluate_now (size1, &fblock);
7293 size2 = gfc_evaluate_now (size2, &fblock);
7295 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7297 neq_size = gfc_evaluate_now (cond, &fblock);
7300 /* Now modify the lhs descriptor and the associated scalarizer
7301 variables. F2003 7.4.1.3: "If variable is or becomes an
7302 unallocated allocatable variable, then it is allocated with each
7303 deferred type parameter equal to the corresponding type parameters
7304 of expr , with the shape of expr , and with each lower bound equal
7305 to the corresponding element of LBOUND(expr)."
7306 Reuse size1 to keep a dimension-by-dimension track of the
7307 stride of the new array. */
7308 size1 = gfc_index_one_node;
7309 offset = gfc_index_zero_node;
7311 for (n = 0; n < expr2->rank; n++)
7313 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7314 gfc_array_index_type,
7315 loop->to[n], loop->from[n]);
7316 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7317 gfc_array_index_type,
7318 tmp, gfc_index_one_node);
7320 lbound = gfc_index_one_node;
7325 lbd = get_std_lbound (expr2, desc2, n,
7326 as->type == AS_ASSUMED_SIZE);
7327 ubound = fold_build2_loc (input_location,
7329 gfc_array_index_type,
7331 ubound = fold_build2_loc (input_location,
7333 gfc_array_index_type,
7338 gfc_conv_descriptor_lbound_set (&fblock, desc,
7341 gfc_conv_descriptor_ubound_set (&fblock, desc,
7344 gfc_conv_descriptor_stride_set (&fblock, desc,
7347 lbound = gfc_conv_descriptor_lbound_get (desc,
7349 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
7350 gfc_array_index_type,
7352 offset = fold_build2_loc (input_location, MINUS_EXPR,
7353 gfc_array_index_type,
7355 size1 = fold_build2_loc (input_location, MULT_EXPR,
7356 gfc_array_index_type,
7360 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
7361 the array offset is saved and the info.offset is used for a
7362 running offset. Use the saved_offset instead. */
7363 tmp = gfc_conv_descriptor_offset (desc);
7364 gfc_add_modify (&fblock, tmp, offset);
7365 if (lss->data.info.saved_offset
7366 && TREE_CODE (lss->data.info.saved_offset) == VAR_DECL)
7367 gfc_add_modify (&fblock, lss->data.info.saved_offset, tmp);
7369 /* Now set the deltas for the lhs. */
7370 for (n = 0; n < expr1->rank; n++)
7372 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7373 dim = lss->data.info.dim[n];
7374 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7375 gfc_array_index_type, tmp,
7377 if (lss->data.info.delta[dim]
7378 && TREE_CODE (lss->data.info.delta[dim]) == VAR_DECL)
7379 gfc_add_modify (&fblock, lss->data.info.delta[dim], tmp);
7382 /* Get the new lhs size in bytes. */
7383 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
7385 tmp = expr2->ts.u.cl->backend_decl;
7386 gcc_assert (expr1->ts.u.cl->backend_decl);
7387 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
7388 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
7390 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
7392 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
7393 tmp = fold_build2_loc (input_location, MULT_EXPR,
7394 gfc_array_index_type, tmp,
7395 expr1->ts.u.cl->backend_decl);
7398 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
7399 tmp = fold_convert (gfc_array_index_type, tmp);
7400 size2 = fold_build2_loc (input_location, MULT_EXPR,
7401 gfc_array_index_type,
7403 size2 = fold_convert (size_type_node, size2);
7404 size2 = gfc_evaluate_now (size2, &fblock);
7406 /* Realloc expression. Note that the scalarizer uses desc.data
7407 in the array reference - (*desc.data)[<element>]. */
7408 gfc_init_block (&realloc_block);
7409 tmp = build_call_expr_loc (input_location,
7410 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
7411 fold_convert (pvoid_type_node, array1),
7413 gfc_conv_descriptor_data_set (&realloc_block,
7415 realloc_expr = gfc_finish_block (&realloc_block);
7417 /* Only reallocate if sizes are different. */
7418 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
7419 build_empty_stmt (input_location));
7423 /* Malloc expression. */
7424 gfc_init_block (&alloc_block);
7425 tmp = build_call_expr_loc (input_location,
7426 builtin_decl_explicit (BUILT_IN_MALLOC),
7428 gfc_conv_descriptor_data_set (&alloc_block,
7430 tmp = gfc_conv_descriptor_dtype (desc);
7431 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
7432 alloc_expr = gfc_finish_block (&alloc_block);
7434 /* Malloc if not allocated; realloc otherwise. */
7435 tmp = build_int_cst (TREE_TYPE (array1), 0);
7436 cond = fold_build2_loc (input_location, EQ_EXPR,
7439 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
7440 gfc_add_expr_to_block (&fblock, tmp);
7442 /* Make sure that the scalarizer data pointer is updated. */
7443 if (lss->data.info.data
7444 && TREE_CODE (lss->data.info.data) == VAR_DECL)
7446 tmp = gfc_conv_descriptor_data_get (desc);
7447 gfc_add_modify (&fblock, lss->data.info.data, tmp);
7450 /* Add the exit label. */
7451 tmp = build1_v (LABEL_EXPR, jump_label2);
7452 gfc_add_expr_to_block (&fblock, tmp);
7454 return gfc_finish_block (&fblock);
7458 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
7459 Do likewise, recursively if necessary, with the allocatable components of
derived types.  */
7463 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
7469 stmtblock_t cleanup;
7472 bool sym_has_alloc_comp;
7474 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
7475 || sym->ts.type == BT_CLASS)
7476 && sym->ts.u.derived->attr.alloc_comp;
7478 /* Make sure the frontend gets these right. */
7479 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
7480 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
7481 "allocatable attribute or derived type without allocatable "
7484 gfc_save_backend_locus (&loc);
7485 gfc_set_backend_locus (&sym->declared_at);
7486 gfc_init_block (&init);
7488 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
7489 || TREE_CODE (sym->backend_decl) == PARM_DECL);
7491 if (sym->ts.type == BT_CHARACTER
7492 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
7494 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
7495 gfc_trans_vla_type_sizes (sym, &init);
7498 /* Dummy, use associated and result variables don't need anything special. */
7499 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
7501 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7502 gfc_restore_backend_locus (&loc);
7506 descriptor = sym->backend_decl;
7508 /* Although static, derived types with default initializers and
7509 allocatable components must not be nulled wholesale; instead they
7510 are treated component by component. */
7511 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
7513 /* SAVEd variables are not freed on exit. */
7514 gfc_trans_static_array_pointer (sym);
7516 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7517 gfc_restore_backend_locus (&loc);
7521 /* Get the descriptor type. */
7522 type = TREE_TYPE (sym->backend_decl);
7524 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
7527 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
7529 if (sym->value == NULL
7530 || !gfc_has_default_initializer (sym->ts.u.derived))
7532 rank = sym->as ? sym->as->rank : 0;
7533 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
7535 gfc_add_expr_to_block (&init, tmp);
7538 gfc_init_default_dt (sym, &init, false);
7541 else if (!GFC_DESCRIPTOR_TYPE_P (type))
7543 /* If the backend_decl is not a descriptor, we must have a pointer
7545 descriptor = build_fold_indirect_ref_loc (input_location,
7547 type = TREE_TYPE (descriptor);
7550 /* NULLIFY the data pointer. */
7551 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
7552 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
7554 gfc_restore_backend_locus (&loc);
7555 gfc_init_block (&cleanup);
7557 /* Allocatable arrays need to be freed when they go out of scope.
7558 The allocatable components of pointers must not be touched. */
7559 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
7560 && !sym->attr.pointer && !sym->attr.save)
7563 rank = sym->as ? sym->as->rank : 0;
7564 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
7565 gfc_add_expr_to_block (&cleanup, tmp);
7568 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
7569 && !sym->attr.save && !sym->attr.result)
7571 tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
7572 gfc_add_expr_to_block (&cleanup, tmp);
7575 gfc_add_init_cleanup (block, gfc_finish_block (&init),
7576 gfc_finish_block (&cleanup));
7579 /************ Expression Walking Functions ******************/
7581 /* Walk a variable reference.
7583 Possible extension - multiple component subscripts.
7584 x(:,:) = foo%a(:)%b(:)
7586 forall (i=..., j=...)
7587 x(i,j) = foo%a(j)%b(i)
7589 This adds a fair amount of complexity because you need to deal with more
7590 than one ref. Maybe handle in a similar manner to vector subscripts.
7591 Maybe not worth the effort. */
7595 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
7599 for (ref = expr->ref; ref; ref = ref->next)
7600 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
7603 return gfc_walk_array_ref (ss, expr, ref);
7608 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
7614 for (; ref; ref = ref->next)
7616 if (ref->type == REF_SUBSTRING)
7618 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
7619 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
7622 /* We're only interested in array sections from now on. */
7623 if (ref->type != REF_ARRAY)
7631 for (n = ar->dimen - 1; n >= 0; n--)
7632 ss = gfc_get_scalar_ss (ss, ar->start[n]);
7636 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
7637 newss->data.info.ref = ref;
7639 /* Make sure array is the same as array(:,:), this way
7640 we don't need to special case all the time. */
7641 ar->dimen = ar->as->rank;
7642 for (n = 0; n < ar->dimen; n++)
7644 ar->dimen_type[n] = DIMEN_RANGE;
7646 gcc_assert (ar->start[n] == NULL);
7647 gcc_assert (ar->end[n] == NULL);
7648 gcc_assert (ar->stride[n] == NULL);
7654 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
7655 newss->data.info.ref = ref;
7657 /* We add SS chains for all the subscripts in the section. */
7658 for (n = 0; n < ar->dimen; n++)
7662 switch (ar->dimen_type[n])
7665 /* Add SS for elemental (scalar) subscripts. */
7666 gcc_assert (ar->start[n]);
7667 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
7668 indexss->loop_chain = gfc_ss_terminator;
7669 newss->data.info.subscript[n] = indexss;
7673 /* We don't add anything for sections, just remember this
7674 dimension for later. */
7675 newss->data.info.dim[newss->data.info.dimen] = n;
7676 newss->data.info.dimen++;
7680 /* Create a GFC_SS_VECTOR index in which we can store
7681 the vector's descriptor. */
7682 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
7684 indexss->loop_chain = gfc_ss_terminator;
7685 newss->data.info.subscript[n] = indexss;
7686 newss->data.info.dim[newss->data.info.dimen] = n;
7687 newss->data.info.dimen++;
7691 /* We should know what sort of section it is by now. */
7695 /* We should have at least one non-elemental dimension,
7696 unless we are creating a descriptor for a (scalar) coarray. */
7697 gcc_assert (newss->data.info.dimen > 0
7698 || newss->data.info.ref->u.ar.as->corank > 0);
7703 /* We should know what sort of section it is by now. */
7712 /* Walk an expression operator. If only one operand of a binary expression is
7713 scalar, we must also add the scalar term to the SS chain. */
7716 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
7721 head = gfc_walk_subexpr (ss, expr->value.op.op1);
7722 if (expr->value.op.op2 == NULL)
7725 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
7727 /* All operands are scalar. Pass back and let the caller deal with it. */
7731 /* All operands require scalarization. */
7732 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
7735 /* One of the operands needs scalarization, the other is scalar.
7736 Create a gfc_ss for the scalar expression. */
7739 /* First operand is scalar. We build the chain in reverse order, so
7740 add the scalar SS after the second operand. */
7742 while (head && head->next != ss)
7744 /* Check we haven't somehow broken the chain. */
7746 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
7748 else /* head2 == head */
7750 gcc_assert (head2 == head);
7751 /* Second operand is scalar. */
7752 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
7759 /* Reverse a SS chain. */
7762 gfc_reverse_ss (gfc_ss * ss)
7767 gcc_assert (ss != NULL);
7769 head = gfc_ss_terminator;
7770 while (ss != gfc_ss_terminator)
7773 /* Check we didn't somehow break the chain. */
7774 gcc_assert (next != NULL);
7784 /* Walk the arguments of an elemental function. */
7787 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
7795 head = gfc_ss_terminator;
7798 for (; arg; arg = arg->next)
7803 newss = gfc_walk_subexpr (head, arg->expr);
7806 /* Scalar argument. */
7807 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
7808 newss = gfc_get_scalar_ss (head, arg->expr);
7818 while (tail->next != gfc_ss_terminator)
7825 /* If all the arguments are scalar we don't need the argument SS. */
7826 gfc_free_ss_chain (head);
7831 /* Add it onto the existing chain. */
7837 /* Walk a function call. Scalar functions are passed back, and taken out of
7838 scalarization loops. For elemental functions we walk their arguments.
7839 The result of functions returning arrays is stored in a temporary outside
7840 the loop, so that the function is only called once. Hence we do not need
7841 to walk their arguments. */
7844 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
7846 gfc_intrinsic_sym *isym;
7848 gfc_component *comp = NULL;
7850 isym = expr->value.function.isym;
7852 /* Handle intrinsic functions separately. */
7854 return gfc_walk_intrinsic_function (ss, expr, isym);
7856 sym = expr->value.function.esym;
7858 sym = expr->symtree->n.sym;
7860 /* A function that returns arrays. */
7861 gfc_is_proc_ptr_comp (expr, &comp);
7862 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
7863 || (comp && comp->attr.dimension))
7864 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
7866 /* Walk the parameters of an elemental function. For now we always pass
7868 if (sym->attr.elemental)
7869 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
7872 /* Scalar functions are OK as these are evaluated outside the scalarization
7873 loop. Pass back and let the caller deal with it. */
7878 /* An array temporary is constructed for array constructors. */
7881 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
7883 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
7887 /* Walk an expression. Add walked expressions to the head of the SS chain.
7888 A wholly scalar expression will not be added. */
7891 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
7895 switch (expr->expr_type)
7898 head = gfc_walk_variable_expr (ss, expr);
7902 head = gfc_walk_op_expr (ss, expr);
7906 head = gfc_walk_function_expr (ss, expr);
7911 case EXPR_STRUCTURE:
7912 /* Pass back and let the caller deal with it. */
7916 head = gfc_walk_array_constructor (ss, expr);
7919 case EXPR_SUBSTRING:
7920 /* Pass back and let the caller deal with it. */
7924 internal_error ("bad expression type during walk (%d)",
7931 /* Entry point for expression walking.
7932 A return value equal to the passed chain means this is
7933 a scalar expression. It is up to the caller to take whatever action is
7934 necessary to translate these. */
7937 gfc_walk_expr (gfc_expr * expr)
7941 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
7942 return gfc_reverse_ss (res);