1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* trans-array.c-- Various array related code, including scalarization,
25 allocation, initialization and other support routines. */
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
39 If the expression is an assignment, we must then resolve any dependencies.
40 In fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this gfc_make_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
67 For assignment expressions requiring a temporary two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
82 #include "coretypes.h"
85 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
88 #include "constructor.h"
90 #include "trans-stmt.h"
91 #include "trans-types.h"
92 #include "trans-array.h"
93 #include "trans-const.h"
94 #include "dependency.h"
96 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
98 /* The contents of this structure aren't actually used, just the address. */
99 static gfc_ss gfc_ss_terminator_var;
100 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
104 gfc_array_dataptr_type (tree desc)
106 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
110 /* Build expressions to access the members of an array descriptor.
111 It's surprisingly easy to mess up here, so never access
112 an array descriptor by "brute force", always use these
113 functions. This also avoids problems if we change the format
114 of an array descriptor.
116 To understand these magic numbers, look at the comments
117 before gfc_build_array_type() in trans-types.c.
119 The code within these defines should be the only code which knows the format
120 of an array descriptor.
122 Any code just needing to read obtain the bounds of an array should use
123 gfc_conv_array_* rather than the following functions as these will return
124 know constant values, and work with arrays which do not have descriptors.
126 Don't forget to #undef these! */
129 #define OFFSET_FIELD 1
130 #define DTYPE_FIELD 2
131 #define DIMENSION_FIELD 3
132 #define CAF_TOKEN_FIELD 4
134 #define STRIDE_SUBFIELD 0
135 #define LBOUND_SUBFIELD 1
136 #define UBOUND_SUBFIELD 2
138 /* This provides READ-ONLY access to the data field. The field itself
139 doesn't have the proper type. */
142 gfc_conv_descriptor_data_get (tree desc)
146 type = TREE_TYPE (desc);
147 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
149 field = TYPE_FIELDS (type);
150 gcc_assert (DATA_FIELD == 0);
152 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
154 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
159 /* This provides WRITE access to the data field.
161 TUPLES_P is true if we are generating tuples.
163 This function gets called through the following macros:
164 gfc_conv_descriptor_data_set
165 gfc_conv_descriptor_data_set. */
168 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
172 type = TREE_TYPE (desc);
173 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
175 field = TYPE_FIELDS (type);
176 gcc_assert (DATA_FIELD == 0);
178 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
180 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
184 /* This provides address access to the data field. This should only be
185 used by array allocation, passing this on to the runtime. */
188 gfc_conv_descriptor_data_addr (tree desc)
192 type = TREE_TYPE (desc);
193 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
195 field = TYPE_FIELDS (type);
196 gcc_assert (DATA_FIELD == 0);
198 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
200 return gfc_build_addr_expr (NULL_TREE, t);
204 gfc_conv_descriptor_offset (tree desc)
209 type = TREE_TYPE (desc);
210 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
212 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
213 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
215 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
216 desc, field, NULL_TREE);
220 gfc_conv_descriptor_offset_get (tree desc)
222 return gfc_conv_descriptor_offset (desc);
226 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
229 tree t = gfc_conv_descriptor_offset (desc);
230 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
235 gfc_conv_descriptor_dtype (tree desc)
240 type = TREE_TYPE (desc);
241 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
243 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
244 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
246 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
247 desc, field, NULL_TREE);
251 gfc_conv_descriptor_dimension (tree desc, tree dim)
257 type = TREE_TYPE (desc);
258 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
260 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
261 gcc_assert (field != NULL_TREE
262 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
263 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
265 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
266 desc, field, NULL_TREE);
267 tmp = gfc_build_array_ref (tmp, dim, NULL);
273 gfc_conv_descriptor_token (tree desc)
278 type = TREE_TYPE (desc);
279 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
280 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
281 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
282 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
283 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == prvoid_type_node);
285 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
286 desc, field, NULL_TREE);
291 gfc_conv_descriptor_stride (tree desc, tree dim)
296 tmp = gfc_conv_descriptor_dimension (desc, dim);
297 field = TYPE_FIELDS (TREE_TYPE (tmp));
298 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
299 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
301 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
302 tmp, field, NULL_TREE);
307 gfc_conv_descriptor_stride_get (tree desc, tree dim)
309 tree type = TREE_TYPE (desc);
310 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
311 if (integer_zerop (dim)
312 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
313 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
314 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
315 return gfc_index_one_node;
317 return gfc_conv_descriptor_stride (desc, dim);
321 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
322 tree dim, tree value)
324 tree t = gfc_conv_descriptor_stride (desc, dim);
325 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
329 gfc_conv_descriptor_lbound (tree desc, tree dim)
334 tmp = gfc_conv_descriptor_dimension (desc, dim);
335 field = TYPE_FIELDS (TREE_TYPE (tmp));
336 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
337 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
339 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
340 tmp, field, NULL_TREE);
345 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
347 return gfc_conv_descriptor_lbound (desc, dim);
351 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
352 tree dim, tree value)
354 tree t = gfc_conv_descriptor_lbound (desc, dim);
355 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
359 gfc_conv_descriptor_ubound (tree desc, tree dim)
364 tmp = gfc_conv_descriptor_dimension (desc, dim);
365 field = TYPE_FIELDS (TREE_TYPE (tmp));
366 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
367 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
369 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
370 tmp, field, NULL_TREE);
375 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
377 return gfc_conv_descriptor_ubound (desc, dim);
381 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
382 tree dim, tree value)
384 tree t = gfc_conv_descriptor_ubound (desc, dim);
385 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
388 /* Build a null array descriptor constructor. */
391 gfc_build_null_descriptor (tree type)
396 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
397 gcc_assert (DATA_FIELD == 0);
398 field = TYPE_FIELDS (type);
400 /* Set a NULL data pointer. */
401 tmp = build_constructor_single (type, field, null_pointer_node);
402 TREE_CONSTANT (tmp) = 1;
403 /* All other fields are ignored. */
409 /* Modify a descriptor such that the lbound of a given dimension is the value
410 specified. This also updates ubound and offset accordingly. */
413 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
414 int dim, tree new_lbound)
416 tree offs, ubound, lbound, stride;
417 tree diff, offs_diff;
419 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
421 offs = gfc_conv_descriptor_offset_get (desc);
422 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
423 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
424 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
426 /* Get difference (new - old) by which to shift stuff. */
427 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
430 /* Shift ubound and offset accordingly. This has to be done before
431 updating the lbound, as they depend on the lbound expression! */
432 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
434 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
435 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
437 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
439 gfc_conv_descriptor_offset_set (block, desc, offs);
441 /* Finally set lbound to value we want. */
442 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
446 /* Cleanup those #defines. */
451 #undef DIMENSION_FIELD
452 #undef CAF_TOKEN_FIELD
453 #undef STRIDE_SUBFIELD
454 #undef LBOUND_SUBFIELD
455 #undef UBOUND_SUBFIELD
458 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
459 flags & 1 = Main loop body.
460 flags & 2 = temp copy loop. */
463 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
465 for (; ss != gfc_ss_terminator; ss = ss->next)
466 ss->useflags = flags;
469 static void gfc_free_ss (gfc_ss *);
472 /* Free a gfc_ss chain. */
475 gfc_free_ss_chain (gfc_ss * ss)
479 while (ss != gfc_ss_terminator)
481 gcc_assert (ss != NULL);
492 gfc_free_ss (gfc_ss * ss)
499 for (n = 0; n < ss->data.info.dimen; n++)
501 if (ss->data.info.subscript[ss->data.info.dim[n]])
502 gfc_free_ss_chain (ss->data.info.subscript[ss->data.info.dim[n]]);
514 /* Creates and initializes an array type gfc_ss struct. */
517 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
527 info = &ss->data.info;
529 for (i = 0; i < info->dimen; i++)
536 /* Creates and initializes a temporary type gfc_ss struct. */
539 gfc_get_temp_ss (tree type, tree string_length, int dimen)
544 ss->next = gfc_ss_terminator;
545 ss->type = GFC_SS_TEMP;
546 ss->string_length = string_length;
547 ss->data.temp.dimen = dimen;
548 ss->data.temp.type = type;
554 /* Creates and initializes a scalar type gfc_ss struct. */
557 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
563 ss->type = GFC_SS_SCALAR;
570 /* Free all the SS associated with a loop. */
573 gfc_cleanup_loop (gfc_loopinfo * loop)
579 while (ss != gfc_ss_terminator)
581 gcc_assert (ss != NULL);
582 next = ss->loop_chain;
589 /* Associate a SS chain with a loop. */
592 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
596 if (head == gfc_ss_terminator)
600 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
602 if (ss->next == gfc_ss_terminator)
603 ss->loop_chain = loop->ss;
605 ss->loop_chain = ss->next;
607 gcc_assert (ss == gfc_ss_terminator);
612 /* Generate an initializer for a static pointer or allocatable array. */
615 gfc_trans_static_array_pointer (gfc_symbol * sym)
619 gcc_assert (TREE_STATIC (sym->backend_decl));
620 /* Just zero the data member. */
621 type = TREE_TYPE (sym->backend_decl);
622 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
626 /* If the bounds of SE's loop have not yet been set, see if they can be
627 determined from array spec AS, which is the array spec of a called
628 function. MAPPING maps the callee's dummy arguments to the values
629 that the caller is passing. Add any initialization and finalization
633 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
634 gfc_se * se, gfc_array_spec * as)
642 if (as && as->type == AS_EXPLICIT)
643 for (n = 0; n < se->loop->dimen; n++)
645 dim = se->ss->data.info.dim[n];
646 gcc_assert (dim < as->rank);
647 gcc_assert (se->loop->dimen == as->rank);
648 if (se->loop->to[n] == NULL_TREE)
650 /* Evaluate the lower bound. */
651 gfc_init_se (&tmpse, NULL);
652 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
653 gfc_add_block_to_block (&se->pre, &tmpse.pre);
654 gfc_add_block_to_block (&se->post, &tmpse.post);
655 lower = fold_convert (gfc_array_index_type, tmpse.expr);
657 /* ...and the upper bound. */
658 gfc_init_se (&tmpse, NULL);
659 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
660 gfc_add_block_to_block (&se->pre, &tmpse.pre);
661 gfc_add_block_to_block (&se->post, &tmpse.post);
662 upper = fold_convert (gfc_array_index_type, tmpse.expr);
664 /* Set the upper bound of the loop to UPPER - LOWER. */
665 tmp = fold_build2_loc (input_location, MINUS_EXPR,
666 gfc_array_index_type, upper, lower);
667 tmp = gfc_evaluate_now (tmp, &se->pre);
668 se->loop->to[n] = tmp;
674 /* Generate code to allocate an array temporary, or create a variable to
675 hold the data. If size is NULL, zero the descriptor so that the
676 callee will allocate the array. If DEALLOC is true, also generate code to
677 free the array afterwards.
679 If INITIAL is not NULL, it is packed using internal_pack and the result used
680 as data instead of allocating a fresh, unitialized area of memory.
682 Initialization code is added to PRE and finalization code to POST.
683 DYNAMIC is true if the caller may want to extend the array later
684 using realloc. This prevents us from putting the array on the stack. */
687 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
688 gfc_ss_info * info, tree size, tree nelem,
689 tree initial, bool dynamic, bool dealloc)
695 desc = info->descriptor;
696 info->offset = gfc_index_zero_node;
697 if (size == NULL_TREE || integer_zerop (size))
699 /* A callee allocated array. */
700 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
705 /* Allocate the temporary. */
706 onstack = !dynamic && initial == NULL_TREE
707 && (gfc_option.flag_stack_arrays
708 || gfc_can_put_var_on_stack (size));
712 /* Make a temporary variable to hold the data. */
713 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
714 nelem, gfc_index_one_node);
715 tmp = gfc_evaluate_now (tmp, pre);
716 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
718 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
720 tmp = gfc_create_var (tmp, "A");
721 /* If we're here only because of -fstack-arrays we have to
722 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
723 if (!gfc_can_put_var_on_stack (size))
724 gfc_add_expr_to_block (pre,
725 fold_build1_loc (input_location,
726 DECL_EXPR, TREE_TYPE (tmp),
728 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
729 gfc_conv_descriptor_data_set (pre, desc, tmp);
733 /* Allocate memory to hold the data or call internal_pack. */
734 if (initial == NULL_TREE)
736 tmp = gfc_call_malloc (pre, NULL, size);
737 tmp = gfc_evaluate_now (tmp, pre);
744 stmtblock_t do_copying;
746 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
747 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
748 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
749 tmp = gfc_get_element_type (tmp);
750 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
751 packed = gfc_create_var (build_pointer_type (tmp), "data");
753 tmp = build_call_expr_loc (input_location,
754 gfor_fndecl_in_pack, 1, initial);
755 tmp = fold_convert (TREE_TYPE (packed), tmp);
756 gfc_add_modify (pre, packed, tmp);
758 tmp = build_fold_indirect_ref_loc (input_location,
760 source_data = gfc_conv_descriptor_data_get (tmp);
762 /* internal_pack may return source->data without any allocation
763 or copying if it is already packed. If that's the case, we
764 need to allocate and copy manually. */
766 gfc_start_block (&do_copying);
767 tmp = gfc_call_malloc (&do_copying, NULL, size);
768 tmp = fold_convert (TREE_TYPE (packed), tmp);
769 gfc_add_modify (&do_copying, packed, tmp);
770 tmp = gfc_build_memcpy_call (packed, source_data, size);
771 gfc_add_expr_to_block (&do_copying, tmp);
773 was_packed = fold_build2_loc (input_location, EQ_EXPR,
774 boolean_type_node, packed,
776 tmp = gfc_finish_block (&do_copying);
777 tmp = build3_v (COND_EXPR, was_packed, tmp,
778 build_empty_stmt (input_location));
779 gfc_add_expr_to_block (pre, tmp);
781 tmp = fold_convert (pvoid_type_node, packed);
784 gfc_conv_descriptor_data_set (pre, desc, tmp);
787 info->data = gfc_conv_descriptor_data_get (desc);
789 /* The offset is zero because we create temporaries with a zero
791 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
793 if (dealloc && !onstack)
795 /* Free the temporary. */
796 tmp = gfc_conv_descriptor_data_get (desc);
797 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
798 gfc_add_expr_to_block (post, tmp);
803 /* Get the array reference dimension corresponding to the given loop dimension.
804 It is different from the true array dimension given by the dim array in
805 the case of a partial array reference
806 It is different from the loop dimension in the case of a transposed array.
810 get_array_ref_dim (gfc_ss *ss, int loop_dim)
812 int n, array_dim, array_ref_dim;
815 info = &ss->data.info;
818 array_dim = info->dim[loop_dim];
820 for (n = 0; n < info->dimen; n++)
821 if (info->dim[n] < array_dim)
824 return array_ref_dim;
828 /* Generate code to create and initialize the descriptor for a temporary
829 array. This is used for both temporaries needed by the scalarizer, and
830 functions returning arrays. Adjusts the loop variables to be
831 zero-based, and calculates the loop bounds for callee allocated arrays.
832 Allocate the array unless it's callee allocated (we have a callee
833 allocated array if 'callee_alloc' is true, or if loop->to[n] is
834 NULL_TREE for any n). Also fills in the descriptor, data and offset
835 fields of info if known. Returns the size of the array, or NULL for a
836 callee allocated array.
838 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
839 gfc_trans_allocate_array_storage.
843 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
844 gfc_loopinfo * loop, gfc_ss * ss,
845 tree eltype, tree initial, bool dynamic,
846 bool dealloc, bool callee_alloc, locus * where)
849 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
859 memset (from, 0, sizeof (from));
860 memset (to, 0, sizeof (to));
862 info = &ss->data.info;
864 gcc_assert (info->dimen > 0);
865 gcc_assert (loop->dimen == info->dimen);
867 if (gfc_option.warn_array_temp && where)
868 gfc_warning ("Creating array temporary at %L", where);
870 /* Set the lower bound to zero. */
871 for (n = 0; n < loop->dimen; n++)
875 /* Callee allocated arrays may not have a known bound yet. */
877 loop->to[n] = gfc_evaluate_now (
878 fold_build2_loc (input_location, MINUS_EXPR,
879 gfc_array_index_type,
880 loop->to[n], loop->from[n]),
882 loop->from[n] = gfc_index_zero_node;
884 /* We are constructing the temporary's descriptor based on the loop
885 dimensions. As the dimensions may be accessed in arbitrary order
886 (think of transpose) the size taken from the n'th loop may not map
887 to the n'th dimension of the array. We need to reconstruct loop infos
888 in the right order before using it to set the descriptor
890 tmp_dim = get_array_ref_dim (ss, n);
891 from[tmp_dim] = loop->from[n];
892 to[tmp_dim] = loop->to[n];
894 info->delta[dim] = gfc_index_zero_node;
895 info->start[dim] = gfc_index_zero_node;
896 info->end[dim] = gfc_index_zero_node;
897 info->stride[dim] = gfc_index_one_node;
900 /* Initialize the descriptor. */
902 gfc_get_array_type_bounds (eltype, info->dimen, 0, from, to, 1,
903 GFC_ARRAY_UNKNOWN, true);
904 desc = gfc_create_var (type, "atmp");
905 GFC_DECL_PACKED_ARRAY (desc) = 1;
907 info->descriptor = desc;
908 size = gfc_index_one_node;
910 /* Fill in the array dtype. */
911 tmp = gfc_conv_descriptor_dtype (desc);
912 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
915 Fill in the bounds and stride. This is a packed array, so:
918 for (n = 0; n < rank; n++)
921 delta = ubound[n] + 1 - lbound[n];
924 size = size * sizeof(element);
929 /* If there is at least one null loop->to[n], it is a callee allocated
931 for (n = 0; n < loop->dimen; n++)
932 if (loop->to[n] == NULL_TREE)
938 for (n = 0; n < loop->dimen; n++)
942 if (size == NULL_TREE)
944 /* For a callee allocated array express the loop bounds in terms
945 of the descriptor fields. */
946 tmp = fold_build2_loc (input_location,
947 MINUS_EXPR, gfc_array_index_type,
948 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
949 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
954 /* Store the stride and bound components in the descriptor. */
955 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
957 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
958 gfc_index_zero_node);
960 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n],
963 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
964 to[n], gfc_index_one_node);
966 /* Check whether the size for this dimension is negative. */
967 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, tmp,
968 gfc_index_zero_node);
969 cond = gfc_evaluate_now (cond, pre);
974 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
975 boolean_type_node, or_expr, cond);
977 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
979 size = gfc_evaluate_now (size, pre);
982 /* Get the size of the array. */
984 if (size && !callee_alloc)
986 /* If or_expr is true, then the extent in at least one
987 dimension is zero and the size is set to zero. */
988 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
989 or_expr, gfc_index_zero_node, size);
992 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
994 fold_convert (gfc_array_index_type,
995 TYPE_SIZE_UNIT (gfc_get_element_type (type))));
1003 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1006 if (info->dimen > loop->temp_dim)
1007 loop->temp_dim = info->dimen;
1013 /* Return the number of iterations in a loop that starts at START,
1014 ends at END, and has step STEP. */
1017 gfc_get_iteration_count (tree start, tree end, tree step)
1022 type = TREE_TYPE (step);
1023 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1024 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1025 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1026 build_int_cst (type, 1));
1027 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1028 build_int_cst (type, 0));
1029 return fold_convert (gfc_array_index_type, tmp);
1033 /* Extend the data in array DESC by EXTRA elements. */
1036 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1043 if (integer_zerop (extra))
1046 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1048 /* Add EXTRA to the upper bound. */
1049 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1051 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1053 /* Get the value of the current data pointer. */
1054 arg0 = gfc_conv_descriptor_data_get (desc);
1056 /* Calculate the new array size. */
1057 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1058 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1059 ubound, gfc_index_one_node);
1060 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1061 fold_convert (size_type_node, tmp),
1062 fold_convert (size_type_node, size));
1064 /* Call the realloc() function. */
1065 tmp = gfc_call_realloc (pblock, arg0, arg1);
1066 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1070 /* Return true if the bounds of iterator I can only be determined
1074 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1076 return (i->start->expr_type != EXPR_CONSTANT
1077 || i->end->expr_type != EXPR_CONSTANT
1078 || i->step->expr_type != EXPR_CONSTANT);
1082 /* Split the size of constructor element EXPR into the sum of two terms,
1083 one of which can be determined at compile time and one of which must
1084 be calculated at run time. Set *SIZE to the former and return true
1085 if the latter might be nonzero. */
1088 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1090 if (expr->expr_type == EXPR_ARRAY)
1091 return gfc_get_array_constructor_size (size, expr->value.constructor);
1092 else if (expr->rank > 0)
1094 /* Calculate everything at run time. */
1095 mpz_set_ui (*size, 0);
1100 /* A single element. */
1101 mpz_set_ui (*size, 1);
1107 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1108 of array constructor C. */
1111 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1119 mpz_set_ui (*size, 0);
1124 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1127 if (i && gfc_iterator_has_dynamic_bounds (i))
1131 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1134 /* Multiply the static part of the element size by the
1135 number of iterations. */
1136 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1137 mpz_fdiv_q (val, val, i->step->value.integer);
1138 mpz_add_ui (val, val, 1);
1139 if (mpz_sgn (val) > 0)
1140 mpz_mul (len, len, val);
1142 mpz_set_ui (len, 0);
1144 mpz_add (*size, *size, len);
1153 /* Make sure offset is a variable. */
1156 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1159 /* We should have already created the offset variable. We cannot
1160 create it here because we may be in an inner scope. */
1161 gcc_assert (*offsetvar != NULL_TREE);
1162 gfc_add_modify (pblock, *offsetvar, *poffset);
1163 *poffset = *offsetvar;
1164 TREE_USED (*offsetvar) = 1;
1168 /* Variables needed for bounds-checking. */
1169 static bool first_len;
1170 static tree first_len_val;
1171 static bool typespec_chararray_ctor;
1174 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1175 tree offset, gfc_se * se, gfc_expr * expr)
1179 gfc_conv_expr (se, expr);
1181 /* Store the value. */
1182 tmp = build_fold_indirect_ref_loc (input_location,
1183 gfc_conv_descriptor_data_get (desc));
1184 tmp = gfc_build_array_ref (tmp, offset, NULL);
1186 if (expr->ts.type == BT_CHARACTER)
1188 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1191 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1192 esize = fold_convert (gfc_charlen_type_node, esize);
1193 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1194 gfc_charlen_type_node, esize,
1195 build_int_cst (gfc_charlen_type_node,
1196 gfc_character_kinds[i].bit_size / 8));
1198 gfc_conv_string_parameter (se);
1199 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1201 /* The temporary is an array of pointers. */
1202 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1203 gfc_add_modify (&se->pre, tmp, se->expr);
1207 /* The temporary is an array of string values. */
1208 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1209 /* We know the temporary and the value will be the same length,
1210 so can use memcpy. */
1211 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1212 se->string_length, se->expr, expr->ts.kind);
1214 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1218 gfc_add_modify (&se->pre, first_len_val,
1224 /* Verify that all constructor elements are of the same
1226 tree cond = fold_build2_loc (input_location, NE_EXPR,
1227 boolean_type_node, first_len_val,
1229 gfc_trans_runtime_check
1230 (true, false, cond, &se->pre, &expr->where,
1231 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1232 fold_convert (long_integer_type_node, first_len_val),
1233 fold_convert (long_integer_type_node, se->string_length));
1239 /* TODO: Should the frontend already have done this conversion? */
1240 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1241 gfc_add_modify (&se->pre, tmp, se->expr);
1244 gfc_add_block_to_block (pblock, &se->pre);
1245 gfc_add_block_to_block (pblock, &se->post);
1249 /* Add the contents of an array to the constructor. DYNAMIC is as for
1250 gfc_trans_array_constructor_value. */
1253 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1254 tree type ATTRIBUTE_UNUSED,
1255 tree desc, gfc_expr * expr,
1256 tree * poffset, tree * offsetvar,
1267 /* We need this to be a variable so we can increment it. */
1268 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1270 gfc_init_se (&se, NULL);
1272 /* Walk the array expression. */
1273 ss = gfc_walk_expr (expr);
1274 gcc_assert (ss != gfc_ss_terminator);
1276 /* Initialize the scalarizer. */
1277 gfc_init_loopinfo (&loop);
1278 gfc_add_ss_to_loop (&loop, ss);
1280 /* Initialize the loop. */
1281 gfc_conv_ss_startstride (&loop);
1282 gfc_conv_loop_setup (&loop, &expr->where);
1284 /* Make sure the constructed array has room for the new data. */
1287 /* Set SIZE to the total number of elements in the subarray. */
1288 size = gfc_index_one_node;
1289 for (n = 0; n < loop.dimen; n++)
1291 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1292 gfc_index_one_node);
1293 size = fold_build2_loc (input_location, MULT_EXPR,
1294 gfc_array_index_type, size, tmp);
1297 /* Grow the constructed array by SIZE elements. */
1298 gfc_grow_array (&loop.pre, desc, size);
1301 /* Make the loop body. */
1302 gfc_mark_ss_chain_used (ss, 1);
1303 gfc_start_scalarized_body (&loop, &body);
1304 gfc_copy_loopinfo_to_se (&se, &loop);
1307 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1308 gcc_assert (se.ss == gfc_ss_terminator);
1310 /* Increment the offset. */
1311 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1312 *poffset, gfc_index_one_node);
1313 gfc_add_modify (&body, *poffset, tmp);
1315 /* Finish the loop. */
1316 gfc_trans_scalarizing_loops (&loop, &body);
1317 gfc_add_block_to_block (&loop.pre, &loop.post);
1318 tmp = gfc_finish_block (&loop.pre);
1319 gfc_add_expr_to_block (pblock, tmp);
1321 gfc_cleanup_loop (&loop);
1325 /* Assign the values to the elements of an array constructor. DYNAMIC
1326 is true if descriptor DESC only contains enough data for the static
1327 size calculated by gfc_get_array_constructor_size. When true, memory
1328 for the dynamic parts must be allocated using realloc. */
1331 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1332 tree desc, gfc_constructor_base base,
1333 tree * poffset, tree * offsetvar,
1342 tree shadow_loopvar = NULL_TREE;
1343 gfc_saved_var saved_loopvar;
1346 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1348 /* If this is an iterator or an array, the offset must be a variable. */
1349 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1350 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1352 /* Shadowing the iterator avoids changing its value and saves us from
1353 keeping track of it. Further, it makes sure that there's always a
1354 backend-decl for the symbol, even if there wasn't one before,
1355 e.g. in the case of an iterator that appears in a specification
1356 expression in an interface mapping. */
1359 gfc_symbol *sym = c->iterator->var->symtree->n.sym;
1360 tree type = gfc_typenode_for_spec (&sym->ts);
1362 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1363 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1366 gfc_start_block (&body);
1368 if (c->expr->expr_type == EXPR_ARRAY)
1370 /* Array constructors can be nested. */
1371 gfc_trans_array_constructor_value (&body, type, desc,
1372 c->expr->value.constructor,
1373 poffset, offsetvar, dynamic);
1375 else if (c->expr->rank > 0)
1377 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1378 poffset, offsetvar, dynamic);
1382 /* This code really upsets the gimplifier so don't bother for now. */
1389 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1391 p = gfc_constructor_next (p);
1396 /* Scalar values. */
1397 gfc_init_se (&se, NULL);
1398 gfc_trans_array_ctor_element (&body, desc, *poffset,
1401 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1402 gfc_array_index_type,
1403 *poffset, gfc_index_one_node);
1407 /* Collect multiple scalar constants into a constructor. */
1408 VEC(constructor_elt,gc) *v = NULL;
1412 HOST_WIDE_INT idx = 0;
1415 /* Count the number of consecutive scalar constants. */
1416 while (p && !(p->iterator
1417 || p->expr->expr_type != EXPR_CONSTANT))
1419 gfc_init_se (&se, NULL);
1420 gfc_conv_constant (&se, p->expr);
1422 if (c->expr->ts.type != BT_CHARACTER)
1423 se.expr = fold_convert (type, se.expr);
1424 /* For constant character array constructors we build
1425 an array of pointers. */
1426 else if (POINTER_TYPE_P (type))
1427 se.expr = gfc_build_addr_expr
1428 (gfc_get_pchar_type (p->expr->ts.kind),
1431 CONSTRUCTOR_APPEND_ELT (v,
1432 build_int_cst (gfc_array_index_type,
1436 p = gfc_constructor_next (p);
1439 bound = size_int (n - 1);
1440 /* Create an array type to hold them. */
1441 tmptype = build_range_type (gfc_array_index_type,
1442 gfc_index_zero_node, bound);
1443 tmptype = build_array_type (type, tmptype);
1445 init = build_constructor (tmptype, v);
1446 TREE_CONSTANT (init) = 1;
1447 TREE_STATIC (init) = 1;
1448 /* Create a static variable to hold the data. */
1449 tmp = gfc_create_var (tmptype, "data");
1450 TREE_STATIC (tmp) = 1;
1451 TREE_CONSTANT (tmp) = 1;
1452 TREE_READONLY (tmp) = 1;
1453 DECL_INITIAL (tmp) = init;
1456 /* Use BUILTIN_MEMCPY to assign the values. */
1457 tmp = gfc_conv_descriptor_data_get (desc);
1458 tmp = build_fold_indirect_ref_loc (input_location,
1460 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1461 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1462 init = gfc_build_addr_expr (NULL_TREE, init);
1464 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1465 bound = build_int_cst (size_type_node, n * size);
1466 tmp = build_call_expr_loc (input_location,
1467 builtin_decl_explicit (BUILT_IN_MEMCPY),
1468 3, tmp, init, bound);
1469 gfc_add_expr_to_block (&body, tmp);
1471 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1472 gfc_array_index_type, *poffset,
1473 build_int_cst (gfc_array_index_type, n));
1475 if (!INTEGER_CST_P (*poffset))
1477 gfc_add_modify (&body, *offsetvar, *poffset);
1478 *poffset = *offsetvar;
1482 /* The frontend should already have done any expansions
1486 /* Pass the code as is. */
1487 tmp = gfc_finish_block (&body);
1488 gfc_add_expr_to_block (pblock, tmp);
1492 /* Build the implied do-loop. */
1493 stmtblock_t implied_do_block;
1501 loopbody = gfc_finish_block (&body);
1503 /* Create a new block that holds the implied-do loop. A temporary
1504 loop-variable is used. */
1505 gfc_start_block(&implied_do_block);
1507 /* Initialize the loop. */
1508 gfc_init_se (&se, NULL);
1509 gfc_conv_expr_val (&se, c->iterator->start);
1510 gfc_add_block_to_block (&implied_do_block, &se.pre);
1511 gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
1513 gfc_init_se (&se, NULL);
1514 gfc_conv_expr_val (&se, c->iterator->end);
1515 gfc_add_block_to_block (&implied_do_block, &se.pre);
1516 end = gfc_evaluate_now (se.expr, &implied_do_block);
1518 gfc_init_se (&se, NULL);
1519 gfc_conv_expr_val (&se, c->iterator->step);
1520 gfc_add_block_to_block (&implied_do_block, &se.pre);
1521 step = gfc_evaluate_now (se.expr, &implied_do_block);
1523 /* If this array expands dynamically, and the number of iterations
1524 is not constant, we won't have allocated space for the static
1525 part of C->EXPR's size. Do that now. */
1526 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1528 /* Get the number of iterations. */
1529 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1531 /* Get the static part of C->EXPR's size. */
1532 gfc_get_array_constructor_element_size (&size, c->expr);
1533 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1535 /* Grow the array by TMP * TMP2 elements. */
1536 tmp = fold_build2_loc (input_location, MULT_EXPR,
1537 gfc_array_index_type, tmp, tmp2);
1538 gfc_grow_array (&implied_do_block, desc, tmp);
1541 /* Generate the loop body. */
1542 exit_label = gfc_build_label_decl (NULL_TREE);
1543 gfc_start_block (&body);
1545 /* Generate the exit condition. Depending on the sign of
1546 the step variable we have to generate the correct
1548 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1549 step, build_int_cst (TREE_TYPE (step), 0));
1550 cond = fold_build3_loc (input_location, COND_EXPR,
1551 boolean_type_node, tmp,
1552 fold_build2_loc (input_location, GT_EXPR,
1553 boolean_type_node, shadow_loopvar, end),
1554 fold_build2_loc (input_location, LT_EXPR,
1555 boolean_type_node, shadow_loopvar, end));
1556 tmp = build1_v (GOTO_EXPR, exit_label);
1557 TREE_USED (exit_label) = 1;
1558 tmp = build3_v (COND_EXPR, cond, tmp,
1559 build_empty_stmt (input_location));
1560 gfc_add_expr_to_block (&body, tmp);
1562 /* The main loop body. */
1563 gfc_add_expr_to_block (&body, loopbody);
1565 /* Increase loop variable by step. */
1566 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1567 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1569 gfc_add_modify (&body, shadow_loopvar, tmp);
1571 /* Finish the loop. */
1572 tmp = gfc_finish_block (&body);
1573 tmp = build1_v (LOOP_EXPR, tmp);
1574 gfc_add_expr_to_block (&implied_do_block, tmp);
1576 /* Add the exit label. */
1577 tmp = build1_v (LABEL_EXPR, exit_label);
1578 gfc_add_expr_to_block (&implied_do_block, tmp);
1580 /* Finishe the implied-do loop. */
1581 tmp = gfc_finish_block(&implied_do_block);
1582 gfc_add_expr_to_block(pblock, tmp);
1584 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1591 /* A catch-all to obtain the string length for anything that is not a
1592 a substring of non-constant length, a constant, array or variable. */
1595 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1600 /* Don't bother if we already know the length is a constant. */
1601 if (*len && INTEGER_CST_P (*len))
1604 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1605 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1608 gfc_conv_const_charlen (e->ts.u.cl);
1609 *len = e->ts.u.cl->backend_decl;
1613 /* Otherwise, be brutal even if inefficient. */
1614 ss = gfc_walk_expr (e);
1615 gfc_init_se (&se, NULL);
1617 /* No function call, in case of side effects. */
1618 se.no_function_call = 1;
1619 if (ss == gfc_ss_terminator)
1620 gfc_conv_expr (&se, e);
1622 gfc_conv_expr_descriptor (&se, e, ss);
1624 /* Fix the value. */
1625 *len = gfc_evaluate_now (se.string_length, &se.pre);
1627 gfc_add_block_to_block (block, &se.pre);
1628 gfc_add_block_to_block (block, &se.post);
1630 e->ts.u.cl->backend_decl = *len;
1635 /* Figure out the string length of a variable reference expression.
1636 Used by get_array_ctor_strlen. */
1639 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1645 /* Don't bother if we already know the length is a constant. */
1646 if (*len && INTEGER_CST_P (*len))
1649 ts = &expr->symtree->n.sym->ts;
1650 for (ref = expr->ref; ref; ref = ref->next)
1655 /* Array references don't change the string length. */
1659 /* Use the length of the component. */
1660 ts = &ref->u.c.component->ts;
1664 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1665 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1667 /* Note that this might evaluate expr. */
1668 get_array_ctor_all_strlen (block, expr, len);
1671 mpz_init_set_ui (char_len, 1);
1672 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1673 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1674 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1675 *len = convert (gfc_charlen_type_node, *len);
1676 mpz_clear (char_len);
1684 *len = ts->u.cl->backend_decl;
1688 /* Figure out the string length of a character array constructor.
1689 If len is NULL, don't calculate the length; this happens for recursive calls
1690 when a sub-array-constructor is an element but not at the first position,
1691 so when we're not interested in the length.
1692 Returns TRUE if all elements are character constants. */
1695 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1702 if (gfc_constructor_first (base) == NULL)
1705 *len = build_int_cstu (gfc_charlen_type_node, 0);
1709 /* Loop over all constructor elements to find out is_const, but in len we
1710 want to store the length of the first, not the last, element. We can
1711 of course exit the loop as soon as is_const is found to be false. */
1712 for (c = gfc_constructor_first (base);
1713 c && is_const; c = gfc_constructor_next (c))
1715 switch (c->expr->expr_type)
1718 if (len && !(*len && INTEGER_CST_P (*len)))
1719 *len = build_int_cstu (gfc_charlen_type_node,
1720 c->expr->value.character.length);
1724 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1731 get_array_ctor_var_strlen (block, c->expr, len);
1737 get_array_ctor_all_strlen (block, c->expr, len);
1741 /* After the first iteration, we don't want the length modified. */
1748 /* Check whether the array constructor C consists entirely of constant
1749 elements, and if so returns the number of those elements, otherwise
1750 return zero. Note, an empty or NULL array constructor returns zero. */
1752 unsigned HOST_WIDE_INT
1753 gfc_constant_array_constructor_p (gfc_constructor_base base)
1755 unsigned HOST_WIDE_INT nelem = 0;
1757 gfc_constructor *c = gfc_constructor_first (base);
1761 || c->expr->rank > 0
1762 || c->expr->expr_type != EXPR_CONSTANT)
1764 c = gfc_constructor_next (c);
1771 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1772 and the tree type of it's elements, TYPE, return a static constant
1773 variable that is compile-time initialized. */
1776 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1778 tree tmptype, init, tmp;
1779 HOST_WIDE_INT nelem;
1784 VEC(constructor_elt,gc) *v = NULL;
1786 /* First traverse the constructor list, converting the constants
1787 to tree to build an initializer. */
1789 c = gfc_constructor_first (expr->value.constructor);
1792 gfc_init_se (&se, NULL);
1793 gfc_conv_constant (&se, c->expr);
1794 if (c->expr->ts.type != BT_CHARACTER)
1795 se.expr = fold_convert (type, se.expr);
1796 else if (POINTER_TYPE_P (type))
1797 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
1799 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
1801 c = gfc_constructor_next (c);
1805 /* Next determine the tree type for the array. We use the gfortran
1806 front-end's gfc_get_nodesc_array_type in order to create a suitable
1807 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
1809 memset (&as, 0, sizeof (gfc_array_spec));
1811 as.rank = expr->rank;
1812 as.type = AS_EXPLICIT;
1815 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1816 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
1820 for (i = 0; i < expr->rank; i++)
1822 int tmp = (int) mpz_get_si (expr->shape[i]);
1823 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1824 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
1828 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
1830 /* as is not needed anymore. */
1831 for (i = 0; i < as.rank + as.corank; i++)
1833 gfc_free_expr (as.lower[i]);
1834 gfc_free_expr (as.upper[i]);
1837 init = build_constructor (tmptype, v);
1839 TREE_CONSTANT (init) = 1;
1840 TREE_STATIC (init) = 1;
1842 tmp = gfc_create_var (tmptype, "A");
1843 TREE_STATIC (tmp) = 1;
1844 TREE_CONSTANT (tmp) = 1;
1845 TREE_READONLY (tmp) = 1;
1846 DECL_INITIAL (tmp) = init;
1852 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
1853 This mostly initializes the scalarizer state info structure with the
1854 appropriate values to directly use the array created by the function
1855 gfc_build_constant_array_constructor. */
1858 trans_constant_array_constructor (gfc_ss * ss, tree type)
1864 tmp = gfc_build_constant_array_constructor (ss->expr, type);
1866 info = &ss->data.info;
1868 info->descriptor = tmp;
1869 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
1870 info->offset = gfc_index_zero_node;
1872 for (i = 0; i < info->dimen; i++)
1874 info->delta[i] = gfc_index_zero_node;
1875 info->start[i] = gfc_index_zero_node;
1876 info->end[i] = gfc_index_zero_node;
1877 info->stride[i] = gfc_index_one_node;
1881 /* Helper routine of gfc_trans_array_constructor to determine if the
1882 bounds of the loop specified by LOOP are constant and simple enough
1883 to use with trans_constant_array_constructor. Returns the
1884 iteration count of the loop if suitable, and NULL_TREE otherwise. */
1887 constant_array_constructor_loop_size (gfc_loopinfo * loop)
1889 tree size = gfc_index_one_node;
1893 for (i = 0; i < loop->dimen; i++)
1895 /* If the bounds aren't constant, return NULL_TREE. */
1896 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
1898 if (!integer_zerop (loop->from[i]))
1900 /* Only allow nonzero "from" in one-dimensional arrays. */
1901 if (loop->dimen != 1)
1903 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1904 gfc_array_index_type,
1905 loop->to[i], loop->from[i]);
1909 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1910 tmp, gfc_index_one_node);
1911 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1919 /* Array constructors are handled by constructing a temporary, then using that
1920 within the scalarization loop. This is not optimal, but seems by far the
1924 gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
1926 gfc_constructor_base c;
1933 bool old_first_len, old_typespec_chararray_ctor;
1934 tree old_first_len_val;
1936 /* Save the old values for nested checking. */
1937 old_first_len = first_len;
1938 old_first_len_val = first_len_val;
1939 old_typespec_chararray_ctor = typespec_chararray_ctor;
1941 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
1942 typespec was given for the array constructor. */
1943 typespec_chararray_ctor = (ss->expr->ts.u.cl
1944 && ss->expr->ts.u.cl->length_from_typespec);
1946 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
1947 && ss->expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
1949 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
1953 gcc_assert (ss->data.info.dimen == loop->dimen);
1955 c = ss->expr->value.constructor;
1956 if (ss->expr->ts.type == BT_CHARACTER)
1960 /* get_array_ctor_strlen walks the elements of the constructor, if a
1961 typespec was given, we already know the string length and want the one
1963 if (typespec_chararray_ctor && ss->expr->ts.u.cl->length
1964 && ss->expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
1968 const_string = false;
1969 gfc_init_se (&length_se, NULL);
1970 gfc_conv_expr_type (&length_se, ss->expr->ts.u.cl->length,
1971 gfc_charlen_type_node);
1972 ss->string_length = length_se.expr;
1973 gfc_add_block_to_block (&loop->pre, &length_se.pre);
1974 gfc_add_block_to_block (&loop->post, &length_se.post);
1977 const_string = get_array_ctor_strlen (&loop->pre, c,
1978 &ss->string_length);
1980 /* Complex character array constructors should have been taken care of
1981 and not end up here. */
1982 gcc_assert (ss->string_length);
1984 ss->expr->ts.u.cl->backend_decl = ss->string_length;
1986 type = gfc_get_character_type_len (ss->expr->ts.kind, ss->string_length);
1988 type = build_pointer_type (type);
1991 type = gfc_typenode_for_spec (&ss->expr->ts);
1993 /* See if the constructor determines the loop bounds. */
1996 if (ss->expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
1998 /* We have a multidimensional parameter. */
2000 for (n = 0; n < ss->expr->rank; n++)
2002 loop->from[n] = gfc_index_zero_node;
2003 loop->to[n] = gfc_conv_mpz_to_tree (ss->expr->shape [n],
2004 gfc_index_integer_kind);
2005 loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2006 gfc_array_index_type,
2007 loop->to[n], gfc_index_one_node);
2011 if (loop->to[0] == NULL_TREE)
2015 /* We should have a 1-dimensional, zero-based loop. */
2016 gcc_assert (loop->dimen == 1);
2017 gcc_assert (integer_zerop (loop->from[0]));
2019 /* Split the constructor size into a static part and a dynamic part.
2020 Allocate the static size up-front and record whether the dynamic
2021 size might be nonzero. */
2023 dynamic = gfc_get_array_constructor_size (&size, c);
2024 mpz_sub_ui (size, size, 1);
2025 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2029 /* Special case constant array constructors. */
2032 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2035 tree size = constant_array_constructor_loop_size (loop);
2036 if (size && compare_tree_int (size, nelem) == 0)
2038 trans_constant_array_constructor (ss, type);
2044 if (TREE_CODE (loop->to[0]) == VAR_DECL)
2047 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, ss,
2048 type, NULL_TREE, dynamic, true, false, where);
2050 desc = ss->data.info.descriptor;
2051 offset = gfc_index_zero_node;
2052 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2053 TREE_NO_WARNING (offsetvar) = 1;
2054 TREE_USED (offsetvar) = 0;
2055 gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
2056 &offset, &offsetvar, dynamic);
2058 /* If the array grows dynamically, the upper bound of the loop variable
2059 is determined by the array's final upper bound. */
2062 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2063 gfc_array_index_type,
2064 offsetvar, gfc_index_one_node);
2065 tmp = gfc_evaluate_now (tmp, &loop->pre);
2066 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2067 if (loop->to[0] && TREE_CODE (loop->to[0]) == VAR_DECL)
2068 gfc_add_modify (&loop->pre, loop->to[0], tmp);
2073 if (TREE_USED (offsetvar))
2074 pushdecl (offsetvar);
2076 gcc_assert (INTEGER_CST_P (offset));
2079 /* Disable bound checking for now because it's probably broken. */
2080 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2087 /* Restore old values of globals. */
2088 first_len = old_first_len;
2089 first_len_val = old_first_len_val;
2090 typespec_chararray_ctor = old_typespec_chararray_ctor;
2094 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2095 called after evaluating all of INFO's vector dimensions. Go through
2096 each such vector dimension and see if we can now fill in any missing
2100 set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss * ss)
2110 info = &ss->data.info;
2112 for (n = 0; n < loop->dimen; n++)
2115 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
2116 && loop->to[n] == NULL)
2118 /* Loop variable N indexes vector dimension DIM, and we don't
2119 yet know the upper bound of loop variable N. Set it to the
2120 difference between the vector's upper and lower bounds. */
2121 gcc_assert (loop->from[n] == gfc_index_zero_node);
2122 gcc_assert (info->subscript[dim]
2123 && info->subscript[dim]->type == GFC_SS_VECTOR);
2125 gfc_init_se (&se, NULL);
2126 desc = info->subscript[dim]->data.info.descriptor;
2127 zero = gfc_rank_cst[0];
2128 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2129 gfc_array_index_type,
2130 gfc_conv_descriptor_ubound_get (desc, zero),
2131 gfc_conv_descriptor_lbound_get (desc, zero));
2132 tmp = gfc_evaluate_now (tmp, &loop->pre);
2139 /* Add the pre and post chains for all the scalar expressions in a SS chain
2140 to loop. This is called after the loop parameters have been calculated,
2141 but before the actual scalarizing loops. */
2144 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2150 /* TODO: This can generate bad code if there are ordering dependencies,
2151 e.g., a callee allocated function and an unknown size constructor. */
2152 gcc_assert (ss != NULL);
2154 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2161 /* Scalar expression. Evaluate this now. This includes elemental
2162 dimension indices, but not array section bounds. */
2163 gfc_init_se (&se, NULL);
2164 gfc_conv_expr (&se, ss->expr);
2165 gfc_add_block_to_block (&loop->pre, &se.pre);
2167 if (ss->expr->ts.type != BT_CHARACTER)
2169 /* Move the evaluation of scalar expressions outside the
2170 scalarization loop, except for WHERE assignments. */
2172 se.expr = convert(gfc_array_index_type, se.expr);
2174 se.expr = gfc_evaluate_now (se.expr, &loop->pre);
2175 gfc_add_block_to_block (&loop->pre, &se.post);
2178 gfc_add_block_to_block (&loop->post, &se.post);
2180 ss->data.scalar.expr = se.expr;
2181 ss->string_length = se.string_length;
2184 case GFC_SS_REFERENCE:
2185 /* Scalar argument to elemental procedure. Evaluate this
2187 gfc_init_se (&se, NULL);
2188 gfc_conv_expr (&se, ss->expr);
2189 gfc_add_block_to_block (&loop->pre, &se.pre);
2190 gfc_add_block_to_block (&loop->post, &se.post);
2192 ss->data.scalar.expr = gfc_evaluate_now (se.expr, &loop->pre);
2193 ss->string_length = se.string_length;
2196 case GFC_SS_SECTION:
2197 /* Add the expressions for scalar and vector subscripts. */
2198 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2199 if (ss->data.info.subscript[n])
2200 gfc_add_loop_ss_code (loop, ss->data.info.subscript[n], true,
2203 set_vector_loop_bounds (loop, ss);
2207 /* Get the vector's descriptor and store it in SS. */
2208 gfc_init_se (&se, NULL);
2209 gfc_conv_expr_descriptor (&se, ss->expr, gfc_walk_expr (ss->expr));
2210 gfc_add_block_to_block (&loop->pre, &se.pre);
2211 gfc_add_block_to_block (&loop->post, &se.post);
2212 ss->data.info.descriptor = se.expr;
2215 case GFC_SS_INTRINSIC:
2216 gfc_add_intrinsic_ss_code (loop, ss);
2219 case GFC_SS_FUNCTION:
2220 /* Array function return value. We call the function and save its
2221 result in a temporary for use inside the loop. */
2222 gfc_init_se (&se, NULL);
2225 gfc_conv_expr (&se, ss->expr);
2226 gfc_add_block_to_block (&loop->pre, &se.pre);
2227 gfc_add_block_to_block (&loop->post, &se.post);
2228 ss->string_length = se.string_length;
2231 case GFC_SS_CONSTRUCTOR:
2232 if (ss->expr->ts.type == BT_CHARACTER
2233 && ss->string_length == NULL
2234 && ss->expr->ts.u.cl
2235 && ss->expr->ts.u.cl->length)
2237 gfc_init_se (&se, NULL);
2238 gfc_conv_expr_type (&se, ss->expr->ts.u.cl->length,
2239 gfc_charlen_type_node);
2240 ss->string_length = se.expr;
2241 gfc_add_block_to_block (&loop->pre, &se.pre);
2242 gfc_add_block_to_block (&loop->post, &se.post);
2244 gfc_trans_array_constructor (loop, ss, where);
2248 case GFC_SS_COMPONENT:
2249 /* Do nothing. These are handled elsewhere. */
2259 /* Translate expressions for the descriptor and data pointer of a SS. */
2263 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2268 /* Get the descriptor for the array to be scalarized. */
2269 gcc_assert (ss->expr->expr_type == EXPR_VARIABLE);
2270 gfc_init_se (&se, NULL);
2271 se.descriptor_only = 1;
2272 gfc_conv_expr_lhs (&se, ss->expr);
2273 gfc_add_block_to_block (block, &se.pre);
2274 ss->data.info.descriptor = se.expr;
2275 ss->string_length = se.string_length;
2279 /* Also the data pointer. */
2280 tmp = gfc_conv_array_data (se.expr);
2281 /* If this is a variable or address of a variable we use it directly.
2282 Otherwise we must evaluate it now to avoid breaking dependency
2283 analysis by pulling the expressions for elemental array indices
2286 || (TREE_CODE (tmp) == ADDR_EXPR
2287 && DECL_P (TREE_OPERAND (tmp, 0)))))
2288 tmp = gfc_evaluate_now (tmp, block);
2289 ss->data.info.data = tmp;
2291 tmp = gfc_conv_array_offset (se.expr);
2292 ss->data.info.offset = gfc_evaluate_now (tmp, block);
2294 /* Make absolutely sure that the saved_offset is indeed saved
2295 so that the variable is still accessible after the loops
2297 ss->data.info.saved_offset = ss->data.info.offset;
2302 /* Initialize a gfc_loopinfo structure. */
2305 gfc_init_loopinfo (gfc_loopinfo * loop)
2309 memset (loop, 0, sizeof (gfc_loopinfo));
2310 gfc_init_block (&loop->pre);
2311 gfc_init_block (&loop->post);
2313 /* Initially scalarize in order and default to no loop reversal. */
2314 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2317 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2320 loop->ss = gfc_ss_terminator;
2324 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2328 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2334 /* Return an expression for the data pointer of an array. */
2337 gfc_conv_array_data (tree descriptor)
2341 type = TREE_TYPE (descriptor);
2342 if (GFC_ARRAY_TYPE_P (type))
2344 if (TREE_CODE (type) == POINTER_TYPE)
2348 /* Descriptorless arrays. */
2349 return gfc_build_addr_expr (NULL_TREE, descriptor);
2353 return gfc_conv_descriptor_data_get (descriptor);
2357 /* Return an expression for the base offset of an array. */
2360 gfc_conv_array_offset (tree descriptor)
2364 type = TREE_TYPE (descriptor);
2365 if (GFC_ARRAY_TYPE_P (type))
2366 return GFC_TYPE_ARRAY_OFFSET (type);
2368 return gfc_conv_descriptor_offset_get (descriptor);
2372 /* Get an expression for the array stride. */
2375 gfc_conv_array_stride (tree descriptor, int dim)
2380 type = TREE_TYPE (descriptor);
2382 /* For descriptorless arrays use the array size. */
2383 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2384 if (tmp != NULL_TREE)
2387 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2392 /* Like gfc_conv_array_stride, but for the lower bound. */
2395 gfc_conv_array_lbound (tree descriptor, int dim)
2400 type = TREE_TYPE (descriptor);
2402 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2403 if (tmp != NULL_TREE)
2406 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2411 /* Like gfc_conv_array_stride, but for the upper bound. */
2414 gfc_conv_array_ubound (tree descriptor, int dim)
2419 type = TREE_TYPE (descriptor);
2421 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2422 if (tmp != NULL_TREE)
2425 /* This should only ever happen when passing an assumed shape array
2426 as an actual parameter. The value will never be used. */
2427 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2428 return gfc_index_zero_node;
2430 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2435 /* Generate code to perform an array index bound check. */
2438 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2439 locus * where, bool check_upper)
2442 tree tmp_lo, tmp_up;
2445 const char * name = NULL;
2447 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2450 descriptor = ss->data.info.descriptor;
2452 index = gfc_evaluate_now (index, &se->pre);
2454 /* We find a name for the error message. */
2455 name = ss->expr->symtree->n.sym->name;
2456 gcc_assert (name != NULL);
2458 if (TREE_CODE (descriptor) == VAR_DECL)
2459 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2461 /* If upper bound is present, include both bounds in the error message. */
2464 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2465 tmp_up = gfc_conv_array_ubound (descriptor, n);
2468 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2469 "outside of expected range (%%ld:%%ld)", n+1, name);
2471 asprintf (&msg, "Index '%%ld' of dimension %d "
2472 "outside of expected range (%%ld:%%ld)", n+1);
2474 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2476 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2477 fold_convert (long_integer_type_node, index),
2478 fold_convert (long_integer_type_node, tmp_lo),
2479 fold_convert (long_integer_type_node, tmp_up));
2480 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2482 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2483 fold_convert (long_integer_type_node, index),
2484 fold_convert (long_integer_type_node, tmp_lo),
2485 fold_convert (long_integer_type_node, tmp_up));
2490 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2493 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2494 "below lower bound of %%ld", n+1, name);
2496 asprintf (&msg, "Index '%%ld' of dimension %d "
2497 "below lower bound of %%ld", n+1);
2499 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2501 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2502 fold_convert (long_integer_type_node, index),
2503 fold_convert (long_integer_type_node, tmp_lo));
2511 /* Return the offset for an index. Performs bound checking for elemental
2512 dimensions. Single element references are processed separately.
2513 DIM is the array dimension, I is the loop dimension. */
2516 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2517 gfc_array_ref * ar, tree stride)
2524 info = &ss->data.info;
2526 /* Get the index into the array for this dimension. */
2529 gcc_assert (ar->type != AR_ELEMENT);
2530 switch (ar->dimen_type[dim])
2532 case DIMEN_THIS_IMAGE:
2536 /* Elemental dimension. */
2537 gcc_assert (info->subscript[dim]
2538 && info->subscript[dim]->type == GFC_SS_SCALAR);
2539 /* We've already translated this value outside the loop. */
2540 index = info->subscript[dim]->data.scalar.expr;
2542 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2543 ar->as->type != AS_ASSUMED_SIZE
2544 || dim < ar->dimen - 1);
2548 gcc_assert (info && se->loop);
2549 gcc_assert (info->subscript[dim]
2550 && info->subscript[dim]->type == GFC_SS_VECTOR);
2551 desc = info->subscript[dim]->data.info.descriptor;
2553 /* Get a zero-based index into the vector. */
2554 index = fold_build2_loc (input_location, MINUS_EXPR,
2555 gfc_array_index_type,
2556 se->loop->loopvar[i], se->loop->from[i]);
2558 /* Multiply the index by the stride. */
2559 index = fold_build2_loc (input_location, MULT_EXPR,
2560 gfc_array_index_type,
2561 index, gfc_conv_array_stride (desc, 0));
2563 /* Read the vector to get an index into info->descriptor. */
2564 data = build_fold_indirect_ref_loc (input_location,
2565 gfc_conv_array_data (desc));
2566 index = gfc_build_array_ref (data, index, NULL);
2567 index = gfc_evaluate_now (index, &se->pre);
2568 index = fold_convert (gfc_array_index_type, index);
2570 /* Do any bounds checking on the final info->descriptor index. */
2571 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2572 ar->as->type != AS_ASSUMED_SIZE
2573 || dim < ar->dimen - 1);
2577 /* Scalarized dimension. */
2578 gcc_assert (info && se->loop);
2580 /* Multiply the loop variable by the stride and delta. */
2581 index = se->loop->loopvar[i];
2582 if (!integer_onep (info->stride[dim]))
2583 index = fold_build2_loc (input_location, MULT_EXPR,
2584 gfc_array_index_type, index,
2586 if (!integer_zerop (info->delta[dim]))
2587 index = fold_build2_loc (input_location, PLUS_EXPR,
2588 gfc_array_index_type, index,
2598 /* Temporary array or derived type component. */
2599 gcc_assert (se->loop);
2600 index = se->loop->loopvar[se->loop->order[i]];
2602 /* Pointer functions can have stride[0] different from unity.
2603 Use the stride returned by the function call and stored in
2604 the descriptor for the temporary. */
2605 if (se->ss && se->ss->type == GFC_SS_FUNCTION
2607 && se->ss->expr->symtree
2608 && se->ss->expr->symtree->n.sym->result
2609 && se->ss->expr->symtree->n.sym->result->attr.pointer)
2610 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2613 if (!integer_zerop (info->delta[dim]))
2614 index = fold_build2_loc (input_location, PLUS_EXPR,
2615 gfc_array_index_type, index, info->delta[dim]);
2618 /* Multiply by the stride. */
2619 if (!integer_onep (stride))
2620 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2627 /* Build a scalarized reference to an array. */
2630 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
2633 tree decl = NULL_TREE;
2638 info = &se->ss->data.info;
2640 n = se->loop->order[0];
2644 index = conv_array_index_offset (se, se->ss, info->dim[n], n, ar,
2646 /* Add the offset for this dimension to the stored offset for all other
2648 if (!integer_zerop (info->offset))
2649 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2650 index, info->offset);
2652 if (se->ss->expr && is_subref_array (se->ss->expr))
2653 decl = se->ss->expr->symtree->n.sym->backend_decl;
2655 tmp = build_fold_indirect_ref_loc (input_location,
2657 se->expr = gfc_build_array_ref (tmp, index, decl);
2661 /* Translate access of temporary array. */
2664 gfc_conv_tmp_array_ref (gfc_se * se)
2666 se->string_length = se->ss->string_length;
2667 gfc_conv_scalarized_array_ref (se, NULL);
2668 gfc_advance_se_ss_chain (se);
2671 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
2674 add_to_offset (tree *cst_offset, tree *offset, tree t)
2676 if (TREE_CODE (t) == INTEGER_CST)
2677 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
2680 if (!integer_zerop (*offset))
2681 *offset = fold_build2_loc (input_location, PLUS_EXPR,
2682 gfc_array_index_type, *offset, t);
2688 /* Build an array reference. se->expr already holds the array descriptor.
2689 This should be either a variable, indirect variable reference or component
2690 reference. For arrays which do not have a descriptor, se->expr will be
2692 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
2695 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
2699 tree offset, cst_offset;
2707 gcc_assert (ar->codimen);
2709 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
2710 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
2713 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
2714 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
2715 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
2717 /* Use the actual tree type and not the wrapped coarray. */
2718 if (!se->want_pointer)
2719 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
2726 /* Handle scalarized references separately. */
2727 if (ar->type != AR_ELEMENT)
2729 gfc_conv_scalarized_array_ref (se, ar);
2730 gfc_advance_se_ss_chain (se);
2734 cst_offset = offset = gfc_index_zero_node;
2735 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
2737 /* Calculate the offsets from all the dimensions. Make sure to associate
2738 the final offset so that we form a chain of loop invariant summands. */
2739 for (n = ar->dimen - 1; n >= 0; n--)
2741 /* Calculate the index for this dimension. */
2742 gfc_init_se (&indexse, se);
2743 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
2744 gfc_add_block_to_block (&se->pre, &indexse.pre);
2746 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2748 /* Check array bounds. */
2752 /* Evaluate the indexse.expr only once. */
2753 indexse.expr = save_expr (indexse.expr);
2756 tmp = gfc_conv_array_lbound (se->expr, n);
2757 if (sym->attr.temporary)
2759 gfc_init_se (&tmpse, se);
2760 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
2761 gfc_array_index_type);
2762 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2766 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2768 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2769 "below lower bound of %%ld", n+1, sym->name);
2770 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2771 fold_convert (long_integer_type_node,
2773 fold_convert (long_integer_type_node, tmp));
2776 /* Upper bound, but not for the last dimension of assumed-size
2778 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
2780 tmp = gfc_conv_array_ubound (se->expr, n);
2781 if (sym->attr.temporary)
2783 gfc_init_se (&tmpse, se);
2784 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
2785 gfc_array_index_type);
2786 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2790 cond = fold_build2_loc (input_location, GT_EXPR,
2791 boolean_type_node, indexse.expr, tmp);
2792 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2793 "above upper bound of %%ld", n+1, sym->name);
2794 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2795 fold_convert (long_integer_type_node,
2797 fold_convert (long_integer_type_node, tmp));
2802 /* Multiply the index by the stride. */
2803 stride = gfc_conv_array_stride (se->expr, n);
2804 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2805 indexse.expr, stride);
2807 /* And add it to the total. */
2808 add_to_offset (&cst_offset, &offset, tmp);
2811 if (!integer_zerop (cst_offset))
2812 offset = fold_build2_loc (input_location, PLUS_EXPR,
2813 gfc_array_index_type, offset, cst_offset);
2815 /* Access the calculated element. */
2816 tmp = gfc_conv_array_data (se->expr);
2817 tmp = build_fold_indirect_ref (tmp);
2818 se->expr = gfc_build_array_ref (tmp, offset, sym->backend_decl);
2822 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
2823 LOOP_DIM dimension (if any) to array's offset. */
2826 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
2827 gfc_array_ref *ar, int array_dim, int loop_dim)
2833 info = &ss->data.info;
2835 gfc_init_se (&se, NULL);
2837 se.expr = info->descriptor;
2838 stride = gfc_conv_array_stride (info->descriptor, array_dim);
2839 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
2840 gfc_add_block_to_block (pblock, &se.pre);
2842 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2843 gfc_array_index_type,
2844 info->offset, index);
2845 info->offset = gfc_evaluate_now (info->offset, pblock);
2849 /* Generate the code to be executed immediately before entering a
2850 scalarization loop. */
2853 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
2854 stmtblock_t * pblock)
2862 /* This code will be executed before entering the scalarization loop
2863 for this dimension. */
2864 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
2866 if ((ss->useflags & flag) == 0)
2869 if (ss->type != GFC_SS_SECTION
2870 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
2871 && ss->type != GFC_SS_COMPONENT)
2874 info = &ss->data.info;
2876 gcc_assert (dim < info->dimen);
2877 gcc_assert (info->dimen == loop->dimen);
2880 ar = &info->ref->u.ar;
2884 if (dim == loop->dimen - 1)
2889 /* For the time being, there is no loop reordering. */
2890 gcc_assert (i == loop->order[i]);
2893 if (dim == loop->dimen - 1)
2895 stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
2897 /* Calculate the stride of the innermost loop. Hopefully this will
2898 allow the backend optimizers to do their stuff more effectively.
2900 info->stride0 = gfc_evaluate_now (stride, pblock);
2902 /* For the outermost loop calculate the offset due to any
2903 elemental dimensions. It will have been initialized with the
2904 base offset of the array. */
2907 for (i = 0; i < ar->dimen; i++)
2909 if (ar->dimen_type[i] != DIMEN_ELEMENT)
2912 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
2917 /* Add the offset for the previous loop dimension. */
2918 add_array_offset (pblock, loop, ss, ar, info->dim[i], i);
2920 /* Remember this offset for the second loop. */
2921 if (dim == loop->temp_dim - 1)
2922 info->saved_offset = info->offset;
2927 /* Start a scalarized expression. Creates a scope and declares loop
2931 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
2937 gcc_assert (!loop->array_parameter);
2939 for (dim = loop->dimen - 1; dim >= 0; dim--)
2941 n = loop->order[dim];
2943 gfc_start_block (&loop->code[n]);
2945 /* Create the loop variable. */
2946 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
2948 if (dim < loop->temp_dim)
2952 /* Calculate values that will be constant within this loop. */
2953 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
2955 gfc_start_block (pbody);
2959 /* Generates the actual loop code for a scalarization loop. */
2962 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
2963 stmtblock_t * pbody)
2974 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
2975 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
2976 && n == loop->dimen - 1)
2978 /* We create an OMP_FOR construct for the outermost scalarized loop. */
2979 init = make_tree_vec (1);
2980 cond = make_tree_vec (1);
2981 incr = make_tree_vec (1);
2983 /* Cycle statement is implemented with a goto. Exit statement must not
2984 be present for this loop. */
2985 exit_label = gfc_build_label_decl (NULL_TREE);
2986 TREE_USED (exit_label) = 1;
2988 /* Label for cycle statements (if needed). */
2989 tmp = build1_v (LABEL_EXPR, exit_label);
2990 gfc_add_expr_to_block (pbody, tmp);
2992 stmt = make_node (OMP_FOR);
2994 TREE_TYPE (stmt) = void_type_node;
2995 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
2997 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
2998 OMP_CLAUSE_SCHEDULE);
2999 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3000 = OMP_CLAUSE_SCHEDULE_STATIC;
3001 if (ompws_flags & OMPWS_NOWAIT)
3002 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3003 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3005 /* Initialize the loopvar. */
3006 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3008 OMP_FOR_INIT (stmt) = init;
3009 /* The exit condition. */
3010 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3012 loop->loopvar[n], loop->to[n]);
3013 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3014 OMP_FOR_COND (stmt) = cond;
3015 /* Increment the loopvar. */
3016 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3017 loop->loopvar[n], gfc_index_one_node);
3018 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3019 void_type_node, loop->loopvar[n], tmp);
3020 OMP_FOR_INCR (stmt) = incr;
3022 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3023 gfc_add_expr_to_block (&loop->code[n], stmt);
3027 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3028 && (loop->temp_ss == NULL);
3030 loopbody = gfc_finish_block (pbody);
3034 tmp = loop->from[n];
3035 loop->from[n] = loop->to[n];
3039 /* Initialize the loopvar. */
3040 if (loop->loopvar[n] != loop->from[n])
3041 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3043 exit_label = gfc_build_label_decl (NULL_TREE);
3045 /* Generate the loop body. */
3046 gfc_init_block (&block);
3048 /* The exit condition. */
3049 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3050 boolean_type_node, loop->loopvar[n], loop->to[n]);
3051 tmp = build1_v (GOTO_EXPR, exit_label);
3052 TREE_USED (exit_label) = 1;
3053 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3054 gfc_add_expr_to_block (&block, tmp);
3056 /* The main body. */
3057 gfc_add_expr_to_block (&block, loopbody);
3059 /* Increment the loopvar. */
3060 tmp = fold_build2_loc (input_location,
3061 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3062 gfc_array_index_type, loop->loopvar[n],
3063 gfc_index_one_node);
3065 gfc_add_modify (&block, loop->loopvar[n], tmp);
3067 /* Build the loop. */
3068 tmp = gfc_finish_block (&block);
3069 tmp = build1_v (LOOP_EXPR, tmp);
3070 gfc_add_expr_to_block (&loop->code[n], tmp);
3072 /* Add the exit label. */
3073 tmp = build1_v (LABEL_EXPR, exit_label);
3074 gfc_add_expr_to_block (&loop->code[n], tmp);
3080 /* Finishes and generates the loops for a scalarized expression. */
3083 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3088 stmtblock_t *pblock;
3092 /* Generate the loops. */
3093 for (dim = 0; dim < loop->dimen; dim++)
3095 n = loop->order[dim];
3096 gfc_trans_scalarized_loop_end (loop, n, pblock);
3097 loop->loopvar[n] = NULL_TREE;
3098 pblock = &loop->code[n];
3101 tmp = gfc_finish_block (pblock);
3102 gfc_add_expr_to_block (&loop->pre, tmp);
3104 /* Clear all the used flags. */
3105 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3110 /* Finish the main body of a scalarized expression, and start the secondary
3114 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3118 stmtblock_t *pblock;
3122 /* We finish as many loops as are used by the temporary. */
3123 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3125 n = loop->order[dim];
3126 gfc_trans_scalarized_loop_end (loop, n, pblock);
3127 loop->loopvar[n] = NULL_TREE;
3128 pblock = &loop->code[n];
3131 /* We don't want to finish the outermost loop entirely. */
3132 n = loop->order[loop->temp_dim - 1];
3133 gfc_trans_scalarized_loop_end (loop, n, pblock);
3135 /* Restore the initial offsets. */
3136 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3138 if ((ss->useflags & 2) == 0)
3141 if (ss->type != GFC_SS_SECTION
3142 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
3143 && ss->type != GFC_SS_COMPONENT)
3146 ss->data.info.offset = ss->data.info.saved_offset;
3149 /* Restart all the inner loops we just finished. */
3150 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3152 n = loop->order[dim];
3154 gfc_start_block (&loop->code[n]);
3156 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3158 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3161 /* Start a block for the secondary copying code. */
3162 gfc_start_block (body);
3166 /* Precalculate (either lower or upper) bound of an array section.
3167 BLOCK: Block in which the (pre)calculation code will go.
3168 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3169 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3170 DESC: Array descriptor from which the bound will be picked if unspecified
3171 (either lower or upper bound according to LBOUND). */
3174 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3175 tree desc, int dim, bool lbound)
3178 gfc_expr * input_val = values[dim];
3179 tree *output = &bounds[dim];
3184 /* Specified section bound. */
3185 gfc_init_se (&se, NULL);
3186 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3187 gfc_add_block_to_block (block, &se.pre);
3192 /* No specific bound specified so use the bound of the array. */
3193 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3194 gfc_conv_array_ubound (desc, dim);
3196 *output = gfc_evaluate_now (*output, block);
3200 /* Calculate the lower bound of an array section. */
3203 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim)
3205 gfc_expr *stride = NULL;
3211 gcc_assert (ss->type == GFC_SS_SECTION);
3213 info = &ss->data.info;
3214 ar = &info->ref->u.ar;
3216 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3218 /* We use a zero-based index to access the vector. */
3219 info->start[dim] = gfc_index_zero_node;
3220 info->end[dim] = NULL;
3221 info->stride[dim] = gfc_index_one_node;
3225 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3226 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3227 desc = info->descriptor;
3228 stride = ar->stride[dim];
3230 /* Calculate the start of the range. For vector subscripts this will
3231 be the range of the vector. */
3232 evaluate_bound (&loop->pre, info->start, ar->start, desc, dim, true);
3234 /* Similarly calculate the end. Although this is not used in the
3235 scalarizer, it is needed when checking bounds and where the end
3236 is an expression with side-effects. */
3237 evaluate_bound (&loop->pre, info->end, ar->end, desc, dim, false);
3239 /* Calculate the stride. */
3241 info->stride[dim] = gfc_index_one_node;
3244 gfc_init_se (&se, NULL);
3245 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3246 gfc_add_block_to_block (&loop->pre, &se.pre);
3247 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3252 /* Calculates the range start and stride for a SS chain. Also gets the
3253 descriptor and data pointer. The range of vector subscripts is the size
3254 of the vector. Array bounds are also checked. */
3257 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3265 /* Determine the rank of the loop. */
3266 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3270 case GFC_SS_SECTION:
3271 case GFC_SS_CONSTRUCTOR:
3272 case GFC_SS_FUNCTION:
3273 case GFC_SS_COMPONENT:
3274 loop->dimen = ss->data.info.dimen;
3277 /* As usual, lbound and ubound are exceptions!. */
3278 case GFC_SS_INTRINSIC:
3279 switch (ss->expr->value.function.isym->id)
3281 case GFC_ISYM_LBOUND:
3282 case GFC_ISYM_UBOUND:
3283 case GFC_ISYM_LCOBOUND:
3284 case GFC_ISYM_UCOBOUND:
3285 case GFC_ISYM_THIS_IMAGE:
3286 loop->dimen = ss->data.info.dimen;
3298 /* We should have determined the rank of the expression by now. If
3299 not, that's bad news. */
3303 /* Loop over all the SS in the chain. */
3304 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3306 if (ss->expr && ss->expr->shape && !ss->shape)
3307 ss->shape = ss->expr->shape;
3311 case GFC_SS_SECTION:
3312 /* Get the descriptor for the array. */
3313 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3315 for (n = 0; n < ss->data.info.dimen; n++)
3316 gfc_conv_section_startstride (loop, ss, ss->data.info.dim[n]);
3319 case GFC_SS_INTRINSIC:
3320 switch (ss->expr->value.function.isym->id)
3322 /* Fall through to supply start and stride. */
3323 case GFC_ISYM_LBOUND:
3324 case GFC_ISYM_UBOUND:
3325 case GFC_ISYM_LCOBOUND:
3326 case GFC_ISYM_UCOBOUND:
3327 case GFC_ISYM_THIS_IMAGE:
3334 case GFC_SS_CONSTRUCTOR:
3335 case GFC_SS_FUNCTION:
3336 for (n = 0; n < ss->data.info.dimen; n++)
3338 int dim = ss->data.info.dim[n];
3340 ss->data.info.start[dim] = gfc_index_zero_node;
3341 ss->data.info.end[dim] = gfc_index_zero_node;
3342 ss->data.info.stride[dim] = gfc_index_one_node;
3351 /* The rest is just runtime bound checking. */
3352 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3355 tree lbound, ubound;
3357 tree size[GFC_MAX_DIMENSIONS];
3358 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3363 gfc_start_block (&block);
3365 for (n = 0; n < loop->dimen; n++)
3366 size[n] = NULL_TREE;
3368 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3372 if (ss->type != GFC_SS_SECTION)
3375 /* Catch allocatable lhs in f2003. */
3376 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3379 gfc_start_block (&inner);
3381 /* TODO: range checking for mapped dimensions. */
3382 info = &ss->data.info;
3384 /* This code only checks ranges. Elemental and vector
3385 dimensions are checked later. */
3386 for (n = 0; n < loop->dimen; n++)
3391 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
3394 if (dim == info->ref->u.ar.dimen - 1
3395 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3396 check_upper = false;
3400 /* Zero stride is not allowed. */
3401 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3402 info->stride[dim], gfc_index_zero_node);
3403 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3404 "of array '%s'", dim + 1, ss->expr->symtree->name);
3405 gfc_trans_runtime_check (true, false, tmp, &inner,
3406 &ss->expr->where, msg);
3409 desc = ss->data.info.descriptor;
3411 /* This is the run-time equivalent of resolve.c's
3412 check_dimension(). The logical is more readable there
3413 than it is here, with all the trees. */
3414 lbound = gfc_conv_array_lbound (desc, dim);
3415 end = info->end[dim];
3417 ubound = gfc_conv_array_ubound (desc, dim);
3421 /* non_zerosized is true when the selected range is not
3423 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3424 boolean_type_node, info->stride[dim],
3425 gfc_index_zero_node);
3426 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3427 info->start[dim], end);
3428 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3429 boolean_type_node, stride_pos, tmp);
3431 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3433 info->stride[dim], gfc_index_zero_node);
3434 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3435 info->start[dim], end);
3436 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3439 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3441 stride_pos, stride_neg);
3443 /* Check the start of the range against the lower and upper
3444 bounds of the array, if the range is not empty.
3445 If upper bound is present, include both bounds in the
3449 tmp = fold_build2_loc (input_location, LT_EXPR,
3451 info->start[dim], lbound);
3452 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3454 non_zerosized, tmp);
3455 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3457 info->start[dim], ubound);
3458 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3460 non_zerosized, tmp2);
3461 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3462 "outside of expected range (%%ld:%%ld)",
3463 dim + 1, ss->expr->symtree->name);
3464 gfc_trans_runtime_check (true, false, tmp, &inner,
3465 &ss->expr->where, msg,
3466 fold_convert (long_integer_type_node, info->start[dim]),
3467 fold_convert (long_integer_type_node, lbound),
3468 fold_convert (long_integer_type_node, ubound));
3469 gfc_trans_runtime_check (true, false, tmp2, &inner,
3470 &ss->expr->where, msg,
3471 fold_convert (long_integer_type_node, info->start[dim]),
3472 fold_convert (long_integer_type_node, lbound),
3473 fold_convert (long_integer_type_node, ubound));
3478 tmp = fold_build2_loc (input_location, LT_EXPR,
3480 info->start[dim], lbound);
3481 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3482 boolean_type_node, non_zerosized, tmp);
3483 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3484 "below lower bound of %%ld",
3485 dim + 1, ss->expr->symtree->name);
3486 gfc_trans_runtime_check (true, false, tmp, &inner,
3487 &ss->expr->where, msg,
3488 fold_convert (long_integer_type_node, info->start[dim]),
3489 fold_convert (long_integer_type_node, lbound));
3493 /* Compute the last element of the range, which is not
3494 necessarily "end" (think 0:5:3, which doesn't contain 5)
3495 and check it against both lower and upper bounds. */
3497 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3498 gfc_array_index_type, end,
3500 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3501 gfc_array_index_type, tmp,
3503 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3504 gfc_array_index_type, end, tmp);
3505 tmp2 = fold_build2_loc (input_location, LT_EXPR,
3506 boolean_type_node, tmp, lbound);
3507 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3508 boolean_type_node, non_zerosized, tmp2);
3511 tmp3 = fold_build2_loc (input_location, GT_EXPR,
3512 boolean_type_node, tmp, ubound);
3513 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3514 boolean_type_node, non_zerosized, tmp3);
3515 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3516 "outside of expected range (%%ld:%%ld)",
3517 dim + 1, ss->expr->symtree->name);
3518 gfc_trans_runtime_check (true, false, tmp2, &inner,
3519 &ss->expr->where, msg,
3520 fold_convert (long_integer_type_node, tmp),
3521 fold_convert (long_integer_type_node, ubound),
3522 fold_convert (long_integer_type_node, lbound));
3523 gfc_trans_runtime_check (true, false, tmp3, &inner,
3524 &ss->expr->where, msg,
3525 fold_convert (long_integer_type_node, tmp),
3526 fold_convert (long_integer_type_node, ubound),
3527 fold_convert (long_integer_type_node, lbound));
3532 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3533 "below lower bound of %%ld",
3534 dim + 1, ss->expr->symtree->name);
3535 gfc_trans_runtime_check (true, false, tmp2, &inner,
3536 &ss->expr->where, msg,
3537 fold_convert (long_integer_type_node, tmp),
3538 fold_convert (long_integer_type_node, lbound));
3542 /* Check the section sizes match. */
3543 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3544 gfc_array_index_type, end,
3546 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3547 gfc_array_index_type, tmp,
3549 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3550 gfc_array_index_type,
3551 gfc_index_one_node, tmp);
3552 tmp = fold_build2_loc (input_location, MAX_EXPR,
3553 gfc_array_index_type, tmp,
3554 build_int_cst (gfc_array_index_type, 0));
3555 /* We remember the size of the first section, and check all the
3556 others against this. */
3559 tmp3 = fold_build2_loc (input_location, NE_EXPR,
3560 boolean_type_node, tmp, size[n]);
3561 asprintf (&msg, "Array bound mismatch for dimension %d "
3562 "of array '%s' (%%ld/%%ld)",
3563 dim + 1, ss->expr->symtree->name);
3565 gfc_trans_runtime_check (true, false, tmp3, &inner,
3566 &ss->expr->where, msg,
3567 fold_convert (long_integer_type_node, tmp),
3568 fold_convert (long_integer_type_node, size[n]));
3573 size[n] = gfc_evaluate_now (tmp, &inner);
3576 tmp = gfc_finish_block (&inner);
3578 /* For optional arguments, only check bounds if the argument is
3580 if (ss->expr->symtree->n.sym->attr.optional
3581 || ss->expr->symtree->n.sym->attr.not_always_present)
3582 tmp = build3_v (COND_EXPR,
3583 gfc_conv_expr_present (ss->expr->symtree->n.sym),
3584 tmp, build_empty_stmt (input_location));
3586 gfc_add_expr_to_block (&block, tmp);
3590 tmp = gfc_finish_block (&block);
3591 gfc_add_expr_to_block (&loop->pre, tmp);
3595 /* Return true if both symbols could refer to the same data object. Does
3596 not take account of aliasing due to equivalence statements. */
3599 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
3600 bool lsym_target, bool rsym_pointer, bool rsym_target)
3602 /* Aliasing isn't possible if the symbols have different base types. */
3603 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
3606 /* Pointers can point to other pointers and target objects. */
3608 if ((lsym_pointer && (rsym_pointer || rsym_target))
3609 || (rsym_pointer && (lsym_pointer || lsym_target)))
3612 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
3613 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
3615 if (lsym_target && rsym_target
3616 && ((lsym->attr.dummy && !lsym->attr.contiguous
3617 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
3618 || (rsym->attr.dummy && !rsym->attr.contiguous
3619 && (!rsym->attr.dimension
3620 || rsym->as->type == AS_ASSUMED_SHAPE))))
3627 /* Return true if the two SS could be aliased, i.e. both point to the same data
3629 /* TODO: resolve aliases based on frontend expressions. */
3632 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
3638 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
3640 lsym = lss->expr->symtree->n.sym;
3641 rsym = rss->expr->symtree->n.sym;
3643 lsym_pointer = lsym->attr.pointer;
3644 lsym_target = lsym->attr.target;
3645 rsym_pointer = rsym->attr.pointer;
3646 rsym_target = rsym->attr.target;
3648 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
3649 rsym_pointer, rsym_target))
3652 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
3653 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
3656 /* For derived types we must check all the component types. We can ignore
3657 array references as these will have the same base type as the previous
3659 for (lref = lss->expr->ref; lref != lss->data.info.ref; lref = lref->next)
3661 if (lref->type != REF_COMPONENT)
3664 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
3665 lsym_target = lsym_target || lref->u.c.sym->attr.target;
3667 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
3668 rsym_pointer, rsym_target))
3671 if ((lsym_pointer && (rsym_pointer || rsym_target))
3672 || (rsym_pointer && (lsym_pointer || lsym_target)))
3674 if (gfc_compare_types (&lref->u.c.component->ts,
3679 for (rref = rss->expr->ref; rref != rss->data.info.ref;
3682 if (rref->type != REF_COMPONENT)
3685 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3686 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3688 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
3689 lsym_pointer, lsym_target,
3690 rsym_pointer, rsym_target))
3693 if ((lsym_pointer && (rsym_pointer || rsym_target))
3694 || (rsym_pointer && (lsym_pointer || lsym_target)))
3696 if (gfc_compare_types (&lref->u.c.component->ts,
3697 &rref->u.c.sym->ts))
3699 if (gfc_compare_types (&lref->u.c.sym->ts,
3700 &rref->u.c.component->ts))
3702 if (gfc_compare_types (&lref->u.c.component->ts,
3703 &rref->u.c.component->ts))
3709 lsym_pointer = lsym->attr.pointer;
3710 lsym_target = lsym->attr.target;
3711 lsym_pointer = lsym->attr.pointer;
3712 lsym_target = lsym->attr.target;
3714 for (rref = rss->expr->ref; rref != rss->data.info.ref; rref = rref->next)
3716 if (rref->type != REF_COMPONENT)
3719 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3720 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3722 if (symbols_could_alias (rref->u.c.sym, lsym,
3723 lsym_pointer, lsym_target,
3724 rsym_pointer, rsym_target))
3727 if ((lsym_pointer && (rsym_pointer || rsym_target))
3728 || (rsym_pointer && (lsym_pointer || lsym_target)))
3730 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
3739 /* Resolve array data dependencies. Creates a temporary if required. */
3740 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
3744 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
3753 loop->temp_ss = NULL;
3755 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
3757 if (ss->type != GFC_SS_SECTION)
3760 if (dest->expr->symtree->n.sym != ss->expr->symtree->n.sym)
3762 if (gfc_could_be_alias (dest, ss)
3763 || gfc_are_equivalenced_arrays (dest->expr, ss->expr))
3771 lref = dest->expr->ref;
3772 rref = ss->expr->ref;
3774 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
3779 for (i = 0; i < dest->data.info.dimen; i++)
3780 for (j = 0; j < ss->data.info.dimen; j++)
3782 && dest->data.info.dim[i] == ss->data.info.dim[j])
3784 /* If we don't access array elements in the same order,
3785 there is a dependency. */
3790 /* TODO : loop shifting. */
3793 /* Mark the dimensions for LOOP SHIFTING */
3794 for (n = 0; n < loop->dimen; n++)
3796 int dim = dest->data.info.dim[n];
3798 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3800 else if (! gfc_is_same_range (&lref->u.ar,
3801 &rref->u.ar, dim, 0))
3805 /* Put all the dimensions with dependencies in the
3808 for (n = 0; n < loop->dimen; n++)
3810 gcc_assert (loop->order[n] == n);
3812 loop->order[dim++] = n;
3814 for (n = 0; n < loop->dimen; n++)
3817 loop->order[dim++] = n;
3820 gcc_assert (dim == loop->dimen);
3831 tree base_type = gfc_typenode_for_spec (&dest->expr->ts);
3832 if (GFC_ARRAY_TYPE_P (base_type)
3833 || GFC_DESCRIPTOR_TYPE_P (base_type))
3834 base_type = gfc_get_element_type (base_type);
3835 loop->temp_ss = gfc_get_temp_ss (base_type, dest->string_length,
3837 gfc_add_ss_to_loop (loop, loop->temp_ss);
3840 loop->temp_ss = NULL;
3844 /* Initialize the scalarization loop. Creates the loop variables. Determines
3845 the range of the loop variables. Creates a temporary if required.
3846 Calculates how to transform from loop variables to array indices for each
3847 expression. Also generates code for scalar expressions which have been
3848 moved outside the loop. */
3851 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
3853 int n, dim, spec_dim;
3855 gfc_ss_info *specinfo;
3858 gfc_ss *loopspec[GFC_MAX_DIMENSIONS];
3859 bool dynamic[GFC_MAX_DIMENSIONS];
3864 for (n = 0; n < loop->dimen; n++)
3868 /* We use one SS term, and use that to determine the bounds of the
3869 loop for this dimension. We try to pick the simplest term. */
3870 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3872 gfc_ss_type ss_type;
3875 if (ss_type == GFC_SS_SCALAR
3876 || ss_type == GFC_SS_TEMP
3877 || ss_type == GFC_SS_REFERENCE)
3880 info = &ss->data.info;
3883 if (loopspec[n] != NULL)
3885 specinfo = &loopspec[n]->data.info;
3886 spec_dim = specinfo->dim[n];
3890 /* Silence unitialized warnings. */
3897 gcc_assert (ss->shape[dim]);
3898 /* The frontend has worked out the size for us. */
3900 || !loopspec[n]->shape
3901 || !integer_zerop (specinfo->start[spec_dim]))
3902 /* Prefer zero-based descriptors if possible. */
3907 if (ss->type == GFC_SS_CONSTRUCTOR)
3909 gfc_constructor_base base;
3910 /* An unknown size constructor will always be rank one.
3911 Higher rank constructors will either have known shape,
3912 or still be wrapped in a call to reshape. */
3913 gcc_assert (loop->dimen == 1);
3915 /* Always prefer to use the constructor bounds if the size
3916 can be determined at compile time. Prefer not to otherwise,
3917 since the general case involves realloc, and it's better to
3918 avoid that overhead if possible. */
3919 base = ss->expr->value.constructor;
3920 dynamic[n] = gfc_get_array_constructor_size (&i, base);
3921 if (!dynamic[n] || !loopspec[n])
3926 /* TODO: Pick the best bound if we have a choice between a
3927 function and something else. */
3928 if (ss->type == GFC_SS_FUNCTION)
3934 /* Avoid using an allocatable lhs in an assignment, since
3935 there might be a reallocation coming. */
3936 if (loopspec[n] && ss->is_alloc_lhs)
3939 if (ss->type != GFC_SS_SECTION)
3944 /* Criteria for choosing a loop specifier (most important first):
3945 doesn't need realloc
3951 else if ((loopspec[n]->type == GFC_SS_CONSTRUCTOR && dynamic[n])
3952 || n >= loop->dimen)
3954 else if (integer_onep (info->stride[dim])
3955 && !integer_onep (specinfo->stride[spec_dim]))
3957 else if (INTEGER_CST_P (info->stride[dim])
3958 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
3960 else if (INTEGER_CST_P (info->start[dim])
3961 && !INTEGER_CST_P (specinfo->start[spec_dim]))
3963 /* We don't work out the upper bound.
3964 else if (INTEGER_CST_P (info->finish[n])
3965 && ! INTEGER_CST_P (specinfo->finish[n]))
3966 loopspec[n] = ss; */
3969 /* We should have found the scalarization loop specifier. If not,
3971 gcc_assert (loopspec[n]);
3973 info = &loopspec[n]->data.info;
3976 /* Set the extents of this range. */
3977 cshape = loopspec[n]->shape;
3978 if (cshape && INTEGER_CST_P (info->start[dim])
3979 && INTEGER_CST_P (info->stride[dim]))
3981 loop->from[n] = info->start[dim];
3982 mpz_set (i, cshape[get_array_ref_dim (loopspec[n], n)]);
3983 mpz_sub_ui (i, i, 1);
3984 /* To = from + (size - 1) * stride. */
3985 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
3986 if (!integer_onep (info->stride[dim]))
3987 tmp = fold_build2_loc (input_location, MULT_EXPR,
3988 gfc_array_index_type, tmp,
3990 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
3991 gfc_array_index_type,
3992 loop->from[n], tmp);
3996 loop->from[n] = info->start[dim];
3997 switch (loopspec[n]->type)
3999 case GFC_SS_CONSTRUCTOR:
4000 /* The upper bound is calculated when we expand the
4002 gcc_assert (loop->to[n] == NULL_TREE);
4005 case GFC_SS_SECTION:
4006 /* Use the end expression if it exists and is not constant,
4007 so that it is only evaluated once. */
4008 loop->to[n] = info->end[dim];
4011 case GFC_SS_FUNCTION:
4012 /* The loop bound will be set when we generate the call. */
4013 gcc_assert (loop->to[n] == NULL_TREE);
4021 /* Transform everything so we have a simple incrementing variable. */
4022 if (n < loop->dimen && integer_onep (info->stride[dim]))
4023 info->delta[dim] = gfc_index_zero_node;
4024 else if (n < loop->dimen)
4026 /* Set the delta for this section. */
4027 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
4028 /* Number of iterations is (end - start + step) / step.
4029 with start = 0, this simplifies to
4031 for (i = 0; i<=last; i++){...}; */
4032 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4033 gfc_array_index_type, loop->to[n],
4035 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4036 gfc_array_index_type, tmp, info->stride[dim]);
4037 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4038 tmp, build_int_cst (gfc_array_index_type, -1));
4039 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
4040 /* Make the loop variable start at 0. */
4041 loop->from[n] = gfc_index_zero_node;
4045 /* Add all the scalar code that can be taken out of the loops.
4046 This may include calculating the loop bounds, so do it before
4047 allocating the temporary. */
4048 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4050 /* If we want a temporary then create it. */
4051 if (loop->temp_ss != NULL)
4053 gcc_assert (loop->temp_ss->type == GFC_SS_TEMP);
4055 /* Make absolutely sure that this is a complete type. */
4056 if (loop->temp_ss->string_length)
4057 loop->temp_ss->data.temp.type
4058 = gfc_get_character_type_len_for_eltype
4059 (TREE_TYPE (loop->temp_ss->data.temp.type),
4060 loop->temp_ss->string_length);
4062 tmp = loop->temp_ss->data.temp.type;
4063 n = loop->temp_ss->data.temp.dimen;
4064 memset (&loop->temp_ss->data.info, 0, sizeof (gfc_ss_info));
4065 loop->temp_ss->type = GFC_SS_SECTION;
4066 loop->temp_ss->data.info.dimen = n;
4068 gcc_assert (loop->temp_ss->data.info.dimen != 0);
4069 for (n = 0; n < loop->temp_ss->data.info.dimen; n++)
4070 loop->temp_ss->data.info.dim[n] = n;
4072 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
4073 loop->temp_ss, tmp, NULL_TREE,
4074 false, true, false, where);
4077 for (n = 0; n < loop->temp_dim; n++)
4078 loopspec[loop->order[n]] = NULL;
4082 /* For array parameters we don't have loop variables, so don't calculate the
4084 if (loop->array_parameter)
4087 /* Calculate the translation from loop variables to array indices. */
4088 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4090 if (ss->type != GFC_SS_SECTION && ss->type != GFC_SS_COMPONENT
4091 && ss->type != GFC_SS_CONSTRUCTOR)
4095 info = &ss->data.info;
4097 for (n = 0; n < info->dimen; n++)
4099 /* If we are specifying the range the delta is already set. */
4100 if (loopspec[n] != ss)
4102 dim = ss->data.info.dim[n];
4104 /* Calculate the offset relative to the loop variable.
4105 First multiply by the stride. */
4106 tmp = loop->from[n];
4107 if (!integer_onep (info->stride[dim]))
4108 tmp = fold_build2_loc (input_location, MULT_EXPR,
4109 gfc_array_index_type,
4110 tmp, info->stride[dim]);
4112 /* Then subtract this from our starting value. */
4113 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4114 gfc_array_index_type,
4115 info->start[dim], tmp);
4117 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4124 /* Calculate the size of a given array dimension from the bounds. This
4125 is simply (ubound - lbound + 1) if this expression is positive
4126 or 0 if it is negative (pick either one if it is zero). Optionally
4127 (if or_expr is present) OR the (expression != 0) condition to it. */
4130 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4135 /* Calculate (ubound - lbound + 1). */
4136 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4138 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4139 gfc_index_one_node);
4141 /* Check whether the size for this dimension is negative. */
4142 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4143 gfc_index_zero_node);
4144 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4145 gfc_index_zero_node, res);
4147 /* Build OR expression. */
4149 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4150 boolean_type_node, *or_expr, cond);
4156 /* For an array descriptor, get the total number of elements. This is just
4157 the product of the extents along from_dim to to_dim. */
4160 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4165 res = gfc_index_one_node;
4167 for (dim = from_dim; dim < to_dim; ++dim)
4173 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4174 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4176 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4177 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4185 /* Full size of an array. */
4188 gfc_conv_descriptor_size (tree desc, int rank)
4190 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4194 /* Size of a coarray for all dimensions but the last. */
4197 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4199 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4203 /* Fills in an array descriptor, and returns the size of the array.
4204 The size will be a simple_val, ie a variable or a constant. Also
4205 calculates the offset of the base. The pointer argument overflow,
4206 which should be of integer type, will increase in value if overflow
4207 occurs during the size calculation. Returns the size of the array.
4211 for (n = 0; n < rank; n++)
4213 a.lbound[n] = specified_lower_bound;
4214 offset = offset + a.lbond[n] * stride;
4216 a.ubound[n] = specified_upper_bound;
4217 a.stride[n] = stride;
4218 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4219 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4220 stride = stride * size;
4222 for (n = rank; n < rank+corank; n++)
4223 (Set lcobound/ucobound as above.)
4224 element_size = sizeof (array element);
4227 stride = (size_t) stride;
4228 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4229 stride = stride * element_size;
4235 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4236 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4237 stmtblock_t * descriptor_block, tree * overflow)
4250 stmtblock_t thenblock;
4251 stmtblock_t elseblock;
4256 type = TREE_TYPE (descriptor);
4258 stride = gfc_index_one_node;
4259 offset = gfc_index_zero_node;
4261 /* Set the dtype. */
4262 tmp = gfc_conv_descriptor_dtype (descriptor);
4263 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
4265 or_expr = boolean_false_node;
4267 for (n = 0; n < rank; n++)
4272 /* We have 3 possibilities for determining the size of the array:
4273 lower == NULL => lbound = 1, ubound = upper[n]
4274 upper[n] = NULL => lbound = 1, ubound = lower[n]
4275 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4278 /* Set lower bound. */
4279 gfc_init_se (&se, NULL);
4281 se.expr = gfc_index_one_node;
4284 gcc_assert (lower[n]);
4287 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4288 gfc_add_block_to_block (pblock, &se.pre);
4292 se.expr = gfc_index_one_node;
4296 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4297 gfc_rank_cst[n], se.expr);
4298 conv_lbound = se.expr;
4300 /* Work out the offset for this component. */
4301 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4303 offset = fold_build2_loc (input_location, MINUS_EXPR,
4304 gfc_array_index_type, offset, tmp);
4306 /* Set upper bound. */
4307 gfc_init_se (&se, NULL);
4308 gcc_assert (ubound);
4309 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4310 gfc_add_block_to_block (pblock, &se.pre);
4312 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4313 gfc_rank_cst[n], se.expr);
4314 conv_ubound = se.expr;
4316 /* Store the stride. */
4317 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4318 gfc_rank_cst[n], stride);
4320 /* Calculate size and check whether extent is negative. */
4321 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4322 size = gfc_evaluate_now (size, pblock);
4324 /* Check whether multiplying the stride by the number of
4325 elements in this dimension would overflow. We must also check
4326 whether the current dimension has zero size in order to avoid
4329 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4330 gfc_array_index_type,
4331 fold_convert (gfc_array_index_type,
4332 TYPE_MAX_VALUE (gfc_array_index_type)),
4334 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4335 boolean_type_node, tmp, stride));
4336 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4337 integer_one_node, integer_zero_node);
4338 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4339 boolean_type_node, size,
4340 gfc_index_zero_node));
4341 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4342 integer_zero_node, tmp);
4343 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4345 *overflow = gfc_evaluate_now (tmp, pblock);
4347 /* Multiply the stride by the number of elements in this dimension. */
4348 stride = fold_build2_loc (input_location, MULT_EXPR,
4349 gfc_array_index_type, stride, size);
4350 stride = gfc_evaluate_now (stride, pblock);
4353 for (n = rank; n < rank + corank; n++)
4357 /* Set lower bound. */
4358 gfc_init_se (&se, NULL);
4359 if (lower == NULL || lower[n] == NULL)
4361 gcc_assert (n == rank + corank - 1);
4362 se.expr = gfc_index_one_node;
4366 if (ubound || n == rank + corank - 1)
4368 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4369 gfc_add_block_to_block (pblock, &se.pre);
4373 se.expr = gfc_index_one_node;
4377 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4378 gfc_rank_cst[n], se.expr);
4380 if (n < rank + corank - 1)
4382 gfc_init_se (&se, NULL);
4383 gcc_assert (ubound);
4384 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4385 gfc_add_block_to_block (pblock, &se.pre);
4386 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4387 gfc_rank_cst[n], se.expr);
4391 /* The stride is the number of elements in the array, so multiply by the
4392 size of an element to get the total size. */
4393 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4394 /* Convert to size_t. */
4395 element_size = fold_convert (size_type_node, tmp);
4398 return element_size;
4400 stride = fold_convert (size_type_node, stride);
4402 /* First check for overflow. Since an array of type character can
4403 have zero element_size, we must check for that before
4405 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4407 TYPE_MAX_VALUE (size_type_node), element_size);
4408 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4409 boolean_type_node, tmp, stride));
4410 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4411 integer_one_node, integer_zero_node);
4412 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4413 boolean_type_node, element_size,
4414 build_int_cst (size_type_node, 0)));
4415 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4416 integer_zero_node, tmp);
4417 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4419 *overflow = gfc_evaluate_now (tmp, pblock);
4421 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
4422 stride, element_size);
4424 if (poffset != NULL)
4426 offset = gfc_evaluate_now (offset, pblock);
4430 if (integer_zerop (or_expr))
4432 if (integer_onep (or_expr))
4433 return build_int_cst (size_type_node, 0);
4435 var = gfc_create_var (TREE_TYPE (size), "size");
4436 gfc_start_block (&thenblock);
4437 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
4438 thencase = gfc_finish_block (&thenblock);
4440 gfc_start_block (&elseblock);
4441 gfc_add_modify (&elseblock, var, size);
4442 elsecase = gfc_finish_block (&elseblock);
4444 tmp = gfc_evaluate_now (or_expr, pblock);
4445 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
4446 gfc_add_expr_to_block (pblock, tmp);
4452 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
4453 the work for an ALLOCATE statement. */
4457 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
4462 tree offset = NULL_TREE;
4463 tree token = NULL_TREE;
4466 tree error = NULL_TREE;
4467 tree overflow; /* Boolean storing whether size calculation overflows. */
4468 tree var_overflow = NULL_TREE;
4470 tree set_descriptor;
4471 stmtblock_t set_descriptor_block;
4472 stmtblock_t elseblock;
4475 gfc_ref *ref, *prev_ref = NULL;
4476 bool allocatable, coarray, dimension;
4480 /* Find the last reference in the chain. */
4481 while (ref && ref->next != NULL)
4483 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
4484 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
4489 if (ref == NULL || ref->type != REF_ARRAY)
4494 allocatable = expr->symtree->n.sym->attr.allocatable;
4495 coarray = expr->symtree->n.sym->attr.codimension;
4496 dimension = expr->symtree->n.sym->attr.dimension;
4500 allocatable = prev_ref->u.c.component->attr.allocatable;
4501 coarray = prev_ref->u.c.component->attr.codimension;
4502 dimension = prev_ref->u.c.component->attr.dimension;
4506 gcc_assert (coarray);
4508 /* Figure out the size of the array. */
4509 switch (ref->u.ar.type)
4515 upper = ref->u.ar.start;
4521 lower = ref->u.ar.start;
4522 upper = ref->u.ar.end;
4526 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
4528 lower = ref->u.ar.as->lower;
4529 upper = ref->u.ar.as->upper;
4537 overflow = integer_zero_node;
4539 gfc_init_block (&set_descriptor_block);
4540 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
4541 ref->u.ar.as->corank, &offset, lower, upper,
4542 &se->pre, &set_descriptor_block, &overflow);
4547 var_overflow = gfc_create_var (integer_type_node, "overflow");
4548 gfc_add_modify (&se->pre, var_overflow, overflow);
4550 /* Generate the block of code handling overflow. */
4551 msg = gfc_build_addr_expr (pchar_type_node,
4552 gfc_build_localized_cstring_const
4553 ("Integer overflow when calculating the amount of "
4554 "memory to allocate"));
4555 error = build_call_expr_loc (input_location, gfor_fndecl_runtime_error,
4559 if (status != NULL_TREE)
4561 tree status_type = TREE_TYPE (status);
4562 stmtblock_t set_status_block;
4564 gfc_start_block (&set_status_block);
4565 gfc_add_modify (&set_status_block, status,
4566 build_int_cst (status_type, LIBERROR_ALLOCATION));
4567 error = gfc_finish_block (&set_status_block);
4570 gfc_start_block (&elseblock);
4572 /* Allocate memory to store the data. */
4573 pointer = gfc_conv_descriptor_data_get (se->expr);
4574 STRIP_NOPS (pointer);
4576 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
4577 token = gfc_build_addr_expr (NULL_TREE,
4578 gfc_conv_descriptor_token (se->expr));
4580 /* The allocatable variant takes the old pointer as first argument. */
4582 gfc_allocate_allocatable (&elseblock, pointer, size, token,
4583 status, errmsg, errlen, expr);
4585 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
4589 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
4590 boolean_type_node, var_overflow, integer_zero_node));
4591 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
4592 error, gfc_finish_block (&elseblock));
4595 tmp = gfc_finish_block (&elseblock);
4597 gfc_add_expr_to_block (&se->pre, tmp);
4599 /* Update the array descriptors. */
4601 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
4603 set_descriptor = gfc_finish_block (&set_descriptor_block);
4604 if (status != NULL_TREE)
4606 cond = fold_build2_loc (input_location, EQ_EXPR,
4607 boolean_type_node, status,
4608 build_int_cst (TREE_TYPE (status), 0));
4609 gfc_add_expr_to_block (&se->pre,
4610 fold_build3_loc (input_location, COND_EXPR, void_type_node,
4611 gfc_likely (cond), set_descriptor,
4612 build_empty_stmt (input_location)));
4615 gfc_add_expr_to_block (&se->pre, set_descriptor);
4617 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
4618 && expr->ts.u.derived->attr.alloc_comp)
4620 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
4621 ref->u.ar.as->rank);
4622 gfc_add_expr_to_block (&se->pre, tmp);
4629 /* Deallocate an array variable. Also used when an allocated variable goes
4634 gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
4640 gfc_start_block (&block);
4641 /* Get a pointer to the data. */
4642 var = gfc_conv_descriptor_data_get (descriptor);
4645 /* Parameter is the address of the data component. */
4646 tmp = gfc_deallocate_with_status (var, pstat, false, expr);
4647 gfc_add_expr_to_block (&block, tmp);
4649 /* Zero the data pointer. */
4650 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4651 var, build_int_cst (TREE_TYPE (var), 0));
4652 gfc_add_expr_to_block (&block, tmp);
4654 return gfc_finish_block (&block);
4658 /* Create an array constructor from an initialization expression.
4659 We assume the frontend already did any expansions and conversions. */
4662 gfc_conv_array_initializer (tree type, gfc_expr * expr)
4668 unsigned HOST_WIDE_INT lo;
4670 VEC(constructor_elt,gc) *v = NULL;
4672 switch (expr->expr_type)
4675 case EXPR_STRUCTURE:
4676 /* A single scalar or derived type value. Create an array with all
4677 elements equal to that value. */
4678 gfc_init_se (&se, NULL);
4680 if (expr->expr_type == EXPR_CONSTANT)
4681 gfc_conv_constant (&se, expr);
4683 gfc_conv_structure (&se, expr, 1);
4685 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4686 gcc_assert (tmp && INTEGER_CST_P (tmp));
4687 hi = TREE_INT_CST_HIGH (tmp);
4688 lo = TREE_INT_CST_LOW (tmp);
4692 /* This will probably eat buckets of memory for large arrays. */
4693 while (hi != 0 || lo != 0)
4695 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
4703 /* Create a vector of all the elements. */
4704 for (c = gfc_constructor_first (expr->value.constructor);
4705 c; c = gfc_constructor_next (c))
4709 /* Problems occur when we get something like
4710 integer :: a(lots) = (/(i, i=1, lots)/) */
4711 gfc_fatal_error ("The number of elements in the array constructor "
4712 "at %L requires an increase of the allowed %d "
4713 "upper limit. See -fmax-array-constructor "
4714 "option", &expr->where,
4715 gfc_option.flag_max_array_constructor);
4718 if (mpz_cmp_si (c->offset, 0) != 0)
4719 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4723 if (mpz_cmp_si (c->repeat, 1) > 0)
4729 mpz_add (maxval, c->offset, c->repeat);
4730 mpz_sub_ui (maxval, maxval, 1);
4731 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4732 if (mpz_cmp_si (c->offset, 0) != 0)
4734 mpz_add_ui (maxval, c->offset, 1);
4735 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4738 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4740 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
4746 gfc_init_se (&se, NULL);
4747 switch (c->expr->expr_type)
4750 gfc_conv_constant (&se, c->expr);
4753 case EXPR_STRUCTURE:
4754 gfc_conv_structure (&se, c->expr, 1);
4758 /* Catch those occasional beasts that do not simplify
4759 for one reason or another, assuming that if they are
4760 standard defying the frontend will catch them. */
4761 gfc_conv_expr (&se, c->expr);
4765 if (range == NULL_TREE)
4766 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4769 if (index != NULL_TREE)
4770 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4771 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
4777 return gfc_build_null_descriptor (type);
4783 /* Create a constructor from the list of elements. */
4784 tmp = build_constructor (type, v);
4785 TREE_CONSTANT (tmp) = 1;
4790 /* Generate code to evaluate non-constant coarray cobounds. */
4793 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
4794 const gfc_symbol *sym)
4804 for (dim = as->rank; dim < as->rank + as->corank; dim++)
4806 /* Evaluate non-constant array bound expressions. */
4807 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4808 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4810 gfc_init_se (&se, NULL);
4811 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4812 gfc_add_block_to_block (pblock, &se.pre);
4813 gfc_add_modify (pblock, lbound, se.expr);
4815 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4816 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4818 gfc_init_se (&se, NULL);
4819 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4820 gfc_add_block_to_block (pblock, &se.pre);
4821 gfc_add_modify (pblock, ubound, se.expr);
4827 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
4828 returns the size (in elements) of the array. */
4831 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
4832 stmtblock_t * pblock)
4847 size = gfc_index_one_node;
4848 offset = gfc_index_zero_node;
4849 for (dim = 0; dim < as->rank; dim++)
4851 /* Evaluate non-constant array bound expressions. */
4852 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4853 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4855 gfc_init_se (&se, NULL);
4856 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4857 gfc_add_block_to_block (pblock, &se.pre);
4858 gfc_add_modify (pblock, lbound, se.expr);
4860 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4861 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4863 gfc_init_se (&se, NULL);
4864 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4865 gfc_add_block_to_block (pblock, &se.pre);
4866 gfc_add_modify (pblock, ubound, se.expr);
4868 /* The offset of this dimension. offset = offset - lbound * stride. */
4869 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4871 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4874 /* The size of this dimension, and the stride of the next. */
4875 if (dim + 1 < as->rank)
4876 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
4878 stride = GFC_TYPE_ARRAY_SIZE (type);
4880 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
4882 /* Calculate stride = size * (ubound + 1 - lbound). */
4883 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4884 gfc_array_index_type,
4885 gfc_index_one_node, lbound);
4886 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4887 gfc_array_index_type, ubound, tmp);
4888 tmp = fold_build2_loc (input_location, MULT_EXPR,
4889 gfc_array_index_type, size, tmp);
4891 gfc_add_modify (pblock, stride, tmp);
4893 stride = gfc_evaluate_now (tmp, pblock);
4895 /* Make sure that negative size arrays are translated
4896 to being zero size. */
4897 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4898 stride, gfc_index_zero_node);
4899 tmp = fold_build3_loc (input_location, COND_EXPR,
4900 gfc_array_index_type, tmp,
4901 stride, gfc_index_zero_node);
4902 gfc_add_modify (pblock, stride, tmp);
4908 gfc_trans_array_cobounds (type, pblock, sym);
4909 gfc_trans_vla_type_sizes (sym, pblock);
4916 /* Generate code to initialize/allocate an array variable. */
4919 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
4920 gfc_wrapped_block * block)
4924 tree tmp = NULL_TREE;
4931 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
4933 /* Do nothing for USEd variables. */
4934 if (sym->attr.use_assoc)
4937 type = TREE_TYPE (decl);
4938 gcc_assert (GFC_ARRAY_TYPE_P (type));
4939 onstack = TREE_CODE (type) != POINTER_TYPE;
4941 gfc_init_block (&init);
4943 /* Evaluate character string length. */
4944 if (sym->ts.type == BT_CHARACTER
4945 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4947 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4949 gfc_trans_vla_type_sizes (sym, &init);
4951 /* Emit a DECL_EXPR for this variable, which will cause the
4952 gimplifier to allocate storage, and all that good stuff. */
4953 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
4954 gfc_add_expr_to_block (&init, tmp);
4959 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
4963 type = TREE_TYPE (type);
4965 gcc_assert (!sym->attr.use_assoc);
4966 gcc_assert (!TREE_STATIC (decl));
4967 gcc_assert (!sym->module);
4969 if (sym->ts.type == BT_CHARACTER
4970 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4971 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4973 size = gfc_trans_array_bounds (type, sym, &offset, &init);
4975 /* Don't actually allocate space for Cray Pointees. */
4976 if (sym->attr.cray_pointee)
4978 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4979 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4981 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
4985 if (gfc_option.flag_stack_arrays)
4987 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
4988 space = build_decl (sym->declared_at.lb->location,
4989 VAR_DECL, create_tmp_var_name ("A"),
4990 TREE_TYPE (TREE_TYPE (decl)));
4991 gfc_trans_vla_type_sizes (sym, &init);
4995 /* The size is the number of elements in the array, so multiply by the
4996 size of an element to get the total size. */
4997 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4998 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4999 size, fold_convert (gfc_array_index_type, tmp));
5001 /* Allocate memory to hold the data. */
5002 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5003 gfc_add_modify (&init, decl, tmp);
5005 /* Free the temporary. */
5006 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5010 /* Set offset of the array. */
5011 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5012 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5014 /* Automatic arrays should not have initializers. */
5015 gcc_assert (!sym->value);
5017 inittree = gfc_finish_block (&init);
5024 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5025 where also space is located. */
5026 gfc_init_block (&init);
5027 tmp = fold_build1_loc (input_location, DECL_EXPR,
5028 TREE_TYPE (space), space);
5029 gfc_add_expr_to_block (&init, tmp);
5030 addr = fold_build1_loc (sym->declared_at.lb->location,
5031 ADDR_EXPR, TREE_TYPE (decl), space);
5032 gfc_add_modify (&init, decl, addr);
5033 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5036 gfc_add_init_cleanup (block, inittree, tmp);
5040 /* Generate entry and exit code for g77 calling convention arrays. */
5043 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5053 gfc_save_backend_locus (&loc);
5054 gfc_set_backend_locus (&sym->declared_at);
5056 /* Descriptor type. */
5057 parm = sym->backend_decl;
5058 type = TREE_TYPE (parm);
5059 gcc_assert (GFC_ARRAY_TYPE_P (type));
5061 gfc_start_block (&init);
5063 if (sym->ts.type == BT_CHARACTER
5064 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5065 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5067 /* Evaluate the bounds of the array. */
5068 gfc_trans_array_bounds (type, sym, &offset, &init);
5070 /* Set the offset. */
5071 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5072 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5074 /* Set the pointer itself if we aren't using the parameter directly. */
5075 if (TREE_CODE (parm) != PARM_DECL)
5077 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5078 gfc_add_modify (&init, parm, tmp);
5080 stmt = gfc_finish_block (&init);
5082 gfc_restore_backend_locus (&loc);
5084 /* Add the initialization code to the start of the function. */
5086 if (sym->attr.optional || sym->attr.not_always_present)
5088 tmp = gfc_conv_expr_present (sym);
5089 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5092 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5096 /* Modify the descriptor of an array parameter so that it has the
5097 correct lower bound. Also move the upper bound accordingly.
5098 If the array is not packed, it will be copied into a temporary.
5099 For each dimension we set the new lower and upper bounds. Then we copy the
5100 stride and calculate the offset for this dimension. We also work out
5101 what the stride of a packed array would be, and see it the two match.
5102 If the array need repacking, we set the stride to the values we just
5103 calculated, recalculate the offset and copy the array data.
5104 Code is also added to copy the data back at the end of the function.
5108 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5109 gfc_wrapped_block * block)
5116 tree stmtInit, stmtCleanup;
5123 tree stride, stride2;
5133 /* Do nothing for pointer and allocatable arrays. */
5134 if (sym->attr.pointer || sym->attr.allocatable)
5137 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5139 gfc_trans_g77_array (sym, block);
5143 gfc_save_backend_locus (&loc);
5144 gfc_set_backend_locus (&sym->declared_at);
5146 /* Descriptor type. */
5147 type = TREE_TYPE (tmpdesc);
5148 gcc_assert (GFC_ARRAY_TYPE_P (type));
5149 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5150 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5151 gfc_start_block (&init);
5153 if (sym->ts.type == BT_CHARACTER
5154 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5155 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5157 checkparm = (sym->as->type == AS_EXPLICIT
5158 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5160 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5161 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5163 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5165 /* For non-constant shape arrays we only check if the first dimension
5166 is contiguous. Repacking higher dimensions wouldn't gain us
5167 anything as we still don't know the array stride. */
5168 partial = gfc_create_var (boolean_type_node, "partial");
5169 TREE_USED (partial) = 1;
5170 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5171 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5172 gfc_index_one_node);
5173 gfc_add_modify (&init, partial, tmp);
5176 partial = NULL_TREE;
5178 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5179 here, however I think it does the right thing. */
5182 /* Set the first stride. */
5183 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5184 stride = gfc_evaluate_now (stride, &init);
5186 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5187 stride, gfc_index_zero_node);
5188 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5189 tmp, gfc_index_one_node, stride);
5190 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5191 gfc_add_modify (&init, stride, tmp);
5193 /* Allow the user to disable array repacking. */
5194 stmt_unpacked = NULL_TREE;
5198 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5199 /* A library call to repack the array if necessary. */
5200 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5201 stmt_unpacked = build_call_expr_loc (input_location,
5202 gfor_fndecl_in_pack, 1, tmp);
5204 stride = gfc_index_one_node;
5206 if (gfc_option.warn_array_temp)
5207 gfc_warning ("Creating array temporary at %L", &loc);
5210 /* This is for the case where the array data is used directly without
5211 calling the repack function. */
5212 if (no_repack || partial != NULL_TREE)
5213 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5215 stmt_packed = NULL_TREE;
5217 /* Assign the data pointer. */
5218 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5220 /* Don't repack unknown shape arrays when the first stride is 1. */
5221 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5222 partial, stmt_packed, stmt_unpacked);
5225 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5226 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5228 offset = gfc_index_zero_node;
5229 size = gfc_index_one_node;
5231 /* Evaluate the bounds of the array. */
5232 for (n = 0; n < sym->as->rank; n++)
5234 if (checkparm || !sym->as->upper[n])
5236 /* Get the bounds of the actual parameter. */
5237 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5238 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5242 dubound = NULL_TREE;
5243 dlbound = NULL_TREE;
5246 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5247 if (!INTEGER_CST_P (lbound))
5249 gfc_init_se (&se, NULL);
5250 gfc_conv_expr_type (&se, sym->as->lower[n],
5251 gfc_array_index_type);
5252 gfc_add_block_to_block (&init, &se.pre);
5253 gfc_add_modify (&init, lbound, se.expr);
5256 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5257 /* Set the desired upper bound. */
5258 if (sym->as->upper[n])
5260 /* We know what we want the upper bound to be. */
5261 if (!INTEGER_CST_P (ubound))
5263 gfc_init_se (&se, NULL);
5264 gfc_conv_expr_type (&se, sym->as->upper[n],
5265 gfc_array_index_type);
5266 gfc_add_block_to_block (&init, &se.pre);
5267 gfc_add_modify (&init, ubound, se.expr);
5270 /* Check the sizes match. */
5273 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5277 temp = fold_build2_loc (input_location, MINUS_EXPR,
5278 gfc_array_index_type, ubound, lbound);
5279 temp = fold_build2_loc (input_location, PLUS_EXPR,
5280 gfc_array_index_type,
5281 gfc_index_one_node, temp);
5282 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5283 gfc_array_index_type, dubound,
5285 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5286 gfc_array_index_type,
5287 gfc_index_one_node, stride2);
5288 tmp = fold_build2_loc (input_location, NE_EXPR,
5289 gfc_array_index_type, temp, stride2);
5290 asprintf (&msg, "Dimension %d of array '%s' has extent "
5291 "%%ld instead of %%ld", n+1, sym->name);
5293 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5294 fold_convert (long_integer_type_node, temp),
5295 fold_convert (long_integer_type_node, stride2));
5302 /* For assumed shape arrays move the upper bound by the same amount
5303 as the lower bound. */
5304 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5305 gfc_array_index_type, dubound, dlbound);
5306 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5307 gfc_array_index_type, tmp, lbound);
5308 gfc_add_modify (&init, ubound, tmp);
5310 /* The offset of this dimension. offset = offset - lbound * stride. */
5311 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5313 offset = fold_build2_loc (input_location, MINUS_EXPR,
5314 gfc_array_index_type, offset, tmp);
5316 /* The size of this dimension, and the stride of the next. */
5317 if (n + 1 < sym->as->rank)
5319 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5321 if (no_repack || partial != NULL_TREE)
5323 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5325 /* Figure out the stride if not a known constant. */
5326 if (!INTEGER_CST_P (stride))
5329 stmt_packed = NULL_TREE;
5332 /* Calculate stride = size * (ubound + 1 - lbound). */
5333 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5334 gfc_array_index_type,
5335 gfc_index_one_node, lbound);
5336 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5337 gfc_array_index_type, ubound, tmp);
5338 size = fold_build2_loc (input_location, MULT_EXPR,
5339 gfc_array_index_type, size, tmp);
5343 /* Assign the stride. */
5344 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5345 tmp = fold_build3_loc (input_location, COND_EXPR,
5346 gfc_array_index_type, partial,
5347 stmt_unpacked, stmt_packed);
5349 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5350 gfc_add_modify (&init, stride, tmp);
5355 stride = GFC_TYPE_ARRAY_SIZE (type);
5357 if (stride && !INTEGER_CST_P (stride))
5359 /* Calculate size = stride * (ubound + 1 - lbound). */
5360 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5361 gfc_array_index_type,
5362 gfc_index_one_node, lbound);
5363 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5364 gfc_array_index_type,
5366 tmp = fold_build2_loc (input_location, MULT_EXPR,
5367 gfc_array_index_type,
5368 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5369 gfc_add_modify (&init, stride, tmp);
5374 gfc_trans_array_cobounds (type, &init, sym);
5376 /* Set the offset. */
5377 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5378 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5380 gfc_trans_vla_type_sizes (sym, &init);
5382 stmtInit = gfc_finish_block (&init);
5384 /* Only do the entry/initialization code if the arg is present. */
5385 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5386 optional_arg = (sym->attr.optional
5387 || (sym->ns->proc_name->attr.entry_master
5388 && sym->attr.dummy));
5391 tmp = gfc_conv_expr_present (sym);
5392 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
5393 build_empty_stmt (input_location));
5398 stmtCleanup = NULL_TREE;
5401 stmtblock_t cleanup;
5402 gfc_start_block (&cleanup);
5404 if (sym->attr.intent != INTENT_IN)
5406 /* Copy the data back. */
5407 tmp = build_call_expr_loc (input_location,
5408 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
5409 gfc_add_expr_to_block (&cleanup, tmp);
5412 /* Free the temporary. */
5413 tmp = gfc_call_free (tmpdesc);
5414 gfc_add_expr_to_block (&cleanup, tmp);
5416 stmtCleanup = gfc_finish_block (&cleanup);
5418 /* Only do the cleanup if the array was repacked. */
5419 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
5420 tmp = gfc_conv_descriptor_data_get (tmp);
5421 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
5423 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5424 build_empty_stmt (input_location));
5428 tmp = gfc_conv_expr_present (sym);
5429 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5430 build_empty_stmt (input_location));
5434 /* We don't need to free any memory allocated by internal_pack as it will
5435 be freed at the end of the function by pop_context. */
5436 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
5438 gfc_restore_backend_locus (&loc);
5442 /* Calculate the overall offset, including subreferences. */
5444 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
5445 bool subref, gfc_expr *expr)
5455 /* If offset is NULL and this is not a subreferenced array, there is
5457 if (offset == NULL_TREE)
5460 offset = gfc_index_zero_node;
5465 tmp = gfc_conv_array_data (desc);
5466 tmp = build_fold_indirect_ref_loc (input_location,
5468 tmp = gfc_build_array_ref (tmp, offset, NULL);
5470 /* Offset the data pointer for pointer assignments from arrays with
5471 subreferences; e.g. my_integer => my_type(:)%integer_component. */
5474 /* Go past the array reference. */
5475 for (ref = expr->ref; ref; ref = ref->next)
5476 if (ref->type == REF_ARRAY &&
5477 ref->u.ar.type != AR_ELEMENT)
5483 /* Calculate the offset for each subsequent subreference. */
5484 for (; ref; ref = ref->next)
5489 field = ref->u.c.component->backend_decl;
5490 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
5491 tmp = fold_build3_loc (input_location, COMPONENT_REF,
5493 tmp, field, NULL_TREE);
5497 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
5498 gfc_init_se (&start, NULL);
5499 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
5500 gfc_add_block_to_block (block, &start.pre);
5501 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
5505 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
5506 && ref->u.ar.type == AR_ELEMENT);
5508 /* TODO - Add bounds checking. */
5509 stride = gfc_index_one_node;
5510 index = gfc_index_zero_node;
5511 for (n = 0; n < ref->u.ar.dimen; n++)
5516 /* Update the index. */
5517 gfc_init_se (&start, NULL);
5518 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
5519 itmp = gfc_evaluate_now (start.expr, block);
5520 gfc_init_se (&start, NULL);
5521 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
5522 jtmp = gfc_evaluate_now (start.expr, block);
5523 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5524 gfc_array_index_type, itmp, jtmp);
5525 itmp = fold_build2_loc (input_location, MULT_EXPR,
5526 gfc_array_index_type, itmp, stride);
5527 index = fold_build2_loc (input_location, PLUS_EXPR,
5528 gfc_array_index_type, itmp, index);
5529 index = gfc_evaluate_now (index, block);
5531 /* Update the stride. */
5532 gfc_init_se (&start, NULL);
5533 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
5534 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5535 gfc_array_index_type, start.expr,
5537 itmp = fold_build2_loc (input_location, PLUS_EXPR,
5538 gfc_array_index_type,
5539 gfc_index_one_node, itmp);
5540 stride = fold_build2_loc (input_location, MULT_EXPR,
5541 gfc_array_index_type, stride, itmp);
5542 stride = gfc_evaluate_now (stride, block);
5545 /* Apply the index to obtain the array element. */
5546 tmp = gfc_build_array_ref (tmp, index, NULL);
5556 /* Set the target data pointer. */
5557 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
5558 gfc_conv_descriptor_data_set (block, parm, offset);
5562 /* gfc_conv_expr_descriptor needs the string length an expression
5563 so that the size of the temporary can be obtained. This is done
5564 by adding up the string lengths of all the elements in the
5565 expression. Function with non-constant expressions have their
5566 string lengths mapped onto the actual arguments using the
5567 interface mapping machinery in trans-expr.c. */
5569 get_array_charlen (gfc_expr *expr, gfc_se *se)
5571 gfc_interface_mapping mapping;
5572 gfc_formal_arglist *formal;
5573 gfc_actual_arglist *arg;
5576 if (expr->ts.u.cl->length
5577 && gfc_is_constant_expr (expr->ts.u.cl->length))
5579 if (!expr->ts.u.cl->backend_decl)
5580 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5584 switch (expr->expr_type)
5587 get_array_charlen (expr->value.op.op1, se);
5589 /* For parentheses the expression ts.u.cl is identical. */
5590 if (expr->value.op.op == INTRINSIC_PARENTHESES)
5593 expr->ts.u.cl->backend_decl =
5594 gfc_create_var (gfc_charlen_type_node, "sln");
5596 if (expr->value.op.op2)
5598 get_array_charlen (expr->value.op.op2, se);
5600 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
5602 /* Add the string lengths and assign them to the expression
5603 string length backend declaration. */
5604 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5605 fold_build2_loc (input_location, PLUS_EXPR,
5606 gfc_charlen_type_node,
5607 expr->value.op.op1->ts.u.cl->backend_decl,
5608 expr->value.op.op2->ts.u.cl->backend_decl));
5611 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5612 expr->value.op.op1->ts.u.cl->backend_decl);
5616 if (expr->value.function.esym == NULL
5617 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
5619 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5623 /* Map expressions involving the dummy arguments onto the actual
5624 argument expressions. */
5625 gfc_init_interface_mapping (&mapping);
5626 formal = expr->symtree->n.sym->formal;
5627 arg = expr->value.function.actual;
5629 /* Set se = NULL in the calls to the interface mapping, to suppress any
5631 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
5636 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
5639 gfc_init_se (&tse, NULL);
5641 /* Build the expression for the character length and convert it. */
5642 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
5644 gfc_add_block_to_block (&se->pre, &tse.pre);
5645 gfc_add_block_to_block (&se->post, &tse.post);
5646 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
5647 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
5648 gfc_charlen_type_node, tse.expr,
5649 build_int_cst (gfc_charlen_type_node, 0));
5650 expr->ts.u.cl->backend_decl = tse.expr;
5651 gfc_free_interface_mapping (&mapping);
5655 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5660 /* Helper function to check dimensions. */
5662 transposed_dims (gfc_ss *ss)
5667 info = &ss->data.info;
5668 for (n = 0; n < info->dimen; n++)
5669 if (info->dim[n] != n)
5674 /* Convert an array for passing as an actual argument. Expressions and
5675 vector subscripts are evaluated and stored in a temporary, which is then
5676 passed. For whole arrays the descriptor is passed. For array sections
5677 a modified copy of the descriptor is passed, but using the original data.
5679 This function is also used for array pointer assignments, and there
5682 - se->want_pointer && !se->direct_byref
5683 EXPR is an actual argument. On exit, se->expr contains a
5684 pointer to the array descriptor.
5686 - !se->want_pointer && !se->direct_byref
5687 EXPR is an actual argument to an intrinsic function or the
5688 left-hand side of a pointer assignment. On exit, se->expr
5689 contains the descriptor for EXPR.
5691 - !se->want_pointer && se->direct_byref
5692 EXPR is the right-hand side of a pointer assignment and
5693 se->expr is the descriptor for the previously-evaluated
5694 left-hand side. The function creates an assignment from
5698 The se->force_tmp flag disables the non-copying descriptor optimization
5699 that is used for transpose. It may be used in cases where there is an
5700 alias between the transpose argument and another argument in the same
5704 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
5716 bool subref_array_target = false;
5719 gcc_assert (ss != NULL);
5720 gcc_assert (ss != gfc_ss_terminator);
5722 /* Special case things we know we can pass easily. */
5723 switch (expr->expr_type)
5726 /* If we have a linear array section, we can pass it directly.
5727 Otherwise we need to copy it into a temporary. */
5729 gcc_assert (ss->type == GFC_SS_SECTION);
5730 gcc_assert (ss->expr == expr);
5731 info = &ss->data.info;
5733 /* Get the descriptor for the array. */
5734 gfc_conv_ss_descriptor (&se->pre, ss, 0);
5735 desc = info->descriptor;
5737 subref_array_target = se->direct_byref && is_subref_array (expr);
5738 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
5739 && !subref_array_target;
5746 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5748 /* Create a new descriptor if the array doesn't have one. */
5751 else if (info->ref->u.ar.type == AR_FULL)
5753 else if (se->direct_byref)
5756 full = gfc_full_array_ref_p (info->ref, NULL);
5758 if (full && !transposed_dims (ss))
5760 if (se->direct_byref && !se->byref_noassign)
5762 /* Copy the descriptor for pointer assignments. */
5763 gfc_add_modify (&se->pre, se->expr, desc);
5765 /* Add any offsets from subreferences. */
5766 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
5767 subref_array_target, expr);
5769 else if (se->want_pointer)
5771 /* We pass full arrays directly. This means that pointers and
5772 allocatable arrays should also work. */
5773 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5780 if (expr->ts.type == BT_CHARACTER)
5781 se->string_length = gfc_get_expr_charlen (expr);
5789 /* We don't need to copy data in some cases. */
5790 arg = gfc_get_noncopying_intrinsic_argument (expr);
5793 /* This is a call to transpose... */
5794 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
5795 /* ... which has already been handled by the scalarizer, so
5796 that we just need to get its argument's descriptor. */
5797 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
5801 /* A transformational function return value will be a temporary
5802 array descriptor. We still need to go through the scalarizer
5803 to create the descriptor. Elemental functions ar handled as
5804 arbitrary expressions, i.e. copy to a temporary. */
5806 if (se->direct_byref)
5808 gcc_assert (ss->type == GFC_SS_FUNCTION && ss->expr == expr);
5810 /* For pointer assignments pass the descriptor directly. */
5814 gcc_assert (se->ss == ss);
5815 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
5816 gfc_conv_expr (se, expr);
5820 if (ss->expr != expr || ss->type != GFC_SS_FUNCTION)
5822 if (ss->expr != expr)
5823 /* Elemental function. */
5824 gcc_assert ((expr->value.function.esym != NULL
5825 && expr->value.function.esym->attr.elemental)
5826 || (expr->value.function.isym != NULL
5827 && expr->value.function.isym->elemental));
5829 gcc_assert (ss->type == GFC_SS_INTRINSIC);
5832 if (expr->ts.type == BT_CHARACTER
5833 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
5834 get_array_charlen (expr, se);
5840 /* Transformational function. */
5841 info = &ss->data.info;
5847 /* Constant array constructors don't need a temporary. */
5848 if (ss->type == GFC_SS_CONSTRUCTOR
5849 && expr->ts.type != BT_CHARACTER
5850 && gfc_constant_array_constructor_p (expr->value.constructor))
5853 info = &ss->data.info;
5863 /* Something complicated. Copy it into a temporary. */
5869 /* If we are creating a temporary, we don't need to bother about aliases
5874 gfc_init_loopinfo (&loop);
5876 /* Associate the SS with the loop. */
5877 gfc_add_ss_to_loop (&loop, ss);
5879 /* Tell the scalarizer not to bother creating loop variables, etc. */
5881 loop.array_parameter = 1;
5883 /* The right-hand side of a pointer assignment mustn't use a temporary. */
5884 gcc_assert (!se->direct_byref);
5886 /* Setup the scalarizing loops and bounds. */
5887 gfc_conv_ss_startstride (&loop);
5891 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
5892 get_array_charlen (expr, se);
5894 /* Tell the scalarizer to make a temporary. */
5895 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
5896 ((expr->ts.type == BT_CHARACTER)
5897 ? expr->ts.u.cl->backend_decl
5901 se->string_length = loop.temp_ss->string_length;
5902 gcc_assert (loop.temp_ss->data.temp.dimen == loop.dimen);
5903 gfc_add_ss_to_loop (&loop, loop.temp_ss);
5906 gfc_conv_loop_setup (&loop, & expr->where);
5910 /* Copy into a temporary and pass that. We don't need to copy the data
5911 back because expressions and vector subscripts must be INTENT_IN. */
5912 /* TODO: Optimize passing function return values. */
5916 /* Start the copying loops. */
5917 gfc_mark_ss_chain_used (loop.temp_ss, 1);
5918 gfc_mark_ss_chain_used (ss, 1);
5919 gfc_start_scalarized_body (&loop, &block);
5921 /* Copy each data element. */
5922 gfc_init_se (&lse, NULL);
5923 gfc_copy_loopinfo_to_se (&lse, &loop);
5924 gfc_init_se (&rse, NULL);
5925 gfc_copy_loopinfo_to_se (&rse, &loop);
5927 lse.ss = loop.temp_ss;
5930 gfc_conv_scalarized_array_ref (&lse, NULL);
5931 if (expr->ts.type == BT_CHARACTER)
5933 gfc_conv_expr (&rse, expr);
5934 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
5935 rse.expr = build_fold_indirect_ref_loc (input_location,
5939 gfc_conv_expr_val (&rse, expr);
5941 gfc_add_block_to_block (&block, &rse.pre);
5942 gfc_add_block_to_block (&block, &lse.pre);
5944 lse.string_length = rse.string_length;
5945 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
5946 expr->expr_type == EXPR_VARIABLE
5947 || expr->expr_type == EXPR_ARRAY, true);
5948 gfc_add_expr_to_block (&block, tmp);
5950 /* Finish the copying loops. */
5951 gfc_trans_scalarizing_loops (&loop, &block);
5953 desc = loop.temp_ss->data.info.descriptor;
5955 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
5957 desc = info->descriptor;
5958 se->string_length = ss->string_length;
5962 /* We pass sections without copying to a temporary. Make a new
5963 descriptor and point it at the section we want. The loop variable
5964 limits will be the limits of the section.
5965 A function may decide to repack the array to speed up access, but
5966 we're not bothered about that here. */
5967 int dim, ndim, codim;
5975 ndim = info->ref ? info->ref->u.ar.dimen : info->dimen;
5977 if (se->want_coarray)
5979 gfc_array_ref *ar = &info->ref->u.ar;
5981 codim = gfc_get_corank (expr);
5982 for (n = 0; n < codim - 1; n++)
5984 /* Make sure we are not lost somehow. */
5985 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
5987 /* Make sure the call to gfc_conv_section_startstride won't
5988 generate unnecessary code to calculate stride. */
5989 gcc_assert (ar->stride[n + ndim] == NULL);
5991 gfc_conv_section_startstride (&loop, ss, n + ndim);
5992 loop.from[n + loop.dimen] = info->start[n + ndim];
5993 loop.to[n + loop.dimen] = info->end[n + ndim];
5996 gcc_assert (n == codim - 1);
5997 evaluate_bound (&loop.pre, info->start, ar->start,
5998 info->descriptor, n + ndim, true);
5999 loop.from[n + loop.dimen] = info->start[n + ndim];
6004 /* Set the string_length for a character array. */
6005 if (expr->ts.type == BT_CHARACTER)
6006 se->string_length = gfc_get_expr_charlen (expr);
6008 desc = info->descriptor;
6009 if (se->direct_byref && !se->byref_noassign)
6011 /* For pointer assignments we fill in the destination. */
6013 parmtype = TREE_TYPE (parm);
6017 /* Otherwise make a new one. */
6018 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6019 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6020 loop.from, loop.to, 0,
6021 GFC_ARRAY_UNKNOWN, false);
6022 parm = gfc_create_var (parmtype, "parm");
6025 offset = gfc_index_zero_node;
6027 /* The following can be somewhat confusing. We have two
6028 descriptors, a new one and the original array.
6029 {parm, parmtype, dim} refer to the new one.
6030 {desc, type, n, loop} refer to the original, which maybe
6031 a descriptorless array.
6032 The bounds of the scalarization are the bounds of the section.
6033 We don't have to worry about numeric overflows when calculating
6034 the offsets because all elements are within the array data. */
6036 /* Set the dtype. */
6037 tmp = gfc_conv_descriptor_dtype (parm);
6038 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6040 /* Set offset for assignments to pointer only to zero if it is not
6042 if (se->direct_byref
6043 && info->ref && info->ref->u.ar.type != AR_FULL)
6044 base = gfc_index_zero_node;
6045 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6046 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
6050 for (n = 0; n < ndim; n++)
6052 stride = gfc_conv_array_stride (desc, n);
6054 /* Work out the offset. */
6056 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6058 gcc_assert (info->subscript[n]
6059 && info->subscript[n]->type == GFC_SS_SCALAR);
6060 start = info->subscript[n]->data.scalar.expr;
6064 /* Evaluate and remember the start of the section. */
6065 start = info->start[n];
6066 stride = gfc_evaluate_now (stride, &loop.pre);
6069 tmp = gfc_conv_array_lbound (desc, n);
6070 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6072 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6074 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6078 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6080 /* For elemental dimensions, we only need the offset. */
6084 /* Vector subscripts need copying and are handled elsewhere. */
6086 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6088 /* look for the corresponding scalarizer dimension: dim. */
6089 for (dim = 0; dim < ndim; dim++)
6090 if (info->dim[dim] == n)
6093 /* loop exited early: the DIM being looked for has been found. */
6094 gcc_assert (dim < ndim);
6096 /* Set the new lower bound. */
6097 from = loop.from[dim];
6100 /* If we have an array section or are assigning make sure that
6101 the lower bound is 1. References to the full
6102 array should otherwise keep the original bounds. */
6104 || info->ref->u.ar.type != AR_FULL)
6105 && !integer_onep (from))
6107 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6108 gfc_array_index_type, gfc_index_one_node,
6110 to = fold_build2_loc (input_location, PLUS_EXPR,
6111 gfc_array_index_type, to, tmp);
6112 from = gfc_index_one_node;
6114 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6115 gfc_rank_cst[dim], from);
6117 /* Set the new upper bound. */
6118 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6119 gfc_rank_cst[dim], to);
6121 /* Multiply the stride by the section stride to get the
6123 stride = fold_build2_loc (input_location, MULT_EXPR,
6124 gfc_array_index_type,
6125 stride, info->stride[n]);
6127 if (se->direct_byref
6129 && info->ref->u.ar.type != AR_FULL)
6131 base = fold_build2_loc (input_location, MINUS_EXPR,
6132 TREE_TYPE (base), base, stride);
6134 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6136 tmp = gfc_conv_array_lbound (desc, n);
6137 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6138 TREE_TYPE (base), tmp, loop.from[dim]);
6139 tmp = fold_build2_loc (input_location, MULT_EXPR,
6140 TREE_TYPE (base), tmp,
6141 gfc_conv_array_stride (desc, n));
6142 base = fold_build2_loc (input_location, PLUS_EXPR,
6143 TREE_TYPE (base), tmp, base);
6146 /* Store the new stride. */
6147 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6148 gfc_rank_cst[dim], stride);
6151 for (n = loop.dimen; n < loop.dimen + codim; n++)
6153 from = loop.from[n];
6155 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6156 gfc_rank_cst[n], from);
6157 if (n < loop.dimen + codim - 1)
6158 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6159 gfc_rank_cst[n], to);
6162 if (se->data_not_needed)
6163 gfc_conv_descriptor_data_set (&loop.pre, parm,
6164 gfc_index_zero_node);
6166 /* Point the data pointer at the 1st element in the section. */
6167 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6168 subref_array_target, expr);
6170 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6171 && !se->data_not_needed)
6173 /* Set the offset. */
6174 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6178 /* Only the callee knows what the correct offset it, so just set
6180 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6185 if (!se->direct_byref || se->byref_noassign)
6187 /* Get a pointer to the new descriptor. */
6188 if (se->want_pointer)
6189 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6194 gfc_add_block_to_block (&se->pre, &loop.pre);
6195 gfc_add_block_to_block (&se->post, &loop.post);
6197 /* Cleanup the scalarizer. */
6198 gfc_cleanup_loop (&loop);
6201 /* Helper function for gfc_conv_array_parameter if array size needs to be
6205 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6208 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6209 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6210 else if (expr->rank > 1)
6211 *size = build_call_expr_loc (input_location,
6212 gfor_fndecl_size0, 1,
6213 gfc_build_addr_expr (NULL, desc));
6216 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6217 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6219 *size = fold_build2_loc (input_location, MINUS_EXPR,
6220 gfc_array_index_type, ubound, lbound);
6221 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6222 *size, gfc_index_one_node);
6223 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6224 *size, gfc_index_zero_node);
6226 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6227 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6228 *size, fold_convert (gfc_array_index_type, elem));
6231 /* Convert an array for passing as an actual parameter. */
6232 /* TODO: Optimize passing g77 arrays. */
6235 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6236 const gfc_symbol *fsym, const char *proc_name,
6241 tree tmp = NULL_TREE;
6243 tree parent = DECL_CONTEXT (current_function_decl);
6244 bool full_array_var;
6245 bool this_array_result;
6248 bool array_constructor;
6249 bool good_allocatable;
6250 bool ultimate_ptr_comp;
6251 bool ultimate_alloc_comp;
6256 ultimate_ptr_comp = false;
6257 ultimate_alloc_comp = false;
6259 for (ref = expr->ref; ref; ref = ref->next)
6261 if (ref->next == NULL)
6264 if (ref->type == REF_COMPONENT)
6266 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6267 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6271 full_array_var = false;
6274 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6275 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6277 sym = full_array_var ? expr->symtree->n.sym : NULL;
6279 /* The symbol should have an array specification. */
6280 gcc_assert (!sym || sym->as || ref->u.ar.as);
6282 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6284 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6285 expr->ts.u.cl->backend_decl = tmp;
6286 se->string_length = tmp;
6289 /* Is this the result of the enclosing procedure? */
6290 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6291 if (this_array_result
6292 && (sym->backend_decl != current_function_decl)
6293 && (sym->backend_decl != parent))
6294 this_array_result = false;
6296 /* Passing address of the array if it is not pointer or assumed-shape. */
6297 if (full_array_var && g77 && !this_array_result)
6299 tmp = gfc_get_symbol_decl (sym);
6301 if (sym->ts.type == BT_CHARACTER)
6302 se->string_length = sym->ts.u.cl->backend_decl;
6304 if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6306 gfc_conv_expr_descriptor (se, expr, ss);
6307 se->expr = gfc_conv_array_data (se->expr);
6311 if (!sym->attr.pointer
6313 && sym->as->type != AS_ASSUMED_SHAPE
6314 && !sym->attr.allocatable)
6316 /* Some variables are declared directly, others are declared as
6317 pointers and allocated on the heap. */
6318 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6321 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6323 array_parameter_size (tmp, expr, size);
6327 if (sym->attr.allocatable)
6329 if (sym->attr.dummy || sym->attr.result)
6331 gfc_conv_expr_descriptor (se, expr, ss);
6335 array_parameter_size (tmp, expr, size);
6336 se->expr = gfc_conv_array_data (tmp);
6341 /* A convenient reduction in scope. */
6342 contiguous = g77 && !this_array_result && contiguous;
6344 /* There is no need to pack and unpack the array, if it is contiguous
6345 and not a deferred- or assumed-shape array, or if it is simply
6347 no_pack = ((sym && sym->as
6348 && !sym->attr.pointer
6349 && sym->as->type != AS_DEFERRED
6350 && sym->as->type != AS_ASSUMED_SHAPE)
6352 (ref && ref->u.ar.as
6353 && ref->u.ar.as->type != AS_DEFERRED
6354 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6356 gfc_is_simply_contiguous (expr, false));
6358 no_pack = contiguous && no_pack;
6360 /* Array constructors are always contiguous and do not need packing. */
6361 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6363 /* Same is true of contiguous sections from allocatable variables. */
6364 good_allocatable = contiguous
6366 && expr->symtree->n.sym->attr.allocatable;
6368 /* Or ultimate allocatable components. */
6369 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
6371 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6373 gfc_conv_expr_descriptor (se, expr, ss);
6374 if (expr->ts.type == BT_CHARACTER)
6375 se->string_length = expr->ts.u.cl->backend_decl;
6377 array_parameter_size (se->expr, expr, size);
6378 se->expr = gfc_conv_array_data (se->expr);
6382 if (this_array_result)
6384 /* Result of the enclosing function. */
6385 gfc_conv_expr_descriptor (se, expr, ss);
6387 array_parameter_size (se->expr, expr, size);
6388 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6390 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
6391 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
6392 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
6399 /* Every other type of array. */
6400 se->want_pointer = 1;
6401 gfc_conv_expr_descriptor (se, expr, ss);
6403 array_parameter_size (build_fold_indirect_ref_loc (input_location,
6408 /* Deallocate the allocatable components of structures that are
6410 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
6411 && expr->ts.u.derived->attr.alloc_comp
6412 && expr->expr_type != EXPR_VARIABLE)
6414 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
6415 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
6417 /* The components shall be deallocated before their containing entity. */
6418 gfc_prepend_expr_to_block (&se->post, tmp);
6421 if (g77 || (fsym && fsym->attr.contiguous
6422 && !gfc_is_simply_contiguous (expr, false)))
6424 tree origptr = NULL_TREE;
6428 /* For contiguous arrays, save the original value of the descriptor. */
6431 origptr = gfc_create_var (pvoid_type_node, "origptr");
6432 tmp = build_fold_indirect_ref_loc (input_location, desc);
6433 tmp = gfc_conv_array_data (tmp);
6434 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6435 TREE_TYPE (origptr), origptr,
6436 fold_convert (TREE_TYPE (origptr), tmp));
6437 gfc_add_expr_to_block (&se->pre, tmp);
6440 /* Repack the array. */
6441 if (gfc_option.warn_array_temp)
6444 gfc_warning ("Creating array temporary at %L for argument '%s'",
6445 &expr->where, fsym->name);
6447 gfc_warning ("Creating array temporary at %L", &expr->where);
6450 ptr = build_call_expr_loc (input_location,
6451 gfor_fndecl_in_pack, 1, desc);
6453 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6455 tmp = gfc_conv_expr_present (sym);
6456 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
6457 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
6458 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
6461 ptr = gfc_evaluate_now (ptr, &se->pre);
6463 /* Use the packed data for the actual argument, except for contiguous arrays,
6464 where the descriptor's data component is set. */
6469 tmp = build_fold_indirect_ref_loc (input_location, desc);
6470 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
6473 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
6477 if (fsym && proc_name)
6478 asprintf (&msg, "An array temporary was created for argument "
6479 "'%s' of procedure '%s'", fsym->name, proc_name);
6481 asprintf (&msg, "An array temporary was created");
6483 tmp = build_fold_indirect_ref_loc (input_location,
6485 tmp = gfc_conv_array_data (tmp);
6486 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6487 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6489 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6490 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6492 gfc_conv_expr_present (sym), tmp);
6494 gfc_trans_runtime_check (false, true, tmp, &se->pre,
6499 gfc_start_block (&block);
6501 /* Copy the data back. */
6502 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
6504 tmp = build_call_expr_loc (input_location,
6505 gfor_fndecl_in_unpack, 2, desc, ptr);
6506 gfc_add_expr_to_block (&block, tmp);
6509 /* Free the temporary. */
6510 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
6511 gfc_add_expr_to_block (&block, tmp);
6513 stmt = gfc_finish_block (&block);
6515 gfc_init_block (&block);
6516 /* Only if it was repacked. This code needs to be executed before the
6517 loop cleanup code. */
6518 tmp = build_fold_indirect_ref_loc (input_location,
6520 tmp = gfc_conv_array_data (tmp);
6521 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6522 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6524 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6525 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6527 gfc_conv_expr_present (sym), tmp);
6529 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6531 gfc_add_expr_to_block (&block, tmp);
6532 gfc_add_block_to_block (&block, &se->post);
6534 gfc_init_block (&se->post);
6536 /* Reset the descriptor pointer. */
6539 tmp = build_fold_indirect_ref_loc (input_location, desc);
6540 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
6543 gfc_add_block_to_block (&se->post, &block);
6548 /* Generate code to deallocate an array, if it is allocated. */
6551 gfc_trans_dealloc_allocated (tree descriptor)
6557 gfc_start_block (&block);
6559 var = gfc_conv_descriptor_data_get (descriptor);
6562 /* Call array_deallocate with an int * present in the second argument.
6563 Although it is ignored here, it's presence ensures that arrays that
6564 are already deallocated are ignored. */
6565 tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
6566 gfc_add_expr_to_block (&block, tmp);
6568 /* Zero the data pointer. */
6569 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6570 var, build_int_cst (TREE_TYPE (var), 0));
6571 gfc_add_expr_to_block (&block, tmp);
6573 return gfc_finish_block (&block);
6577 /* This helper function calculates the size in words of a full array. */
6580 get_full_array_size (stmtblock_t *block, tree decl, int rank)
6585 idx = gfc_rank_cst[rank - 1];
6586 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
6587 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
6588 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
6590 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6591 tmp, gfc_index_one_node);
6592 tmp = gfc_evaluate_now (tmp, block);
6594 nelems = gfc_conv_descriptor_stride_get (decl, idx);
6595 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6597 return gfc_evaluate_now (tmp, block);
6601 /* Allocate dest to the same size as src, and copy src -> dest.
6602 If no_malloc is set, only the copy is done. */
6605 duplicate_allocatable (tree dest, tree src, tree type, int rank,
6615 /* If the source is null, set the destination to null. Then,
6616 allocate memory to the destination. */
6617 gfc_init_block (&block);
6621 tmp = null_pointer_node;
6622 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
6623 gfc_add_expr_to_block (&block, tmp);
6624 null_data = gfc_finish_block (&block);
6626 gfc_init_block (&block);
6627 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
6630 tmp = gfc_call_malloc (&block, type, size);
6631 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6632 dest, fold_convert (type, tmp));
6633 gfc_add_expr_to_block (&block, tmp);
6636 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6637 tmp = build_call_expr_loc (input_location, tmp, 3,
6642 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
6643 null_data = gfc_finish_block (&block);
6645 gfc_init_block (&block);
6646 nelems = get_full_array_size (&block, src, rank);
6647 tmp = fold_convert (gfc_array_index_type,
6648 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
6649 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6653 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
6654 tmp = gfc_call_malloc (&block, tmp, size);
6655 gfc_conv_descriptor_data_set (&block, dest, tmp);
6658 /* We know the temporary and the value will be the same length,
6659 so can use memcpy. */
6660 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6661 tmp = build_call_expr_loc (input_location,
6662 tmp, 3, gfc_conv_descriptor_data_get (dest),
6663 gfc_conv_descriptor_data_get (src), size);
6666 gfc_add_expr_to_block (&block, tmp);
6667 tmp = gfc_finish_block (&block);
6669 /* Null the destination if the source is null; otherwise do
6670 the allocate and copy. */
6674 null_cond = gfc_conv_descriptor_data_get (src);
6676 null_cond = convert (pvoid_type_node, null_cond);
6677 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6678 null_cond, null_pointer_node);
6679 return build3_v (COND_EXPR, null_cond, tmp, null_data);
6683 /* Allocate dest to the same size as src, and copy data src -> dest. */
6686 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
6688 return duplicate_allocatable (dest, src, type, rank, false);
6692 /* Copy data src -> dest. */
6695 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
6697 return duplicate_allocatable (dest, src, type, rank, true);
6701 /* Recursively traverse an object of derived type, generating code to
6702 deallocate, nullify or copy allocatable components. This is the work horse
6703 function for the functions named in this enum. */
6705 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
6706 COPY_ONLY_ALLOC_COMP};
6709 structure_alloc_comps (gfc_symbol * der_type, tree decl,
6710 tree dest, int rank, int purpose)
6714 stmtblock_t fnblock;
6715 stmtblock_t loopbody;
6726 tree null_cond = NULL_TREE;
6728 gfc_init_block (&fnblock);
6730 decl_type = TREE_TYPE (decl);
6732 if ((POINTER_TYPE_P (decl_type) && rank != 0)
6733 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
6735 decl = build_fold_indirect_ref_loc (input_location,
6738 /* Just in case in gets dereferenced. */
6739 decl_type = TREE_TYPE (decl);
6741 /* If this an array of derived types with allocatable components
6742 build a loop and recursively call this function. */
6743 if (TREE_CODE (decl_type) == ARRAY_TYPE
6744 || GFC_DESCRIPTOR_TYPE_P (decl_type))
6746 tmp = gfc_conv_array_data (decl);
6747 var = build_fold_indirect_ref_loc (input_location,
6750 /* Get the number of elements - 1 and set the counter. */
6751 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
6753 /* Use the descriptor for an allocatable array. Since this
6754 is a full array reference, we only need the descriptor
6755 information from dimension = rank. */
6756 tmp = get_full_array_size (&fnblock, decl, rank);
6757 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6758 gfc_array_index_type, tmp,
6759 gfc_index_one_node);
6761 null_cond = gfc_conv_descriptor_data_get (decl);
6762 null_cond = fold_build2_loc (input_location, NE_EXPR,
6763 boolean_type_node, null_cond,
6764 build_int_cst (TREE_TYPE (null_cond), 0));
6768 /* Otherwise use the TYPE_DOMAIN information. */
6769 tmp = array_type_nelts (decl_type);
6770 tmp = fold_convert (gfc_array_index_type, tmp);
6773 /* Remember that this is, in fact, the no. of elements - 1. */
6774 nelems = gfc_evaluate_now (tmp, &fnblock);
6775 index = gfc_create_var (gfc_array_index_type, "S");
6777 /* Build the body of the loop. */
6778 gfc_init_block (&loopbody);
6780 vref = gfc_build_array_ref (var, index, NULL);
6782 if (purpose == COPY_ALLOC_COMP)
6784 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
6786 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
6787 gfc_add_expr_to_block (&fnblock, tmp);
6789 tmp = build_fold_indirect_ref_loc (input_location,
6790 gfc_conv_array_data (dest));
6791 dref = gfc_build_array_ref (tmp, index, NULL);
6792 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
6794 else if (purpose == COPY_ONLY_ALLOC_COMP)
6796 tmp = build_fold_indirect_ref_loc (input_location,
6797 gfc_conv_array_data (dest));
6798 dref = gfc_build_array_ref (tmp, index, NULL);
6799 tmp = structure_alloc_comps (der_type, vref, dref, rank,
6803 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
6805 gfc_add_expr_to_block (&loopbody, tmp);
6807 /* Build the loop and return. */
6808 gfc_init_loopinfo (&loop);
6810 loop.from[0] = gfc_index_zero_node;
6811 loop.loopvar[0] = index;
6812 loop.to[0] = nelems;
6813 gfc_trans_scalarizing_loops (&loop, &loopbody);
6814 gfc_add_block_to_block (&fnblock, &loop.pre);
6816 tmp = gfc_finish_block (&fnblock);
6817 if (null_cond != NULL_TREE)
6818 tmp = build3_v (COND_EXPR, null_cond, tmp,
6819 build_empty_stmt (input_location));
6824 /* Otherwise, act on the components or recursively call self to
6825 act on a chain of components. */
6826 for (c = der_type->components; c; c = c->next)
6828 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
6829 || c->ts.type == BT_CLASS)
6830 && c->ts.u.derived->attr.alloc_comp;
6831 cdecl = c->backend_decl;
6832 ctype = TREE_TYPE (cdecl);
6836 case DEALLOCATE_ALLOC_COMP:
6837 if (cmp_has_alloc_comps && !c->attr.pointer)
6839 /* Do not deallocate the components of ultimate pointer
6841 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6842 decl, cdecl, NULL_TREE);
6843 rank = c->as ? c->as->rank : 0;
6844 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6846 gfc_add_expr_to_block (&fnblock, tmp);
6849 if (c->attr.allocatable
6850 && (c->attr.dimension || c->attr.codimension))
6852 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6853 decl, cdecl, NULL_TREE);
6854 tmp = gfc_trans_dealloc_allocated (comp);
6855 gfc_add_expr_to_block (&fnblock, tmp);
6857 else if (c->attr.allocatable)
6859 /* Allocatable scalar components. */
6860 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6861 decl, cdecl, NULL_TREE);
6863 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6865 gfc_add_expr_to_block (&fnblock, tmp);
6867 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6868 void_type_node, comp,
6869 build_int_cst (TREE_TYPE (comp), 0));
6870 gfc_add_expr_to_block (&fnblock, tmp);
6872 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6874 /* Allocatable scalar CLASS components. */
6875 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6876 decl, cdecl, NULL_TREE);
6878 /* Add reference to '_data' component. */
6879 tmp = CLASS_DATA (c)->backend_decl;
6880 comp = fold_build3_loc (input_location, COMPONENT_REF,
6881 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6883 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6884 CLASS_DATA (c)->ts);
6885 gfc_add_expr_to_block (&fnblock, tmp);
6887 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6888 void_type_node, comp,
6889 build_int_cst (TREE_TYPE (comp), 0));
6890 gfc_add_expr_to_block (&fnblock, tmp);
6894 case NULLIFY_ALLOC_COMP:
6895 if (c->attr.pointer)
6897 else if (c->attr.allocatable
6898 && (c->attr.dimension|| c->attr.codimension))
6900 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6901 decl, cdecl, NULL_TREE);
6902 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
6904 else if (c->attr.allocatable)
6906 /* Allocatable scalar components. */
6907 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6908 decl, cdecl, NULL_TREE);
6909 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6910 void_type_node, comp,
6911 build_int_cst (TREE_TYPE (comp), 0));
6912 gfc_add_expr_to_block (&fnblock, tmp);
6914 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6916 /* Allocatable scalar CLASS components. */
6917 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6918 decl, cdecl, NULL_TREE);
6919 /* Add reference to '_data' component. */
6920 tmp = CLASS_DATA (c)->backend_decl;
6921 comp = fold_build3_loc (input_location, COMPONENT_REF,
6922 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6923 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6924 void_type_node, comp,
6925 build_int_cst (TREE_TYPE (comp), 0));
6926 gfc_add_expr_to_block (&fnblock, tmp);
6928 else if (cmp_has_alloc_comps)
6930 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6931 decl, cdecl, NULL_TREE);
6932 rank = c->as ? c->as->rank : 0;
6933 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6935 gfc_add_expr_to_block (&fnblock, tmp);
6939 case COPY_ALLOC_COMP:
6940 if (c->attr.pointer)
6943 /* We need source and destination components. */
6944 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
6946 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
6948 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
6950 if (c->attr.allocatable && !cmp_has_alloc_comps)
6952 rank = c->as ? c->as->rank : 0;
6953 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
6954 gfc_add_expr_to_block (&fnblock, tmp);
6957 if (cmp_has_alloc_comps)
6959 rank = c->as ? c->as->rank : 0;
6960 tmp = fold_convert (TREE_TYPE (dcmp), comp);
6961 gfc_add_modify (&fnblock, dcmp, tmp);
6962 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
6964 gfc_add_expr_to_block (&fnblock, tmp);
6974 return gfc_finish_block (&fnblock);
6977 /* Recursively traverse an object of derived type, generating code to
6978 nullify allocatable components. */
6981 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6983 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6984 NULLIFY_ALLOC_COMP);
6988 /* Recursively traverse an object of derived type, generating code to
6989 deallocate allocatable components. */
6992 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6994 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6995 DEALLOCATE_ALLOC_COMP);
6999 /* Recursively traverse an object of derived type, generating code to
7000 copy it and its allocatable components. */
7003 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7005 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
7009 /* Recursively traverse an object of derived type, generating code to
7010 copy only its allocatable components. */
7013 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7015 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
7019 /* Returns the value of LBOUND for an expression. This could be broken out
7020 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
7021 called by gfc_alloc_allocatable_for_assignment. */
7023 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
7028 tree cond, cond1, cond3, cond4;
7032 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
7034 tmp = gfc_rank_cst[dim];
7035 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
7036 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
7037 stride = gfc_conv_descriptor_stride_get (desc, tmp);
7038 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7040 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7041 stride, gfc_index_zero_node);
7042 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7043 boolean_type_node, cond3, cond1);
7044 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
7045 stride, gfc_index_zero_node);
7047 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7048 tmp, build_int_cst (gfc_array_index_type,
7051 cond = boolean_false_node;
7053 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7054 boolean_type_node, cond3, cond4);
7055 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7056 boolean_type_node, cond, cond1);
7058 return fold_build3_loc (input_location, COND_EXPR,
7059 gfc_array_index_type, cond,
7060 lbound, gfc_index_one_node);
7062 else if (expr->expr_type == EXPR_VARIABLE)
7064 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7065 for (ref = expr->ref; ref; ref = ref->next)
7067 if (ref->type == REF_COMPONENT
7068 && ref->u.c.component->as
7070 && ref->next->u.ar.type == AR_FULL)
7071 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
7073 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
7075 else if (expr->expr_type == EXPR_FUNCTION)
7077 /* A conversion function, so use the argument. */
7078 expr = expr->value.function.actual->expr;
7079 if (expr->expr_type != EXPR_VARIABLE)
7080 return gfc_index_one_node;
7081 desc = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7082 return get_std_lbound (expr, desc, dim, assumed_size);
7085 return gfc_index_one_node;
7089 /* Returns true if an expression represents an lhs that can be reallocated
7093 gfc_is_reallocatable_lhs (gfc_expr *expr)
7100 /* An allocatable variable. */
7101 if (expr->symtree->n.sym->attr.allocatable
7103 && expr->ref->type == REF_ARRAY
7104 && expr->ref->u.ar.type == AR_FULL)
7107 /* All that can be left are allocatable components. */
7108 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
7109 && expr->symtree->n.sym->ts.type != BT_CLASS)
7110 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
7113 /* Find a component ref followed by an array reference. */
7114 for (ref = expr->ref; ref; ref = ref->next)
7116 && ref->type == REF_COMPONENT
7117 && ref->next->type == REF_ARRAY
7118 && !ref->next->next)
7124 /* Return true if valid reallocatable lhs. */
7125 if (ref->u.c.component->attr.allocatable
7126 && ref->next->u.ar.type == AR_FULL)
7133 /* Allocate the lhs of an assignment to an allocatable array, otherwise
7137 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
7141 stmtblock_t realloc_block;
7142 stmtblock_t alloc_block;
7165 gfc_array_spec * as;
7167 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
7168 Find the lhs expression in the loop chain and set expr1 and
7169 expr2 accordingly. */
7170 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
7173 /* Find the ss for the lhs. */
7175 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7176 if (lss->expr && lss->expr->expr_type == EXPR_VARIABLE)
7178 if (lss == gfc_ss_terminator)
7183 /* Bail out if this is not a valid allocate on assignment. */
7184 if (!gfc_is_reallocatable_lhs (expr1)
7185 || (expr2 && !expr2->rank))
7188 /* Find the ss for the lhs. */
7190 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7191 if (lss->expr == expr1)
7194 if (lss == gfc_ss_terminator)
7197 /* Find an ss for the rhs. For operator expressions, we see the
7198 ss's for the operands. Any one of these will do. */
7200 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7201 if (rss->expr != expr1 && rss != loop->temp_ss)
7204 if (expr2 && rss == gfc_ss_terminator)
7207 gfc_start_block (&fblock);
7209 /* Since the lhs is allocatable, this must be a descriptor type.
7210 Get the data and array size. */
7211 desc = lss->data.info.descriptor;
7212 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
7213 array1 = gfc_conv_descriptor_data_get (desc);
7215 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
7216 deallocated if expr is an array of different shape or any of the
7217 corresponding length type parameter values of variable and expr
7218 differ." This assures F95 compatibility. */
7219 jump_label1 = gfc_build_label_decl (NULL_TREE);
7220 jump_label2 = gfc_build_label_decl (NULL_TREE);
7222 /* Allocate if data is NULL. */
7223 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7224 array1, build_int_cst (TREE_TYPE (array1), 0));
7225 tmp = build3_v (COND_EXPR, cond,
7226 build1_v (GOTO_EXPR, jump_label1),
7227 build_empty_stmt (input_location));
7228 gfc_add_expr_to_block (&fblock, tmp);
7230 /* Get arrayspec if expr is a full array. */
7231 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7232 && expr2->value.function.isym
7233 && expr2->value.function.isym->conversion)
7235 /* For conversion functions, take the arg. */
7236 gfc_expr *arg = expr2->value.function.actual->expr;
7237 as = gfc_get_full_arrayspec_from_expr (arg);
7240 as = gfc_get_full_arrayspec_from_expr (expr2);
7244 /* If the lhs shape is not the same as the rhs jump to setting the
7245 bounds and doing the reallocation....... */
7246 for (n = 0; n < expr1->rank; n++)
7248 /* Check the shape. */
7249 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7250 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7251 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7252 gfc_array_index_type,
7253 loop->to[n], loop->from[n]);
7254 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7255 gfc_array_index_type,
7257 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7258 gfc_array_index_type,
7260 cond = fold_build2_loc (input_location, NE_EXPR,
7262 tmp, gfc_index_zero_node);
7263 tmp = build3_v (COND_EXPR, cond,
7264 build1_v (GOTO_EXPR, jump_label1),
7265 build_empty_stmt (input_location));
7266 gfc_add_expr_to_block (&fblock, tmp);
7269 /* ....else jump past the (re)alloc code. */
7270 tmp = build1_v (GOTO_EXPR, jump_label2);
7271 gfc_add_expr_to_block (&fblock, tmp);
7273 /* Add the label to start automatic (re)allocation. */
7274 tmp = build1_v (LABEL_EXPR, jump_label1);
7275 gfc_add_expr_to_block (&fblock, tmp);
7277 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7279 /* Get the rhs size. Fix both sizes. */
7281 desc2 = rss->data.info.descriptor;
7284 size2 = gfc_index_one_node;
7285 for (n = 0; n < expr2->rank; n++)
7287 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7288 gfc_array_index_type,
7289 loop->to[n], loop->from[n]);
7290 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7291 gfc_array_index_type,
7292 tmp, gfc_index_one_node);
7293 size2 = fold_build2_loc (input_location, MULT_EXPR,
7294 gfc_array_index_type,
7298 size1 = gfc_evaluate_now (size1, &fblock);
7299 size2 = gfc_evaluate_now (size2, &fblock);
7301 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7303 neq_size = gfc_evaluate_now (cond, &fblock);
7306 /* Now modify the lhs descriptor and the associated scalarizer
7307 variables. F2003 7.4.1.3: "If variable is or becomes an
7308 unallocated allocatable variable, then it is allocated with each
7309 deferred type parameter equal to the corresponding type parameters
7310 of expr , with the shape of expr , and with each lower bound equal
7311 to the corresponding element of LBOUND(expr)."
7312 Reuse size1 to keep a dimension-by-dimension track of the
7313 stride of the new array. */
7314 size1 = gfc_index_one_node;
7315 offset = gfc_index_zero_node;
7317 for (n = 0; n < expr2->rank; n++)
7319 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7320 gfc_array_index_type,
7321 loop->to[n], loop->from[n]);
7322 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7323 gfc_array_index_type,
7324 tmp, gfc_index_one_node);
7326 lbound = gfc_index_one_node;
7331 lbd = get_std_lbound (expr2, desc2, n,
7332 as->type == AS_ASSUMED_SIZE);
7333 ubound = fold_build2_loc (input_location,
7335 gfc_array_index_type,
7337 ubound = fold_build2_loc (input_location,
7339 gfc_array_index_type,
7344 gfc_conv_descriptor_lbound_set (&fblock, desc,
7347 gfc_conv_descriptor_ubound_set (&fblock, desc,
7350 gfc_conv_descriptor_stride_set (&fblock, desc,
7353 lbound = gfc_conv_descriptor_lbound_get (desc,
7355 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
7356 gfc_array_index_type,
7358 offset = fold_build2_loc (input_location, MINUS_EXPR,
7359 gfc_array_index_type,
7361 size1 = fold_build2_loc (input_location, MULT_EXPR,
7362 gfc_array_index_type,
7366 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
7367 the array offset is saved and the info.offset is used for a
7368 running offset. Use the saved_offset instead. */
7369 tmp = gfc_conv_descriptor_offset (desc);
7370 gfc_add_modify (&fblock, tmp, offset);
7371 if (lss->data.info.saved_offset
7372 && TREE_CODE (lss->data.info.saved_offset) == VAR_DECL)
7373 gfc_add_modify (&fblock, lss->data.info.saved_offset, tmp);
7375 /* Now set the deltas for the lhs. */
7376 for (n = 0; n < expr1->rank; n++)
7378 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7379 dim = lss->data.info.dim[n];
7380 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7381 gfc_array_index_type, tmp,
7383 if (lss->data.info.delta[dim]
7384 && TREE_CODE (lss->data.info.delta[dim]) == VAR_DECL)
7385 gfc_add_modify (&fblock, lss->data.info.delta[dim], tmp);
7388 /* Get the new lhs size in bytes. */
7389 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
7391 tmp = expr2->ts.u.cl->backend_decl;
7392 gcc_assert (expr1->ts.u.cl->backend_decl);
7393 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
7394 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
7396 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
7398 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
7399 tmp = fold_build2_loc (input_location, MULT_EXPR,
7400 gfc_array_index_type, tmp,
7401 expr1->ts.u.cl->backend_decl);
7404 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
7405 tmp = fold_convert (gfc_array_index_type, tmp);
7406 size2 = fold_build2_loc (input_location, MULT_EXPR,
7407 gfc_array_index_type,
7409 size2 = fold_convert (size_type_node, size2);
7410 size2 = gfc_evaluate_now (size2, &fblock);
7412 /* Realloc expression. Note that the scalarizer uses desc.data
7413 in the array reference - (*desc.data)[<element>]. */
7414 gfc_init_block (&realloc_block);
7415 tmp = build_call_expr_loc (input_location,
7416 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
7417 fold_convert (pvoid_type_node, array1),
7419 gfc_conv_descriptor_data_set (&realloc_block,
7421 realloc_expr = gfc_finish_block (&realloc_block);
7423 /* Only reallocate if sizes are different. */
7424 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
7425 build_empty_stmt (input_location));
7429 /* Malloc expression. */
7430 gfc_init_block (&alloc_block);
7431 tmp = build_call_expr_loc (input_location,
7432 builtin_decl_explicit (BUILT_IN_MALLOC),
7434 gfc_conv_descriptor_data_set (&alloc_block,
7436 tmp = gfc_conv_descriptor_dtype (desc);
7437 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
7438 alloc_expr = gfc_finish_block (&alloc_block);
7440 /* Malloc if not allocated; realloc otherwise. */
7441 tmp = build_int_cst (TREE_TYPE (array1), 0);
7442 cond = fold_build2_loc (input_location, EQ_EXPR,
7445 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
7446 gfc_add_expr_to_block (&fblock, tmp);
7448 /* Make sure that the scalarizer data pointer is updated. */
7449 if (lss->data.info.data
7450 && TREE_CODE (lss->data.info.data) == VAR_DECL)
7452 tmp = gfc_conv_descriptor_data_get (desc);
7453 gfc_add_modify (&fblock, lss->data.info.data, tmp);
7456 /* Add the exit label. */
7457 tmp = build1_v (LABEL_EXPR, jump_label2);
7458 gfc_add_expr_to_block (&fblock, tmp);
7460 return gfc_finish_block (&fblock);
7464 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
7465 Do likewise, recursively if necessary, with the allocatable components of
7469 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
7475 stmtblock_t cleanup;
7478 bool sym_has_alloc_comp;
7480 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
7481 || sym->ts.type == BT_CLASS)
7482 && sym->ts.u.derived->attr.alloc_comp;
7484 /* Make sure the frontend gets these right. */
7485 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
7486 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
7487 "allocatable attribute or derived type without allocatable "
7490 gfc_save_backend_locus (&loc);
7491 gfc_set_backend_locus (&sym->declared_at);
7492 gfc_init_block (&init);
7494 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
7495 || TREE_CODE (sym->backend_decl) == PARM_DECL);
7497 if (sym->ts.type == BT_CHARACTER
7498 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
7500 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
7501 gfc_trans_vla_type_sizes (sym, &init);
7504 /* Dummy, use associated and result variables don't need anything special. */
7505 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
7507 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7508 gfc_restore_backend_locus (&loc);
7512 descriptor = sym->backend_decl;
7514 /* Although static, derived types with default initializers and
7515 allocatable components must not be nulled wholesale; instead they
7516 are treated component by component. */
7517 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
7519 /* SAVEd variables are not freed on exit. */
7520 gfc_trans_static_array_pointer (sym);
7522 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7523 gfc_restore_backend_locus (&loc);
7527 /* Get the descriptor type. */
7528 type = TREE_TYPE (sym->backend_decl);
7530 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
7533 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
7535 if (sym->value == NULL
7536 || !gfc_has_default_initializer (sym->ts.u.derived))
7538 rank = sym->as ? sym->as->rank : 0;
7539 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
7541 gfc_add_expr_to_block (&init, tmp);
7544 gfc_init_default_dt (sym, &init, false);
7547 else if (!GFC_DESCRIPTOR_TYPE_P (type))
7549 /* If the backend_decl is not a descriptor, we must have a pointer
7551 descriptor = build_fold_indirect_ref_loc (input_location,
7553 type = TREE_TYPE (descriptor);
7556 /* NULLIFY the data pointer. */
7557 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
7558 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
7560 gfc_restore_backend_locus (&loc);
7561 gfc_init_block (&cleanup);
7563 /* Allocatable arrays need to be freed when they go out of scope.
7564 The allocatable components of pointers must not be touched. */
7565 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
7566 && !sym->attr.pointer && !sym->attr.save)
7569 rank = sym->as ? sym->as->rank : 0;
7570 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
7571 gfc_add_expr_to_block (&cleanup, tmp);
7574 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
7575 && !sym->attr.save && !sym->attr.result)
7577 tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
7578 gfc_add_expr_to_block (&cleanup, tmp);
7581 gfc_add_init_cleanup (block, gfc_finish_block (&init),
7582 gfc_finish_block (&cleanup));
7585 /************ Expression Walking Functions ******************/
7587 /* Walk a variable reference.
7589 Possible extension - multiple component subscripts.
7590 x(:,:) = foo%a(:)%b(:)
7592 forall (i=..., j=...)
7593 x(i,j) = foo%a(j)%b(i)
7595 This adds a fair amount of complexity because you need to deal with more
7596 than one ref. Maybe handle in a similar manner to vector subscripts.
7597 Maybe not worth the effort. */
7601 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
7605 for (ref = expr->ref; ref; ref = ref->next)
7606 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
7609 return gfc_walk_array_ref (ss, expr, ref);
7614 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
7620 for (; ref; ref = ref->next)
7622 if (ref->type == REF_SUBSTRING)
7624 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
7625 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
7628 /* We're only interested in array sections from now on. */
7629 if (ref->type != REF_ARRAY)
7637 for (n = ar->dimen - 1; n >= 0; n--)
7638 ss = gfc_get_scalar_ss (ss, ar->start[n]);
7642 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
7643 newss->data.info.ref = ref;
7645 /* Make sure array is the same as array(:,:), this way
7646 we don't need to special case all the time. */
7647 ar->dimen = ar->as->rank;
7648 for (n = 0; n < ar->dimen; n++)
7650 ar->dimen_type[n] = DIMEN_RANGE;
7652 gcc_assert (ar->start[n] == NULL);
7653 gcc_assert (ar->end[n] == NULL);
7654 gcc_assert (ar->stride[n] == NULL);
7660 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
7661 newss->data.info.ref = ref;
7663 /* We add SS chains for all the subscripts in the section. */
7664 for (n = 0; n < ar->dimen; n++)
7668 switch (ar->dimen_type[n])
7671 /* Add SS for elemental (scalar) subscripts. */
7672 gcc_assert (ar->start[n]);
7673 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
7674 indexss->loop_chain = gfc_ss_terminator;
7675 newss->data.info.subscript[n] = indexss;
7679 /* We don't add anything for sections, just remember this
7680 dimension for later. */
7681 newss->data.info.dim[newss->data.info.dimen] = n;
7682 newss->data.info.dimen++;
7686 /* Create a GFC_SS_VECTOR index in which we can store
7687 the vector's descriptor. */
7688 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
7690 indexss->loop_chain = gfc_ss_terminator;
7691 newss->data.info.subscript[n] = indexss;
7692 newss->data.info.dim[newss->data.info.dimen] = n;
7693 newss->data.info.dimen++;
7697 /* We should know what sort of section it is by now. */
7701 /* We should have at least one non-elemental dimension,
7702 unless we are creating a descriptor for a (scalar) coarray. */
7703 gcc_assert (newss->data.info.dimen > 0
7704 || newss->data.info.ref->u.ar.as->corank > 0);
7709 /* We should know what sort of section it is by now. */
7718 /* Walk an expression operator. If only one operand of a binary expression is
7719 scalar, we must also add the scalar term to the SS chain. */
7722 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
7727 head = gfc_walk_subexpr (ss, expr->value.op.op1);
7728 if (expr->value.op.op2 == NULL)
7731 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
7733 /* All operands are scalar. Pass back and let the caller deal with it. */
7737 /* All operands require scalarization. */
7738 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
7741 /* One of the operands needs scalarization, the other is scalar.
7742 Create a gfc_ss for the scalar expression. */
7745 /* First operand is scalar. We build the chain in reverse order, so
7746 add the scalar SS after the second operand. */
7748 while (head && head->next != ss)
7750 /* Check we haven't somehow broken the chain. */
7752 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
7754 else /* head2 == head */
7756 gcc_assert (head2 == head);
7757 /* Second operand is scalar. */
7758 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
7765 /* Reverse a SS chain. */
7768 gfc_reverse_ss (gfc_ss * ss)
7773 gcc_assert (ss != NULL);
7775 head = gfc_ss_terminator;
7776 while (ss != gfc_ss_terminator)
7779 /* Check we didn't somehow break the chain. */
7780 gcc_assert (next != NULL);
7790 /* Walk the arguments of an elemental function. */
7793 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
7801 head = gfc_ss_terminator;
7804 for (; arg; arg = arg->next)
7809 newss = gfc_walk_subexpr (head, arg->expr);
7812 /* Scalar argument. */
7813 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
7814 newss = gfc_get_scalar_ss (head, arg->expr);
7824 while (tail->next != gfc_ss_terminator)
7831 /* If all the arguments are scalar we don't need the argument SS. */
7832 gfc_free_ss_chain (head);
7837 /* Add it onto the existing chain. */
7843 /* Walk a function call. Scalar functions are passed back, and taken out of
7844 scalarization loops. For elemental functions we walk their arguments.
7845 The result of functions returning arrays is stored in a temporary outside
7846 the loop, so that the function is only called once. Hence we do not need
7847 to walk their arguments. */
7850 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
7852 gfc_intrinsic_sym *isym;
7854 gfc_component *comp = NULL;
7856 isym = expr->value.function.isym;
7858 /* Handle intrinsic functions separately. */
7860 return gfc_walk_intrinsic_function (ss, expr, isym);
7862 sym = expr->value.function.esym;
7864 sym = expr->symtree->n.sym;
7866 /* A function that returns arrays. */
7867 gfc_is_proc_ptr_comp (expr, &comp);
7868 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
7869 || (comp && comp->attr.dimension))
7870 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
7872 /* Walk the parameters of an elemental function. For now we always pass
7874 if (sym->attr.elemental)
7875 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
7878 /* Scalar functions are OK as these are evaluated outside the scalarization
7879 loop. Pass back and let the caller deal with it. */
7884 /* An array temporary is constructed for array constructors. */
7887 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
7889 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
7893 /* Walk an expression. Add walked expressions to the head of the SS chain.
7894 A wholly scalar expression will not be added. */
7897 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
7901 switch (expr->expr_type)
7904 head = gfc_walk_variable_expr (ss, expr);
7908 head = gfc_walk_op_expr (ss, expr);
7912 head = gfc_walk_function_expr (ss, expr);
7917 case EXPR_STRUCTURE:
7918 /* Pass back and let the caller deal with it. */
7922 head = gfc_walk_array_constructor (ss, expr);
7925 case EXPR_SUBSTRING:
7926 /* Pass back and let the caller deal with it. */
7930 internal_error ("bad expression type during walk (%d)",
7937 /* Entry point for expression walking.
7938 A return value equal to the passed chain means this is
7939 a scalar expression. It is up to the caller to take whatever action is
7940 necessary to translate these. */
7943 gfc_walk_expr (gfc_expr * expr)
7947 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
7948 return gfc_reverse_ss (res);