1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* trans-array.c-- Various array related code, including scalarization,
25 allocation, initialization and other support routines. */
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
39 If the expression is an assignment, we must then resolve any dependencies.
40 In fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this gfc_make_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
67 For assignment expressions requiring a temporary two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
82 #include "coretypes.h"
84 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
87 #include "constructor.h"
89 #include "trans-stmt.h"
90 #include "trans-types.h"
91 #include "trans-array.h"
92 #include "trans-const.h"
93 #include "dependency.h"
95 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
97 /* The contents of this structure aren't actually used, just the address. */
98 static gfc_ss gfc_ss_terminator_var;
99 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
103 gfc_array_dataptr_type (tree desc)
105 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
109 /* Build expressions to access the members of an array descriptor.
110 It's surprisingly easy to mess up here, so never access
111 an array descriptor by "brute force", always use these
112 functions. This also avoids problems if we change the format
113 of an array descriptor.
115 To understand these magic numbers, look at the comments
116 before gfc_build_array_type() in trans-types.c.
118 The code within these defines should be the only code which knows the format
119 of an array descriptor.
121 Any code just needing to read obtain the bounds of an array should use
122 gfc_conv_array_* rather than the following functions as these will return
123 know constant values, and work with arrays which do not have descriptors.
125 Don't forget to #undef these! */
128 #define OFFSET_FIELD 1
129 #define DTYPE_FIELD 2
130 #define DIMENSION_FIELD 3
132 #define STRIDE_SUBFIELD 0
133 #define LBOUND_SUBFIELD 1
134 #define UBOUND_SUBFIELD 2
136 /* This provides READ-ONLY access to the data field. The field itself
137 doesn't have the proper type. */
140 gfc_conv_descriptor_data_get (tree desc)
144 type = TREE_TYPE (desc);
145 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
147 field = TYPE_FIELDS (type);
148 gcc_assert (DATA_FIELD == 0);
150 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
152 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
157 /* This provides WRITE access to the data field.
159 TUPLES_P is true if we are generating tuples.
161 This function gets called through the following macros:
162 gfc_conv_descriptor_data_set
163 gfc_conv_descriptor_data_set. */
166 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
170 type = TREE_TYPE (desc);
171 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
173 field = TYPE_FIELDS (type);
174 gcc_assert (DATA_FIELD == 0);
176 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
178 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
182 /* This provides address access to the data field. This should only be
183 used by array allocation, passing this on to the runtime. */
186 gfc_conv_descriptor_data_addr (tree desc)
190 type = TREE_TYPE (desc);
191 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
193 field = TYPE_FIELDS (type);
194 gcc_assert (DATA_FIELD == 0);
196 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
198 return gfc_build_addr_expr (NULL_TREE, t);
202 gfc_conv_descriptor_offset (tree desc)
207 type = TREE_TYPE (desc);
208 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
210 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
211 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
213 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
214 desc, field, NULL_TREE);
218 gfc_conv_descriptor_offset_get (tree desc)
220 return gfc_conv_descriptor_offset (desc);
224 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
227 tree t = gfc_conv_descriptor_offset (desc);
228 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
233 gfc_conv_descriptor_dtype (tree desc)
238 type = TREE_TYPE (desc);
239 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
241 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
242 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
244 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
245 desc, field, NULL_TREE);
249 gfc_conv_descriptor_dimension (tree desc, tree dim)
255 type = TREE_TYPE (desc);
256 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
258 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
259 gcc_assert (field != NULL_TREE
260 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
261 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
263 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
264 desc, field, NULL_TREE);
265 tmp = gfc_build_array_ref (tmp, dim, NULL);
270 gfc_conv_descriptor_stride (tree desc, tree dim)
275 tmp = gfc_conv_descriptor_dimension (desc, dim);
276 field = TYPE_FIELDS (TREE_TYPE (tmp));
277 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
278 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
280 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
281 tmp, field, NULL_TREE);
286 gfc_conv_descriptor_stride_get (tree desc, tree dim)
288 tree type = TREE_TYPE (desc);
289 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
290 if (integer_zerop (dim)
291 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
292 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
293 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
294 return gfc_index_one_node;
296 return gfc_conv_descriptor_stride (desc, dim);
300 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
301 tree dim, tree value)
303 tree t = gfc_conv_descriptor_stride (desc, dim);
304 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
308 gfc_conv_descriptor_lbound (tree desc, tree dim)
313 tmp = gfc_conv_descriptor_dimension (desc, dim);
314 field = TYPE_FIELDS (TREE_TYPE (tmp));
315 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
316 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
318 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
319 tmp, field, NULL_TREE);
324 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
326 return gfc_conv_descriptor_lbound (desc, dim);
330 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
331 tree dim, tree value)
333 tree t = gfc_conv_descriptor_lbound (desc, dim);
334 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
338 gfc_conv_descriptor_ubound (tree desc, tree dim)
343 tmp = gfc_conv_descriptor_dimension (desc, dim);
344 field = TYPE_FIELDS (TREE_TYPE (tmp));
345 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
346 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
348 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
349 tmp, field, NULL_TREE);
354 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
356 return gfc_conv_descriptor_ubound (desc, dim);
360 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
361 tree dim, tree value)
363 tree t = gfc_conv_descriptor_ubound (desc, dim);
364 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
367 /* Build a null array descriptor constructor. */
370 gfc_build_null_descriptor (tree type)
375 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
376 gcc_assert (DATA_FIELD == 0);
377 field = TYPE_FIELDS (type);
379 /* Set a NULL data pointer. */
380 tmp = build_constructor_single (type, field, null_pointer_node);
381 TREE_CONSTANT (tmp) = 1;
382 /* All other fields are ignored. */
388 /* Modify a descriptor such that the lbound of a given dimension is the value
389 specified. This also updates ubound and offset accordingly. */
392 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
393 int dim, tree new_lbound)
395 tree offs, ubound, lbound, stride;
396 tree diff, offs_diff;
398 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
400 offs = gfc_conv_descriptor_offset_get (desc);
401 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
402 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
403 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
405 /* Get difference (new - old) by which to shift stuff. */
406 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
409 /* Shift ubound and offset accordingly. This has to be done before
410 updating the lbound, as they depend on the lbound expression! */
411 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
413 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
414 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
416 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
418 gfc_conv_descriptor_offset_set (block, desc, offs);
420 /* Finally set lbound to value we want. */
421 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
425 /* Cleanup those #defines. */
430 #undef DIMENSION_FIELD
431 #undef STRIDE_SUBFIELD
432 #undef LBOUND_SUBFIELD
433 #undef UBOUND_SUBFIELD
436 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
437 flags & 1 = Main loop body.
438 flags & 2 = temp copy loop. */
441 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
443 for (; ss != gfc_ss_terminator; ss = ss->next)
444 ss->useflags = flags;
447 static void gfc_free_ss (gfc_ss *);
450 /* Free a gfc_ss chain. */
453 gfc_free_ss_chain (gfc_ss * ss)
457 while (ss != gfc_ss_terminator)
459 gcc_assert (ss != NULL);
470 gfc_free_ss (gfc_ss * ss)
477 for (n = 0; n < ss->data.info.dimen; n++)
479 if (ss->data.info.subscript[ss->data.info.dim[n]])
480 gfc_free_ss_chain (ss->data.info.subscript[ss->data.info.dim[n]]);
492 /* Free all the SS associated with a loop. */
495 gfc_cleanup_loop (gfc_loopinfo * loop)
501 while (ss != gfc_ss_terminator)
503 gcc_assert (ss != NULL);
504 next = ss->loop_chain;
511 /* Associate a SS chain with a loop. */
514 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
518 if (head == gfc_ss_terminator)
522 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
524 if (ss->next == gfc_ss_terminator)
525 ss->loop_chain = loop->ss;
527 ss->loop_chain = ss->next;
529 gcc_assert (ss == gfc_ss_terminator);
534 /* Generate an initializer for a static pointer or allocatable array. */
537 gfc_trans_static_array_pointer (gfc_symbol * sym)
541 gcc_assert (TREE_STATIC (sym->backend_decl));
542 /* Just zero the data member. */
543 type = TREE_TYPE (sym->backend_decl);
544 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
548 /* If the bounds of SE's loop have not yet been set, see if they can be
549 determined from array spec AS, which is the array spec of a called
550 function. MAPPING maps the callee's dummy arguments to the values
551 that the caller is passing. Add any initialization and finalization
555 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
556 gfc_se * se, gfc_array_spec * as)
564 if (as && as->type == AS_EXPLICIT)
565 for (n = 0; n < se->loop->dimen + se->loop->codimen; n++)
567 dim = se->ss->data.info.dim[n];
568 gcc_assert (dim < as->rank);
569 gcc_assert (se->loop->dimen == as->rank);
570 if (se->loop->to[n] == NULL_TREE)
572 /* Evaluate the lower bound. */
573 gfc_init_se (&tmpse, NULL);
574 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
575 gfc_add_block_to_block (&se->pre, &tmpse.pre);
576 gfc_add_block_to_block (&se->post, &tmpse.post);
577 lower = fold_convert (gfc_array_index_type, tmpse.expr);
579 if (se->loop->codimen == 0
580 || n < se->loop->dimen + se->loop->codimen - 1)
582 /* ...and the upper bound. */
583 gfc_init_se (&tmpse, NULL);
584 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
585 gfc_add_block_to_block (&se->pre, &tmpse.pre);
586 gfc_add_block_to_block (&se->post, &tmpse.post);
587 upper = fold_convert (gfc_array_index_type, tmpse.expr);
589 /* Set the upper bound of the loop to UPPER - LOWER. */
590 tmp = fold_build2_loc (input_location, MINUS_EXPR,
591 gfc_array_index_type, upper, lower);
592 tmp = gfc_evaluate_now (tmp, &se->pre);
593 se->loop->to[n] = tmp;
600 /* Generate code to allocate an array temporary, or create a variable to
601 hold the data. If size is NULL, zero the descriptor so that the
602 callee will allocate the array. If DEALLOC is true, also generate code to
603 free the array afterwards.
605 If INITIAL is not NULL, it is packed using internal_pack and the result used
606 as data instead of allocating a fresh, unitialized area of memory.
608 Initialization code is added to PRE and finalization code to POST.
609 DYNAMIC is true if the caller may want to extend the array later
610 using realloc. This prevents us from putting the array on the stack. */
613 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
614 gfc_ss_info * info, tree size, tree nelem,
615 tree initial, bool dynamic, bool dealloc)
621 desc = info->descriptor;
622 info->offset = gfc_index_zero_node;
623 if (size == NULL_TREE || integer_zerop (size))
625 /* A callee allocated array. */
626 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
631 /* Allocate the temporary. */
632 onstack = !dynamic && initial == NULL_TREE
633 && gfc_can_put_var_on_stack (size);
637 /* Make a temporary variable to hold the data. */
638 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
639 nelem, gfc_index_one_node);
640 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
642 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
644 tmp = gfc_create_var (tmp, "A");
645 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
646 gfc_conv_descriptor_data_set (pre, desc, tmp);
650 /* Allocate memory to hold the data or call internal_pack. */
651 if (initial == NULL_TREE)
653 tmp = gfc_call_malloc (pre, NULL, size);
654 tmp = gfc_evaluate_now (tmp, pre);
661 stmtblock_t do_copying;
663 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
664 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
665 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
666 tmp = gfc_get_element_type (tmp);
667 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
668 packed = gfc_create_var (build_pointer_type (tmp), "data");
670 tmp = build_call_expr_loc (input_location,
671 gfor_fndecl_in_pack, 1, initial);
672 tmp = fold_convert (TREE_TYPE (packed), tmp);
673 gfc_add_modify (pre, packed, tmp);
675 tmp = build_fold_indirect_ref_loc (input_location,
677 source_data = gfc_conv_descriptor_data_get (tmp);
679 /* internal_pack may return source->data without any allocation
680 or copying if it is already packed. If that's the case, we
681 need to allocate and copy manually. */
683 gfc_start_block (&do_copying);
684 tmp = gfc_call_malloc (&do_copying, NULL, size);
685 tmp = fold_convert (TREE_TYPE (packed), tmp);
686 gfc_add_modify (&do_copying, packed, tmp);
687 tmp = gfc_build_memcpy_call (packed, source_data, size);
688 gfc_add_expr_to_block (&do_copying, tmp);
690 was_packed = fold_build2_loc (input_location, EQ_EXPR,
691 boolean_type_node, packed,
693 tmp = gfc_finish_block (&do_copying);
694 tmp = build3_v (COND_EXPR, was_packed, tmp,
695 build_empty_stmt (input_location));
696 gfc_add_expr_to_block (pre, tmp);
698 tmp = fold_convert (pvoid_type_node, packed);
701 gfc_conv_descriptor_data_set (pre, desc, tmp);
704 info->data = gfc_conv_descriptor_data_get (desc);
706 /* The offset is zero because we create temporaries with a zero
708 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
710 if (dealloc && !onstack)
712 /* Free the temporary. */
713 tmp = gfc_conv_descriptor_data_get (desc);
714 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
715 gfc_add_expr_to_block (post, tmp);
720 /* Get the array reference dimension corresponding to the given loop dimension.
721 It is different from the true array dimension given by the dim array in
722 the case of a partial array reference
723 It is different from the loop dimension in the case of a transposed array.
727 get_array_ref_dim (gfc_ss_info *info, int loop_dim)
729 int n, array_dim, array_ref_dim;
732 array_dim = info->dim[loop_dim];
734 for (n = 0; n < info->dimen; n++)
735 if (n != loop_dim && info->dim[n] < array_dim)
738 return array_ref_dim;
742 /* Generate code to create and initialize the descriptor for a temporary
743 array. This is used for both temporaries needed by the scalarizer, and
744 functions returning arrays. Adjusts the loop variables to be
745 zero-based, and calculates the loop bounds for callee allocated arrays.
746 Allocate the array unless it's callee allocated (we have a callee
747 allocated array if 'callee_alloc' is true, or if loop->to[n] is
748 NULL_TREE for any n). Also fills in the descriptor, data and offset
749 fields of info if known. Returns the size of the array, or NULL for a
750 callee allocated array.
752 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
753 gfc_trans_allocate_array_storage.
757 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
758 gfc_loopinfo * loop, gfc_ss_info * info,
759 tree eltype, tree initial, bool dynamic,
760 bool dealloc, bool callee_alloc, locus * where)
762 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
772 memset (from, 0, sizeof (from));
773 memset (to, 0, sizeof (to));
775 gcc_assert (info->dimen > 0);
776 gcc_assert (loop->dimen == info->dimen);
778 if (gfc_option.warn_array_temp && where)
779 gfc_warning ("Creating array temporary at %L", where);
781 /* Set the lower bound to zero. */
782 for (n = 0; n < loop->dimen; n++)
786 /* Callee allocated arrays may not have a known bound yet. */
788 loop->to[n] = gfc_evaluate_now (
789 fold_build2_loc (input_location, MINUS_EXPR,
790 gfc_array_index_type,
791 loop->to[n], loop->from[n]),
793 loop->from[n] = gfc_index_zero_node;
795 /* We are constructing the temporary's descriptor based on the loop
796 dimensions. As the dimensions may be accessed in arbitrary order
797 (think of transpose) the size taken from the n'th loop may not map
798 to the n'th dimension of the array. We need to reconstruct loop infos
799 in the right order before using it to set the descriptor
801 tmp_dim = get_array_ref_dim (info, n);
802 from[tmp_dim] = loop->from[n];
803 to[tmp_dim] = loop->to[n];
805 info->delta[dim] = gfc_index_zero_node;
806 info->start[dim] = gfc_index_zero_node;
807 info->end[dim] = gfc_index_zero_node;
808 info->stride[dim] = gfc_index_one_node;
811 /* Initialize the descriptor. */
813 gfc_get_array_type_bounds (eltype, info->dimen, 0, from, to, 1,
814 GFC_ARRAY_UNKNOWN, true);
815 desc = gfc_create_var (type, "atmp");
816 GFC_DECL_PACKED_ARRAY (desc) = 1;
818 info->descriptor = desc;
819 size = gfc_index_one_node;
821 /* Fill in the array dtype. */
822 tmp = gfc_conv_descriptor_dtype (desc);
823 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
826 Fill in the bounds and stride. This is a packed array, so:
829 for (n = 0; n < rank; n++)
832 delta = ubound[n] + 1 - lbound[n];
835 size = size * sizeof(element);
840 /* If there is at least one null loop->to[n], it is a callee allocated
842 for (n = 0; n < loop->dimen; n++)
843 if (loop->to[n] == NULL_TREE)
849 for (n = 0; n < loop->dimen; n++)
853 if (size == NULL_TREE)
855 /* For a callee allocated array express the loop bounds in terms
856 of the descriptor fields. */
857 tmp = fold_build2_loc (input_location,
858 MINUS_EXPR, gfc_array_index_type,
859 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
860 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
865 /* Store the stride and bound components in the descriptor. */
866 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
868 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
869 gfc_index_zero_node);
871 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n],
874 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
875 to[n], gfc_index_one_node);
877 /* Check whether the size for this dimension is negative. */
878 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, tmp,
879 gfc_index_zero_node);
880 cond = gfc_evaluate_now (cond, pre);
885 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
886 boolean_type_node, or_expr, cond);
888 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
890 size = gfc_evaluate_now (size, pre);
892 for (n = info->dimen; n < info->dimen + info->codimen; n++)
894 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
895 gfc_index_zero_node);
896 if (n < info->dimen + info->codimen - 1)
897 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], loop->to[n]);
900 /* Get the size of the array. */
902 if (size && !callee_alloc)
904 /* If or_expr is true, then the extent in at least one
905 dimension is zero and the size is set to zero. */
906 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
907 or_expr, gfc_index_zero_node, size);
910 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
912 fold_convert (gfc_array_index_type,
913 TYPE_SIZE_UNIT (gfc_get_element_type (type))));
921 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
924 if (info->dimen > loop->temp_dim)
925 loop->temp_dim = info->dimen;
931 /* Return the number of iterations in a loop that starts at START,
932 ends at END, and has step STEP. */
935 gfc_get_iteration_count (tree start, tree end, tree step)
940 type = TREE_TYPE (step);
941 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
942 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
943 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
944 build_int_cst (type, 1));
945 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
946 build_int_cst (type, 0));
947 return fold_convert (gfc_array_index_type, tmp);
951 /* Extend the data in array DESC by EXTRA elements. */
954 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
961 if (integer_zerop (extra))
964 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
966 /* Add EXTRA to the upper bound. */
967 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
969 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
971 /* Get the value of the current data pointer. */
972 arg0 = gfc_conv_descriptor_data_get (desc);
974 /* Calculate the new array size. */
975 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
976 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
977 ubound, gfc_index_one_node);
978 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
979 fold_convert (size_type_node, tmp),
980 fold_convert (size_type_node, size));
982 /* Call the realloc() function. */
983 tmp = gfc_call_realloc (pblock, arg0, arg1);
984 gfc_conv_descriptor_data_set (pblock, desc, tmp);
988 /* Return true if the bounds of iterator I can only be determined
992 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
994 return (i->start->expr_type != EXPR_CONSTANT
995 || i->end->expr_type != EXPR_CONSTANT
996 || i->step->expr_type != EXPR_CONSTANT);
1000 /* Split the size of constructor element EXPR into the sum of two terms,
1001 one of which can be determined at compile time and one of which must
1002 be calculated at run time. Set *SIZE to the former and return true
1003 if the latter might be nonzero. */
1006 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1008 if (expr->expr_type == EXPR_ARRAY)
1009 return gfc_get_array_constructor_size (size, expr->value.constructor);
1010 else if (expr->rank > 0)
1012 /* Calculate everything at run time. */
1013 mpz_set_ui (*size, 0);
1018 /* A single element. */
1019 mpz_set_ui (*size, 1);
1025 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1026 of array constructor C. */
1029 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1037 mpz_set_ui (*size, 0);
1042 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1045 if (i && gfc_iterator_has_dynamic_bounds (i))
1049 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1052 /* Multiply the static part of the element size by the
1053 number of iterations. */
1054 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1055 mpz_fdiv_q (val, val, i->step->value.integer);
1056 mpz_add_ui (val, val, 1);
1057 if (mpz_sgn (val) > 0)
1058 mpz_mul (len, len, val);
1060 mpz_set_ui (len, 0);
1062 mpz_add (*size, *size, len);
1071 /* Make sure offset is a variable. */
1074 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1077 /* We should have already created the offset variable. We cannot
1078 create it here because we may be in an inner scope. */
1079 gcc_assert (*offsetvar != NULL_TREE);
1080 gfc_add_modify (pblock, *offsetvar, *poffset);
1081 *poffset = *offsetvar;
1082 TREE_USED (*offsetvar) = 1;
1086 /* Variables needed for bounds-checking. */
1087 static bool first_len;
1088 static tree first_len_val;
1089 static bool typespec_chararray_ctor;
1092 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1093 tree offset, gfc_se * se, gfc_expr * expr)
1097 gfc_conv_expr (se, expr);
1099 /* Store the value. */
1100 tmp = build_fold_indirect_ref_loc (input_location,
1101 gfc_conv_descriptor_data_get (desc));
1102 tmp = gfc_build_array_ref (tmp, offset, NULL);
1104 if (expr->ts.type == BT_CHARACTER)
1106 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1109 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1110 esize = fold_convert (gfc_charlen_type_node, esize);
1111 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1112 gfc_charlen_type_node, esize,
1113 build_int_cst (gfc_charlen_type_node,
1114 gfc_character_kinds[i].bit_size / 8));
1116 gfc_conv_string_parameter (se);
1117 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1119 /* The temporary is an array of pointers. */
1120 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1121 gfc_add_modify (&se->pre, tmp, se->expr);
1125 /* The temporary is an array of string values. */
1126 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1127 /* We know the temporary and the value will be the same length,
1128 so can use memcpy. */
1129 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1130 se->string_length, se->expr, expr->ts.kind);
1132 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1136 gfc_add_modify (&se->pre, first_len_val,
1142 /* Verify that all constructor elements are of the same
1144 tree cond = fold_build2_loc (input_location, NE_EXPR,
1145 boolean_type_node, first_len_val,
1147 gfc_trans_runtime_check
1148 (true, false, cond, &se->pre, &expr->where,
1149 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1150 fold_convert (long_integer_type_node, first_len_val),
1151 fold_convert (long_integer_type_node, se->string_length));
1157 /* TODO: Should the frontend already have done this conversion? */
1158 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1159 gfc_add_modify (&se->pre, tmp, se->expr);
1162 gfc_add_block_to_block (pblock, &se->pre);
1163 gfc_add_block_to_block (pblock, &se->post);
1167 /* Add the contents of an array to the constructor. DYNAMIC is as for
1168 gfc_trans_array_constructor_value. */
1171 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1172 tree type ATTRIBUTE_UNUSED,
1173 tree desc, gfc_expr * expr,
1174 tree * poffset, tree * offsetvar,
1185 /* We need this to be a variable so we can increment it. */
1186 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1188 gfc_init_se (&se, NULL);
1190 /* Walk the array expression. */
1191 ss = gfc_walk_expr (expr);
1192 gcc_assert (ss != gfc_ss_terminator);
1194 /* Initialize the scalarizer. */
1195 gfc_init_loopinfo (&loop);
1196 gfc_add_ss_to_loop (&loop, ss);
1198 /* Initialize the loop. */
1199 gfc_conv_ss_startstride (&loop);
1200 gfc_conv_loop_setup (&loop, &expr->where);
1202 /* Make sure the constructed array has room for the new data. */
1205 /* Set SIZE to the total number of elements in the subarray. */
1206 size = gfc_index_one_node;
1207 for (n = 0; n < loop.dimen; n++)
1209 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1210 gfc_index_one_node);
1211 size = fold_build2_loc (input_location, MULT_EXPR,
1212 gfc_array_index_type, size, tmp);
1215 /* Grow the constructed array by SIZE elements. */
1216 gfc_grow_array (&loop.pre, desc, size);
1219 /* Make the loop body. */
1220 gfc_mark_ss_chain_used (ss, 1);
1221 gfc_start_scalarized_body (&loop, &body);
1222 gfc_copy_loopinfo_to_se (&se, &loop);
1225 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1226 gcc_assert (se.ss == gfc_ss_terminator);
1228 /* Increment the offset. */
1229 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1230 *poffset, gfc_index_one_node);
1231 gfc_add_modify (&body, *poffset, tmp);
1233 /* Finish the loop. */
1234 gfc_trans_scalarizing_loops (&loop, &body);
1235 gfc_add_block_to_block (&loop.pre, &loop.post);
1236 tmp = gfc_finish_block (&loop.pre);
1237 gfc_add_expr_to_block (pblock, tmp);
1239 gfc_cleanup_loop (&loop);
1243 /* Assign the values to the elements of an array constructor. DYNAMIC
1244 is true if descriptor DESC only contains enough data for the static
1245 size calculated by gfc_get_array_constructor_size. When true, memory
1246 for the dynamic parts must be allocated using realloc. */
1249 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1250 tree desc, gfc_constructor_base base,
1251 tree * poffset, tree * offsetvar,
1260 tree shadow_loopvar = NULL_TREE;
1261 gfc_saved_var saved_loopvar;
1264 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1266 /* If this is an iterator or an array, the offset must be a variable. */
1267 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1268 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1270 /* Shadowing the iterator avoids changing its value and saves us from
1271 keeping track of it. Further, it makes sure that there's always a
1272 backend-decl for the symbol, even if there wasn't one before,
1273 e.g. in the case of an iterator that appears in a specification
1274 expression in an interface mapping. */
1277 gfc_symbol *sym = c->iterator->var->symtree->n.sym;
1278 tree type = gfc_typenode_for_spec (&sym->ts);
1280 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1281 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1284 gfc_start_block (&body);
1286 if (c->expr->expr_type == EXPR_ARRAY)
1288 /* Array constructors can be nested. */
1289 gfc_trans_array_constructor_value (&body, type, desc,
1290 c->expr->value.constructor,
1291 poffset, offsetvar, dynamic);
1293 else if (c->expr->rank > 0)
1295 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1296 poffset, offsetvar, dynamic);
1300 /* This code really upsets the gimplifier so don't bother for now. */
1307 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1309 p = gfc_constructor_next (p);
1314 /* Scalar values. */
1315 gfc_init_se (&se, NULL);
1316 gfc_trans_array_ctor_element (&body, desc, *poffset,
1319 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1320 gfc_array_index_type,
1321 *poffset, gfc_index_one_node);
1325 /* Collect multiple scalar constants into a constructor. */
1326 VEC(constructor_elt,gc) *v = NULL;
1330 HOST_WIDE_INT idx = 0;
1333 /* Count the number of consecutive scalar constants. */
1334 while (p && !(p->iterator
1335 || p->expr->expr_type != EXPR_CONSTANT))
1337 gfc_init_se (&se, NULL);
1338 gfc_conv_constant (&se, p->expr);
1340 if (c->expr->ts.type != BT_CHARACTER)
1341 se.expr = fold_convert (type, se.expr);
1342 /* For constant character array constructors we build
1343 an array of pointers. */
1344 else if (POINTER_TYPE_P (type))
1345 se.expr = gfc_build_addr_expr
1346 (gfc_get_pchar_type (p->expr->ts.kind),
1349 CONSTRUCTOR_APPEND_ELT (v,
1350 build_int_cst (gfc_array_index_type,
1354 p = gfc_constructor_next (p);
1357 bound = build_int_cst (NULL_TREE, n - 1);
1358 /* Create an array type to hold them. */
1359 tmptype = build_range_type (gfc_array_index_type,
1360 gfc_index_zero_node, bound);
1361 tmptype = build_array_type (type, tmptype);
1363 init = build_constructor (tmptype, v);
1364 TREE_CONSTANT (init) = 1;
1365 TREE_STATIC (init) = 1;
1366 /* Create a static variable to hold the data. */
1367 tmp = gfc_create_var (tmptype, "data");
1368 TREE_STATIC (tmp) = 1;
1369 TREE_CONSTANT (tmp) = 1;
1370 TREE_READONLY (tmp) = 1;
1371 DECL_INITIAL (tmp) = init;
1374 /* Use BUILTIN_MEMCPY to assign the values. */
1375 tmp = gfc_conv_descriptor_data_get (desc);
1376 tmp = build_fold_indirect_ref_loc (input_location,
1378 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1379 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1380 init = gfc_build_addr_expr (NULL_TREE, init);
1382 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1383 bound = build_int_cst (NULL_TREE, n * size);
1384 tmp = build_call_expr_loc (input_location,
1385 built_in_decls[BUILT_IN_MEMCPY], 3,
1387 gfc_add_expr_to_block (&body, tmp);
1389 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1390 gfc_array_index_type, *poffset,
1391 build_int_cst (gfc_array_index_type, n));
1393 if (!INTEGER_CST_P (*poffset))
1395 gfc_add_modify (&body, *offsetvar, *poffset);
1396 *poffset = *offsetvar;
1400 /* The frontend should already have done any expansions
1404 /* Pass the code as is. */
1405 tmp = gfc_finish_block (&body);
1406 gfc_add_expr_to_block (pblock, tmp);
1410 /* Build the implied do-loop. */
1411 stmtblock_t implied_do_block;
1419 loopbody = gfc_finish_block (&body);
1421 /* Create a new block that holds the implied-do loop. A temporary
1422 loop-variable is used. */
1423 gfc_start_block(&implied_do_block);
1425 /* Initialize the loop. */
1426 gfc_init_se (&se, NULL);
1427 gfc_conv_expr_val (&se, c->iterator->start);
1428 gfc_add_block_to_block (&implied_do_block, &se.pre);
1429 gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
1431 gfc_init_se (&se, NULL);
1432 gfc_conv_expr_val (&se, c->iterator->end);
1433 gfc_add_block_to_block (&implied_do_block, &se.pre);
1434 end = gfc_evaluate_now (se.expr, &implied_do_block);
1436 gfc_init_se (&se, NULL);
1437 gfc_conv_expr_val (&se, c->iterator->step);
1438 gfc_add_block_to_block (&implied_do_block, &se.pre);
1439 step = gfc_evaluate_now (se.expr, &implied_do_block);
1441 /* If this array expands dynamically, and the number of iterations
1442 is not constant, we won't have allocated space for the static
1443 part of C->EXPR's size. Do that now. */
1444 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1446 /* Get the number of iterations. */
1447 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1449 /* Get the static part of C->EXPR's size. */
1450 gfc_get_array_constructor_element_size (&size, c->expr);
1451 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1453 /* Grow the array by TMP * TMP2 elements. */
1454 tmp = fold_build2_loc (input_location, MULT_EXPR,
1455 gfc_array_index_type, tmp, tmp2);
1456 gfc_grow_array (&implied_do_block, desc, tmp);
1459 /* Generate the loop body. */
1460 exit_label = gfc_build_label_decl (NULL_TREE);
1461 gfc_start_block (&body);
1463 /* Generate the exit condition. Depending on the sign of
1464 the step variable we have to generate the correct
1466 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1467 step, build_int_cst (TREE_TYPE (step), 0));
1468 cond = fold_build3_loc (input_location, COND_EXPR,
1469 boolean_type_node, tmp,
1470 fold_build2_loc (input_location, GT_EXPR,
1471 boolean_type_node, shadow_loopvar, end),
1472 fold_build2_loc (input_location, LT_EXPR,
1473 boolean_type_node, shadow_loopvar, end));
1474 tmp = build1_v (GOTO_EXPR, exit_label);
1475 TREE_USED (exit_label) = 1;
1476 tmp = build3_v (COND_EXPR, cond, tmp,
1477 build_empty_stmt (input_location));
1478 gfc_add_expr_to_block (&body, tmp);
1480 /* The main loop body. */
1481 gfc_add_expr_to_block (&body, loopbody);
1483 /* Increase loop variable by step. */
1484 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1485 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1487 gfc_add_modify (&body, shadow_loopvar, tmp);
1489 /* Finish the loop. */
1490 tmp = gfc_finish_block (&body);
1491 tmp = build1_v (LOOP_EXPR, tmp);
1492 gfc_add_expr_to_block (&implied_do_block, tmp);
1494 /* Add the exit label. */
1495 tmp = build1_v (LABEL_EXPR, exit_label);
1496 gfc_add_expr_to_block (&implied_do_block, tmp);
1498 /* Finishe the implied-do loop. */
1499 tmp = gfc_finish_block(&implied_do_block);
1500 gfc_add_expr_to_block(pblock, tmp);
1502 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1509 /* A catch-all to obtain the string length for anything that is not a
1510 a substring of non-constant length, a constant, array or variable. */
1513 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1518 /* Don't bother if we already know the length is a constant. */
1519 if (*len && INTEGER_CST_P (*len))
1522 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1523 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1526 gfc_conv_const_charlen (e->ts.u.cl);
1527 *len = e->ts.u.cl->backend_decl;
1531 /* Otherwise, be brutal even if inefficient. */
1532 ss = gfc_walk_expr (e);
1533 gfc_init_se (&se, NULL);
1535 /* No function call, in case of side effects. */
1536 se.no_function_call = 1;
1537 if (ss == gfc_ss_terminator)
1538 gfc_conv_expr (&se, e);
1540 gfc_conv_expr_descriptor (&se, e, ss);
1542 /* Fix the value. */
1543 *len = gfc_evaluate_now (se.string_length, &se.pre);
1545 gfc_add_block_to_block (block, &se.pre);
1546 gfc_add_block_to_block (block, &se.post);
1548 e->ts.u.cl->backend_decl = *len;
1553 /* Figure out the string length of a variable reference expression.
1554 Used by get_array_ctor_strlen. */
1557 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1563 /* Don't bother if we already know the length is a constant. */
1564 if (*len && INTEGER_CST_P (*len))
1567 ts = &expr->symtree->n.sym->ts;
1568 for (ref = expr->ref; ref; ref = ref->next)
1573 /* Array references don't change the string length. */
1577 /* Use the length of the component. */
1578 ts = &ref->u.c.component->ts;
1582 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1583 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1585 /* Note that this might evaluate expr. */
1586 get_array_ctor_all_strlen (block, expr, len);
1589 mpz_init_set_ui (char_len, 1);
1590 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1591 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1592 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1593 *len = convert (gfc_charlen_type_node, *len);
1594 mpz_clear (char_len);
1602 *len = ts->u.cl->backend_decl;
1606 /* Figure out the string length of a character array constructor.
1607 If len is NULL, don't calculate the length; this happens for recursive calls
1608 when a sub-array-constructor is an element but not at the first position,
1609 so when we're not interested in the length.
1610 Returns TRUE if all elements are character constants. */
1613 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1620 if (gfc_constructor_first (base) == NULL)
1623 *len = build_int_cstu (gfc_charlen_type_node, 0);
1627 /* Loop over all constructor elements to find out is_const, but in len we
1628 want to store the length of the first, not the last, element. We can
1629 of course exit the loop as soon as is_const is found to be false. */
1630 for (c = gfc_constructor_first (base);
1631 c && is_const; c = gfc_constructor_next (c))
1633 switch (c->expr->expr_type)
1636 if (len && !(*len && INTEGER_CST_P (*len)))
1637 *len = build_int_cstu (gfc_charlen_type_node,
1638 c->expr->value.character.length);
1642 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1649 get_array_ctor_var_strlen (block, c->expr, len);
1655 get_array_ctor_all_strlen (block, c->expr, len);
1659 /* After the first iteration, we don't want the length modified. */
1666 /* Check whether the array constructor C consists entirely of constant
1667 elements, and if so returns the number of those elements, otherwise
1668 return zero. Note, an empty or NULL array constructor returns zero. */
1670 unsigned HOST_WIDE_INT
1671 gfc_constant_array_constructor_p (gfc_constructor_base base)
1673 unsigned HOST_WIDE_INT nelem = 0;
1675 gfc_constructor *c = gfc_constructor_first (base);
1679 || c->expr->rank > 0
1680 || c->expr->expr_type != EXPR_CONSTANT)
1682 c = gfc_constructor_next (c);
1689 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1690 and the tree type of it's elements, TYPE, return a static constant
1691 variable that is compile-time initialized. */
1694 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1696 tree tmptype, init, tmp;
1697 HOST_WIDE_INT nelem;
1702 VEC(constructor_elt,gc) *v = NULL;
1704 /* First traverse the constructor list, converting the constants
1705 to tree to build an initializer. */
1707 c = gfc_constructor_first (expr->value.constructor);
1710 gfc_init_se (&se, NULL);
1711 gfc_conv_constant (&se, c->expr);
1712 if (c->expr->ts.type != BT_CHARACTER)
1713 se.expr = fold_convert (type, se.expr);
1714 else if (POINTER_TYPE_P (type))
1715 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
1717 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
1719 c = gfc_constructor_next (c);
1723 /* Next determine the tree type for the array. We use the gfortran
1724 front-end's gfc_get_nodesc_array_type in order to create a suitable
1725 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
1727 memset (&as, 0, sizeof (gfc_array_spec));
1729 as.rank = expr->rank;
1730 as.type = AS_EXPLICIT;
1733 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1734 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
1738 for (i = 0; i < expr->rank; i++)
1740 int tmp = (int) mpz_get_si (expr->shape[i]);
1741 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1742 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
1746 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
1748 /* as is not needed anymore. */
1749 for (i = 0; i < as.rank + as.corank; i++)
1751 gfc_free_expr (as.lower[i]);
1752 gfc_free_expr (as.upper[i]);
1755 init = build_constructor (tmptype, v);
1757 TREE_CONSTANT (init) = 1;
1758 TREE_STATIC (init) = 1;
1760 tmp = gfc_create_var (tmptype, "A");
1761 TREE_STATIC (tmp) = 1;
1762 TREE_CONSTANT (tmp) = 1;
1763 TREE_READONLY (tmp) = 1;
1764 DECL_INITIAL (tmp) = init;
1770 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
1771 This mostly initializes the scalarizer state info structure with the
1772 appropriate values to directly use the array created by the function
1773 gfc_build_constant_array_constructor. */
1776 gfc_trans_constant_array_constructor (gfc_loopinfo * loop,
1777 gfc_ss * ss, tree type)
1783 tmp = gfc_build_constant_array_constructor (ss->expr, type);
1785 info = &ss->data.info;
1787 info->descriptor = tmp;
1788 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
1789 info->offset = gfc_index_zero_node;
1791 for (i = 0; i < info->dimen + info->codimen; i++)
1793 info->delta[i] = gfc_index_zero_node;
1794 info->start[i] = gfc_index_zero_node;
1795 info->end[i] = gfc_index_zero_node;
1796 info->stride[i] = gfc_index_one_node;
1800 if (info->dimen > loop->temp_dim)
1801 loop->temp_dim = info->dimen;
1804 /* Helper routine of gfc_trans_array_constructor to determine if the
1805 bounds of the loop specified by LOOP are constant and simple enough
1806 to use with gfc_trans_constant_array_constructor. Returns the
1807 iteration count of the loop if suitable, and NULL_TREE otherwise. */
1810 constant_array_constructor_loop_size (gfc_loopinfo * loop)
1812 tree size = gfc_index_one_node;
1816 for (i = 0; i < loop->dimen; i++)
1818 /* If the bounds aren't constant, return NULL_TREE. */
1819 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
1821 if (!integer_zerop (loop->from[i]))
1823 /* Only allow nonzero "from" in one-dimensional arrays. */
1824 if (loop->dimen != 1)
1826 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1827 gfc_array_index_type,
1828 loop->to[i], loop->from[i]);
1832 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1833 tmp, gfc_index_one_node);
1834 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1842 /* Array constructors are handled by constructing a temporary, then using that
1843 within the scalarization loop. This is not optimal, but seems by far the
1847 gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
1849 gfc_constructor_base c;
1856 bool old_first_len, old_typespec_chararray_ctor;
1857 tree old_first_len_val;
1859 /* Save the old values for nested checking. */
1860 old_first_len = first_len;
1861 old_first_len_val = first_len_val;
1862 old_typespec_chararray_ctor = typespec_chararray_ctor;
1864 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
1865 typespec was given for the array constructor. */
1866 typespec_chararray_ctor = (ss->expr->ts.u.cl
1867 && ss->expr->ts.u.cl->length_from_typespec);
1869 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
1870 && ss->expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
1872 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
1876 ss->data.info.dimen = loop->dimen;
1878 c = ss->expr->value.constructor;
1879 if (ss->expr->ts.type == BT_CHARACTER)
1883 /* get_array_ctor_strlen walks the elements of the constructor, if a
1884 typespec was given, we already know the string length and want the one
1886 if (typespec_chararray_ctor && ss->expr->ts.u.cl->length
1887 && ss->expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
1891 const_string = false;
1892 gfc_init_se (&length_se, NULL);
1893 gfc_conv_expr_type (&length_se, ss->expr->ts.u.cl->length,
1894 gfc_charlen_type_node);
1895 ss->string_length = length_se.expr;
1896 gfc_add_block_to_block (&loop->pre, &length_se.pre);
1897 gfc_add_block_to_block (&loop->post, &length_se.post);
1900 const_string = get_array_ctor_strlen (&loop->pre, c,
1901 &ss->string_length);
1903 /* Complex character array constructors should have been taken care of
1904 and not end up here. */
1905 gcc_assert (ss->string_length);
1907 ss->expr->ts.u.cl->backend_decl = ss->string_length;
1909 type = gfc_get_character_type_len (ss->expr->ts.kind, ss->string_length);
1911 type = build_pointer_type (type);
1914 type = gfc_typenode_for_spec (&ss->expr->ts);
1916 /* See if the constructor determines the loop bounds. */
1919 if (ss->expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
1921 /* We have a multidimensional parameter. */
1923 for (n = 0; n < ss->expr->rank; n++)
1925 loop->from[n] = gfc_index_zero_node;
1926 loop->to[n] = gfc_conv_mpz_to_tree (ss->expr->shape [n],
1927 gfc_index_integer_kind);
1928 loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
1929 gfc_array_index_type,
1930 loop->to[n], gfc_index_one_node);
1934 if (loop->to[0] == NULL_TREE)
1938 /* We should have a 1-dimensional, zero-based loop. */
1939 gcc_assert (loop->dimen == 1);
1940 gcc_assert (integer_zerop (loop->from[0]));
1942 /* Split the constructor size into a static part and a dynamic part.
1943 Allocate the static size up-front and record whether the dynamic
1944 size might be nonzero. */
1946 dynamic = gfc_get_array_constructor_size (&size, c);
1947 mpz_sub_ui (size, size, 1);
1948 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1952 /* Special case constant array constructors. */
1955 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
1958 tree size = constant_array_constructor_loop_size (loop);
1959 if (size && compare_tree_int (size, nelem) == 0)
1961 gfc_trans_constant_array_constructor (loop, ss, type);
1967 if (TREE_CODE (loop->to[0]) == VAR_DECL)
1970 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, &ss->data.info,
1971 type, NULL_TREE, dynamic, true, false, where);
1973 desc = ss->data.info.descriptor;
1974 offset = gfc_index_zero_node;
1975 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
1976 TREE_NO_WARNING (offsetvar) = 1;
1977 TREE_USED (offsetvar) = 0;
1978 gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
1979 &offset, &offsetvar, dynamic);
1981 /* If the array grows dynamically, the upper bound of the loop variable
1982 is determined by the array's final upper bound. */
1985 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1986 gfc_array_index_type,
1987 offsetvar, gfc_index_one_node);
1988 tmp = gfc_evaluate_now (tmp, &loop->pre);
1989 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
1990 if (loop->to[0] && TREE_CODE (loop->to[0]) == VAR_DECL)
1991 gfc_add_modify (&loop->pre, loop->to[0], tmp);
1996 if (TREE_USED (offsetvar))
1997 pushdecl (offsetvar);
1999 gcc_assert (INTEGER_CST_P (offset));
2002 /* Disable bound checking for now because it's probably broken. */
2003 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2010 /* Restore old values of globals. */
2011 first_len = old_first_len;
2012 first_len_val = old_first_len_val;
2013 typespec_chararray_ctor = old_typespec_chararray_ctor;
2017 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2018 called after evaluating all of INFO's vector dimensions. Go through
2019 each such vector dimension and see if we can now fill in any missing
2023 gfc_set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss_info * info)
2032 for (n = 0; n < loop->dimen + loop->codimen; n++)
2035 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
2036 && loop->to[n] == NULL)
2038 /* Loop variable N indexes vector dimension DIM, and we don't
2039 yet know the upper bound of loop variable N. Set it to the
2040 difference between the vector's upper and lower bounds. */
2041 gcc_assert (loop->from[n] == gfc_index_zero_node);
2042 gcc_assert (info->subscript[dim]
2043 && info->subscript[dim]->type == GFC_SS_VECTOR);
2045 gfc_init_se (&se, NULL);
2046 desc = info->subscript[dim]->data.info.descriptor;
2047 zero = gfc_rank_cst[0];
2048 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2049 gfc_array_index_type,
2050 gfc_conv_descriptor_ubound_get (desc, zero),
2051 gfc_conv_descriptor_lbound_get (desc, zero));
2052 tmp = gfc_evaluate_now (tmp, &loop->pre);
2059 /* Add the pre and post chains for all the scalar expressions in a SS chain
2060 to loop. This is called after the loop parameters have been calculated,
2061 but before the actual scalarizing loops. */
2064 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2070 /* TODO: This can generate bad code if there are ordering dependencies,
2071 e.g., a callee allocated function and an unknown size constructor. */
2072 gcc_assert (ss != NULL);
2074 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2081 /* Scalar expression. Evaluate this now. This includes elemental
2082 dimension indices, but not array section bounds. */
2083 gfc_init_se (&se, NULL);
2084 gfc_conv_expr (&se, ss->expr);
2085 gfc_add_block_to_block (&loop->pre, &se.pre);
2087 if (ss->expr->ts.type != BT_CHARACTER)
2089 /* Move the evaluation of scalar expressions outside the
2090 scalarization loop, except for WHERE assignments. */
2092 se.expr = convert(gfc_array_index_type, se.expr);
2094 se.expr = gfc_evaluate_now (se.expr, &loop->pre);
2095 gfc_add_block_to_block (&loop->pre, &se.post);
2098 gfc_add_block_to_block (&loop->post, &se.post);
2100 ss->data.scalar.expr = se.expr;
2101 ss->string_length = se.string_length;
2104 case GFC_SS_REFERENCE:
2105 /* Scalar argument to elemental procedure. Evaluate this
2107 gfc_init_se (&se, NULL);
2108 gfc_conv_expr (&se, ss->expr);
2109 gfc_add_block_to_block (&loop->pre, &se.pre);
2110 gfc_add_block_to_block (&loop->post, &se.post);
2112 ss->data.scalar.expr = gfc_evaluate_now (se.expr, &loop->pre);
2113 ss->string_length = se.string_length;
2116 case GFC_SS_SECTION:
2117 /* Add the expressions for scalar and vector subscripts. */
2118 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2119 if (ss->data.info.subscript[n])
2120 gfc_add_loop_ss_code (loop, ss->data.info.subscript[n], true,
2123 gfc_set_vector_loop_bounds (loop, &ss->data.info);
2127 /* Get the vector's descriptor and store it in SS. */
2128 gfc_init_se (&se, NULL);
2129 gfc_conv_expr_descriptor (&se, ss->expr, gfc_walk_expr (ss->expr));
2130 gfc_add_block_to_block (&loop->pre, &se.pre);
2131 gfc_add_block_to_block (&loop->post, &se.post);
2132 ss->data.info.descriptor = se.expr;
2135 case GFC_SS_INTRINSIC:
2136 gfc_add_intrinsic_ss_code (loop, ss);
2139 case GFC_SS_FUNCTION:
2140 /* Array function return value. We call the function and save its
2141 result in a temporary for use inside the loop. */
2142 gfc_init_se (&se, NULL);
2145 gfc_conv_expr (&se, ss->expr);
2146 gfc_add_block_to_block (&loop->pre, &se.pre);
2147 gfc_add_block_to_block (&loop->post, &se.post);
2148 ss->string_length = se.string_length;
2151 case GFC_SS_CONSTRUCTOR:
2152 if (ss->expr->ts.type == BT_CHARACTER
2153 && ss->string_length == NULL
2154 && ss->expr->ts.u.cl
2155 && ss->expr->ts.u.cl->length)
2157 gfc_init_se (&se, NULL);
2158 gfc_conv_expr_type (&se, ss->expr->ts.u.cl->length,
2159 gfc_charlen_type_node);
2160 ss->string_length = se.expr;
2161 gfc_add_block_to_block (&loop->pre, &se.pre);
2162 gfc_add_block_to_block (&loop->post, &se.post);
2164 gfc_trans_array_constructor (loop, ss, where);
2168 case GFC_SS_COMPONENT:
2169 /* Do nothing. These are handled elsewhere. */
2179 /* Translate expressions for the descriptor and data pointer of a SS. */
2183 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2188 /* Get the descriptor for the array to be scalarized. */
2189 gcc_assert (ss->expr->expr_type == EXPR_VARIABLE);
2190 gfc_init_se (&se, NULL);
2191 se.descriptor_only = 1;
2192 gfc_conv_expr_lhs (&se, ss->expr);
2193 gfc_add_block_to_block (block, &se.pre);
2194 ss->data.info.descriptor = se.expr;
2195 ss->string_length = se.string_length;
2199 /* Also the data pointer. */
2200 tmp = gfc_conv_array_data (se.expr);
2201 /* If this is a variable or address of a variable we use it directly.
2202 Otherwise we must evaluate it now to avoid breaking dependency
2203 analysis by pulling the expressions for elemental array indices
2206 || (TREE_CODE (tmp) == ADDR_EXPR
2207 && DECL_P (TREE_OPERAND (tmp, 0)))))
2208 tmp = gfc_evaluate_now (tmp, block);
2209 ss->data.info.data = tmp;
2211 tmp = gfc_conv_array_offset (se.expr);
2212 ss->data.info.offset = gfc_evaluate_now (tmp, block);
2214 /* Make absolutely sure that the saved_offset is indeed saved
2215 so that the variable is still accessible after the loops
2217 ss->data.info.saved_offset = ss->data.info.offset;
2222 /* Initialize a gfc_loopinfo structure. */
2225 gfc_init_loopinfo (gfc_loopinfo * loop)
2229 memset (loop, 0, sizeof (gfc_loopinfo));
2230 gfc_init_block (&loop->pre);
2231 gfc_init_block (&loop->post);
2233 /* Initially scalarize in order and default to no loop reversal. */
2234 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2237 loop->reverse[n] = GFC_CANNOT_REVERSE;
2240 loop->ss = gfc_ss_terminator;
2244 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2248 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2254 /* Return an expression for the data pointer of an array. */
2257 gfc_conv_array_data (tree descriptor)
2261 type = TREE_TYPE (descriptor);
2262 if (GFC_ARRAY_TYPE_P (type))
2264 if (TREE_CODE (type) == POINTER_TYPE)
2268 /* Descriptorless arrays. */
2269 return gfc_build_addr_expr (NULL_TREE, descriptor);
2273 return gfc_conv_descriptor_data_get (descriptor);
2277 /* Return an expression for the base offset of an array. */
2280 gfc_conv_array_offset (tree descriptor)
2284 type = TREE_TYPE (descriptor);
2285 if (GFC_ARRAY_TYPE_P (type))
2286 return GFC_TYPE_ARRAY_OFFSET (type);
2288 return gfc_conv_descriptor_offset_get (descriptor);
2292 /* Get an expression for the array stride. */
2295 gfc_conv_array_stride (tree descriptor, int dim)
2300 type = TREE_TYPE (descriptor);
2302 /* For descriptorless arrays use the array size. */
2303 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2304 if (tmp != NULL_TREE)
2307 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2312 /* Like gfc_conv_array_stride, but for the lower bound. */
2315 gfc_conv_array_lbound (tree descriptor, int dim)
2320 type = TREE_TYPE (descriptor);
2322 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2323 if (tmp != NULL_TREE)
2326 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2331 /* Like gfc_conv_array_stride, but for the upper bound. */
2334 gfc_conv_array_ubound (tree descriptor, int dim)
2339 type = TREE_TYPE (descriptor);
2341 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2342 if (tmp != NULL_TREE)
2345 /* This should only ever happen when passing an assumed shape array
2346 as an actual parameter. The value will never be used. */
2347 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2348 return gfc_index_zero_node;
2350 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2355 /* Generate code to perform an array index bound check. */
2358 gfc_trans_array_bound_check (gfc_se * se, tree descriptor, tree index, int n,
2359 locus * where, bool check_upper)
2362 tree tmp_lo, tmp_up;
2364 const char * name = NULL;
2366 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2369 index = gfc_evaluate_now (index, &se->pre);
2371 /* We find a name for the error message. */
2373 name = se->ss->expr->symtree->name;
2375 if (!name && se->loop && se->loop->ss && se->loop->ss->expr
2376 && se->loop->ss->expr->symtree)
2377 name = se->loop->ss->expr->symtree->name;
2379 if (!name && se->loop && se->loop->ss && se->loop->ss->loop_chain
2380 && se->loop->ss->loop_chain->expr
2381 && se->loop->ss->loop_chain->expr->symtree)
2382 name = se->loop->ss->loop_chain->expr->symtree->name;
2384 if (!name && se->loop && se->loop->ss && se->loop->ss->expr)
2386 if (se->loop->ss->expr->expr_type == EXPR_FUNCTION
2387 && se->loop->ss->expr->value.function.name)
2388 name = se->loop->ss->expr->value.function.name;
2390 if (se->loop->ss->type == GFC_SS_CONSTRUCTOR
2391 || se->loop->ss->type == GFC_SS_SCALAR)
2392 name = "unnamed constant";
2395 if (TREE_CODE (descriptor) == VAR_DECL)
2396 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2398 /* If upper bound is present, include both bounds in the error message. */
2401 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2402 tmp_up = gfc_conv_array_ubound (descriptor, n);
2405 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2406 "outside of expected range (%%ld:%%ld)", n+1, name);
2408 asprintf (&msg, "Index '%%ld' of dimension %d "
2409 "outside of expected range (%%ld:%%ld)", n+1);
2411 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2413 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2414 fold_convert (long_integer_type_node, index),
2415 fold_convert (long_integer_type_node, tmp_lo),
2416 fold_convert (long_integer_type_node, tmp_up));
2417 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2419 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2420 fold_convert (long_integer_type_node, index),
2421 fold_convert (long_integer_type_node, tmp_lo),
2422 fold_convert (long_integer_type_node, tmp_up));
2427 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2430 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2431 "below lower bound of %%ld", n+1, name);
2433 asprintf (&msg, "Index '%%ld' of dimension %d "
2434 "below lower bound of %%ld", n+1);
2436 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2438 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2439 fold_convert (long_integer_type_node, index),
2440 fold_convert (long_integer_type_node, tmp_lo));
2448 /* Return the offset for an index. Performs bound checking for elemental
2449 dimensions. Single element references are processed separately.
2450 DIM is the array dimension, I is the loop dimension. */
2453 gfc_conv_array_index_offset (gfc_se * se, gfc_ss_info * info, int dim, int i,
2454 gfc_array_ref * ar, tree stride)
2460 /* Get the index into the array for this dimension. */
2463 gcc_assert (ar->type != AR_ELEMENT);
2464 switch (ar->dimen_type[dim])
2466 case DIMEN_THIS_IMAGE:
2470 /* Elemental dimension. */
2471 gcc_assert (info->subscript[dim]
2472 && info->subscript[dim]->type == GFC_SS_SCALAR);
2473 /* We've already translated this value outside the loop. */
2474 index = info->subscript[dim]->data.scalar.expr;
2476 index = gfc_trans_array_bound_check (se, info->descriptor,
2477 index, dim, &ar->where,
2478 ar->as->type != AS_ASSUMED_SIZE
2479 || dim < ar->dimen - 1);
2483 gcc_assert (info && se->loop);
2484 gcc_assert (info->subscript[dim]
2485 && info->subscript[dim]->type == GFC_SS_VECTOR);
2486 desc = info->subscript[dim]->data.info.descriptor;
2488 /* Get a zero-based index into the vector. */
2489 index = fold_build2_loc (input_location, MINUS_EXPR,
2490 gfc_array_index_type,
2491 se->loop->loopvar[i], se->loop->from[i]);
2493 /* Multiply the index by the stride. */
2494 index = fold_build2_loc (input_location, MULT_EXPR,
2495 gfc_array_index_type,
2496 index, gfc_conv_array_stride (desc, 0));
2498 /* Read the vector to get an index into info->descriptor. */
2499 data = build_fold_indirect_ref_loc (input_location,
2500 gfc_conv_array_data (desc));
2501 index = gfc_build_array_ref (data, index, NULL);
2502 index = gfc_evaluate_now (index, &se->pre);
2503 index = fold_convert (gfc_array_index_type, index);
2505 /* Do any bounds checking on the final info->descriptor index. */
2506 index = gfc_trans_array_bound_check (se, info->descriptor,
2507 index, dim, &ar->where,
2508 ar->as->type != AS_ASSUMED_SIZE
2509 || dim < ar->dimen - 1);
2513 /* Scalarized dimension. */
2514 gcc_assert (info && se->loop);
2516 /* Multiply the loop variable by the stride and delta. */
2517 index = se->loop->loopvar[i];
2518 if (!integer_onep (info->stride[dim]))
2519 index = fold_build2_loc (input_location, MULT_EXPR,
2520 gfc_array_index_type, index,
2522 if (!integer_zerop (info->delta[dim]))
2523 index = fold_build2_loc (input_location, PLUS_EXPR,
2524 gfc_array_index_type, index,
2534 /* Temporary array or derived type component. */
2535 gcc_assert (se->loop);
2536 index = se->loop->loopvar[se->loop->order[i]];
2537 if (!integer_zerop (info->delta[dim]))
2538 index = fold_build2_loc (input_location, PLUS_EXPR,
2539 gfc_array_index_type, index, info->delta[dim]);
2542 /* Multiply by the stride. */
2543 if (!integer_onep (stride))
2544 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2551 /* Build a scalarized reference to an array. */
2554 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
2557 tree decl = NULL_TREE;
2562 info = &se->ss->data.info;
2564 n = se->loop->order[0];
2568 index = gfc_conv_array_index_offset (se, info, info->dim[n], n, ar,
2570 /* Add the offset for this dimension to the stored offset for all other
2572 if (!integer_zerop (info->offset))
2573 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2574 index, info->offset);
2576 if (se->ss->expr && is_subref_array (se->ss->expr))
2577 decl = se->ss->expr->symtree->n.sym->backend_decl;
2579 tmp = build_fold_indirect_ref_loc (input_location,
2581 se->expr = gfc_build_array_ref (tmp, index, decl);
2585 /* Translate access of temporary array. */
2588 gfc_conv_tmp_array_ref (gfc_se * se)
2590 se->string_length = se->ss->string_length;
2591 gfc_conv_scalarized_array_ref (se, NULL);
2592 gfc_advance_se_ss_chain (se);
2596 /* Build an array reference. se->expr already holds the array descriptor.
2597 This should be either a variable, indirect variable reference or component
2598 reference. For arrays which do not have a descriptor, se->expr will be
2600 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
2603 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
2616 /* Handle scalarized references separately. */
2617 if (ar->type != AR_ELEMENT)
2619 gfc_conv_scalarized_array_ref (se, ar);
2620 gfc_advance_se_ss_chain (se);
2624 index = gfc_index_zero_node;
2626 /* Calculate the offsets from all the dimensions. */
2627 for (n = 0; n < ar->dimen; n++)
2629 /* Calculate the index for this dimension. */
2630 gfc_init_se (&indexse, se);
2631 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
2632 gfc_add_block_to_block (&se->pre, &indexse.pre);
2634 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2636 /* Check array bounds. */
2640 /* Evaluate the indexse.expr only once. */
2641 indexse.expr = save_expr (indexse.expr);
2644 tmp = gfc_conv_array_lbound (se->expr, n);
2645 if (sym->attr.temporary)
2647 gfc_init_se (&tmpse, se);
2648 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
2649 gfc_array_index_type);
2650 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2654 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2656 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2657 "below lower bound of %%ld", n+1, sym->name);
2658 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2659 fold_convert (long_integer_type_node,
2661 fold_convert (long_integer_type_node, tmp));
2664 /* Upper bound, but not for the last dimension of assumed-size
2666 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
2668 tmp = gfc_conv_array_ubound (se->expr, n);
2669 if (sym->attr.temporary)
2671 gfc_init_se (&tmpse, se);
2672 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
2673 gfc_array_index_type);
2674 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2678 cond = fold_build2_loc (input_location, GT_EXPR,
2679 boolean_type_node, indexse.expr, tmp);
2680 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2681 "above upper bound of %%ld", n+1, sym->name);
2682 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2683 fold_convert (long_integer_type_node,
2685 fold_convert (long_integer_type_node, tmp));
2690 /* Multiply the index by the stride. */
2691 stride = gfc_conv_array_stride (se->expr, n);
2692 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2693 indexse.expr, stride);
2695 /* And add it to the total. */
2696 index = fold_build2_loc (input_location, PLUS_EXPR,
2697 gfc_array_index_type, index, tmp);
2700 tmp = gfc_conv_array_offset (se->expr);
2701 if (!integer_zerop (tmp))
2702 index = fold_build2_loc (input_location, PLUS_EXPR,
2703 gfc_array_index_type, index, tmp);
2705 /* Access the calculated element. */
2706 tmp = gfc_conv_array_data (se->expr);
2707 tmp = build_fold_indirect_ref (tmp);
2708 se->expr = gfc_build_array_ref (tmp, index, sym->backend_decl);
2712 /* Generate the code to be executed immediately before entering a
2713 scalarization loop. */
2716 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
2717 stmtblock_t * pblock)
2726 /* This code will be executed before entering the scalarization loop
2727 for this dimension. */
2728 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
2730 if ((ss->useflags & flag) == 0)
2733 if (ss->type != GFC_SS_SECTION
2734 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
2735 && ss->type != GFC_SS_COMPONENT)
2738 info = &ss->data.info;
2740 if (dim >= info->dimen)
2743 if (dim == info->dimen - 1)
2745 /* For the outermost loop calculate the offset due to any
2746 elemental dimensions. It will have been initialized with the
2747 base offset of the array. */
2750 for (i = 0; i < info->ref->u.ar.dimen; i++)
2752 if (info->ref->u.ar.dimen_type[i] != DIMEN_ELEMENT)
2755 gfc_init_se (&se, NULL);
2757 se.expr = info->descriptor;
2758 stride = gfc_conv_array_stride (info->descriptor, i);
2759 index = gfc_conv_array_index_offset (&se, info, i, -1,
2762 gfc_add_block_to_block (pblock, &se.pre);
2764 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2765 gfc_array_index_type,
2766 info->offset, index);
2767 info->offset = gfc_evaluate_now (info->offset, pblock);
2772 /* For the time being, the innermost loop is unconditionally on
2773 the first dimension of the scalarization loop. */
2774 gcc_assert (i == 0);
2775 stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
2777 /* Calculate the stride of the innermost loop. Hopefully this will
2778 allow the backend optimizers to do their stuff more effectively.
2780 info->stride0 = gfc_evaluate_now (stride, pblock);
2784 /* Add the offset for the previous loop dimension. */
2789 ar = &info->ref->u.ar;
2790 i = loop->order[dim + 1];
2798 gfc_init_se (&se, NULL);
2800 se.expr = info->descriptor;
2801 stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
2802 index = gfc_conv_array_index_offset (&se, info, info->dim[i], i,
2804 gfc_add_block_to_block (pblock, &se.pre);
2805 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2806 gfc_array_index_type, info->offset,
2808 info->offset = gfc_evaluate_now (info->offset, pblock);
2811 /* Remember this offset for the second loop. */
2812 if (dim == loop->temp_dim - 1)
2813 info->saved_offset = info->offset;
2818 /* Start a scalarized expression. Creates a scope and declares loop
2822 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
2828 gcc_assert (!loop->array_parameter);
2830 for (dim = loop->dimen + loop->codimen - 1; dim >= 0; dim--)
2832 n = loop->order[dim];
2834 gfc_start_block (&loop->code[n]);
2836 /* Create the loop variable. */
2837 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
2839 if (dim < loop->temp_dim)
2843 /* Calculate values that will be constant within this loop. */
2844 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
2846 gfc_start_block (pbody);
2850 /* Generates the actual loop code for a scalarization loop. */
2853 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
2854 stmtblock_t * pbody)
2865 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
2866 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
2867 && n == loop->dimen - 1)
2869 /* We create an OMP_FOR construct for the outermost scalarized loop. */
2870 init = make_tree_vec (1);
2871 cond = make_tree_vec (1);
2872 incr = make_tree_vec (1);
2874 /* Cycle statement is implemented with a goto. Exit statement must not
2875 be present for this loop. */
2876 exit_label = gfc_build_label_decl (NULL_TREE);
2877 TREE_USED (exit_label) = 1;
2879 /* Label for cycle statements (if needed). */
2880 tmp = build1_v (LABEL_EXPR, exit_label);
2881 gfc_add_expr_to_block (pbody, tmp);
2883 stmt = make_node (OMP_FOR);
2885 TREE_TYPE (stmt) = void_type_node;
2886 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
2888 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
2889 OMP_CLAUSE_SCHEDULE);
2890 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
2891 = OMP_CLAUSE_SCHEDULE_STATIC;
2892 if (ompws_flags & OMPWS_NOWAIT)
2893 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
2894 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
2896 /* Initialize the loopvar. */
2897 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
2899 OMP_FOR_INIT (stmt) = init;
2900 /* The exit condition. */
2901 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
2903 loop->loopvar[n], loop->to[n]);
2904 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
2905 OMP_FOR_COND (stmt) = cond;
2906 /* Increment the loopvar. */
2907 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2908 loop->loopvar[n], gfc_index_one_node);
2909 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
2910 void_type_node, loop->loopvar[n], tmp);
2911 OMP_FOR_INCR (stmt) = incr;
2913 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
2914 gfc_add_expr_to_block (&loop->code[n], stmt);
2918 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
2919 && (loop->temp_ss == NULL);
2921 loopbody = gfc_finish_block (pbody);
2925 tmp = loop->from[n];
2926 loop->from[n] = loop->to[n];
2930 /* Initialize the loopvar. */
2931 if (loop->loopvar[n] != loop->from[n])
2932 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
2934 exit_label = gfc_build_label_decl (NULL_TREE);
2936 /* Generate the loop body. */
2937 gfc_init_block (&block);
2939 /* The exit condition. */
2940 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
2941 boolean_type_node, loop->loopvar[n], loop->to[n]);
2942 tmp = build1_v (GOTO_EXPR, exit_label);
2943 TREE_USED (exit_label) = 1;
2944 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
2945 gfc_add_expr_to_block (&block, tmp);
2947 /* The main body. */
2948 gfc_add_expr_to_block (&block, loopbody);
2950 /* Increment the loopvar. */
2951 tmp = fold_build2_loc (input_location,
2952 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
2953 gfc_array_index_type, loop->loopvar[n],
2954 gfc_index_one_node);
2956 gfc_add_modify (&block, loop->loopvar[n], tmp);
2958 /* Build the loop. */
2959 tmp = gfc_finish_block (&block);
2960 tmp = build1_v (LOOP_EXPR, tmp);
2961 gfc_add_expr_to_block (&loop->code[n], tmp);
2963 /* Add the exit label. */
2964 tmp = build1_v (LABEL_EXPR, exit_label);
2965 gfc_add_expr_to_block (&loop->code[n], tmp);
2971 /* Finishes and generates the loops for a scalarized expression. */
2974 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
2979 stmtblock_t *pblock;
2983 /* Generate the loops. */
2984 for (dim = 0; dim < loop->dimen + loop->codimen; dim++)
2986 n = loop->order[dim];
2987 gfc_trans_scalarized_loop_end (loop, n, pblock);
2988 loop->loopvar[n] = NULL_TREE;
2989 pblock = &loop->code[n];
2992 tmp = gfc_finish_block (pblock);
2993 gfc_add_expr_to_block (&loop->pre, tmp);
2995 /* Clear all the used flags. */
2996 for (ss = loop->ss; ss; ss = ss->loop_chain)
3001 /* Finish the main body of a scalarized expression, and start the secondary
3005 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3009 stmtblock_t *pblock;
3013 /* We finish as many loops as are used by the temporary. */
3014 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3016 n = loop->order[dim];
3017 gfc_trans_scalarized_loop_end (loop, n, pblock);
3018 loop->loopvar[n] = NULL_TREE;
3019 pblock = &loop->code[n];
3022 /* We don't want to finish the outermost loop entirely. */
3023 n = loop->order[loop->temp_dim - 1];
3024 gfc_trans_scalarized_loop_end (loop, n, pblock);
3026 /* Restore the initial offsets. */
3027 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3029 if ((ss->useflags & 2) == 0)
3032 if (ss->type != GFC_SS_SECTION
3033 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
3034 && ss->type != GFC_SS_COMPONENT)
3037 ss->data.info.offset = ss->data.info.saved_offset;
3040 /* Restart all the inner loops we just finished. */
3041 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3043 n = loop->order[dim];
3045 gfc_start_block (&loop->code[n]);
3047 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3049 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3052 /* Start a block for the secondary copying code. */
3053 gfc_start_block (body);
3057 /* Calculate the lower bound of an array section. */
3060 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim,
3061 bool coarray, bool coarray_last)
3065 gfc_expr *stride = NULL;
3070 gcc_assert (ss->type == GFC_SS_SECTION);
3072 info = &ss->data.info;
3074 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3076 /* We use a zero-based index to access the vector. */
3077 info->start[dim] = gfc_index_zero_node;
3078 info->end[dim] = NULL;
3080 info->stride[dim] = gfc_index_one_node;
3084 gcc_assert (info->ref->u.ar.dimen_type[dim] == DIMEN_RANGE);
3085 desc = info->descriptor;
3086 start = info->ref->u.ar.start[dim];
3087 end = info->ref->u.ar.end[dim];
3089 stride = info->ref->u.ar.stride[dim];
3091 /* Calculate the start of the range. For vector subscripts this will
3092 be the range of the vector. */
3095 /* Specified section start. */
3096 gfc_init_se (&se, NULL);
3097 gfc_conv_expr_type (&se, start, gfc_array_index_type);
3098 gfc_add_block_to_block (&loop->pre, &se.pre);
3099 info->start[dim] = se.expr;
3103 /* No lower bound specified so use the bound of the array. */
3104 info->start[dim] = gfc_conv_array_lbound (desc, dim);
3106 info->start[dim] = gfc_evaluate_now (info->start[dim], &loop->pre);
3108 /* Similarly calculate the end. Although this is not used in the
3109 scalarizer, it is needed when checking bounds and where the end
3110 is an expression with side-effects. */
3115 /* Specified section start. */
3116 gfc_init_se (&se, NULL);
3117 gfc_conv_expr_type (&se, end, gfc_array_index_type);
3118 gfc_add_block_to_block (&loop->pre, &se.pre);
3119 info->end[dim] = se.expr;
3123 /* No upper bound specified so use the bound of the array. */
3124 info->end[dim] = gfc_conv_array_ubound (desc, dim);
3126 info->end[dim] = gfc_evaluate_now (info->end[dim], &loop->pre);
3129 /* Calculate the stride. */
3130 if (!coarray && stride == NULL)
3131 info->stride[dim] = gfc_index_one_node;
3134 gfc_init_se (&se, NULL);
3135 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3136 gfc_add_block_to_block (&loop->pre, &se.pre);
3137 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3142 /* Calculates the range start and stride for a SS chain. Also gets the
3143 descriptor and data pointer. The range of vector subscripts is the size
3144 of the vector. Array bounds are also checked. */
3147 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3155 /* Determine the rank of the loop. */
3157 ss != gfc_ss_terminator && loop->dimen == 0; ss = ss->loop_chain)
3161 case GFC_SS_SECTION:
3162 case GFC_SS_CONSTRUCTOR:
3163 case GFC_SS_FUNCTION:
3164 case GFC_SS_COMPONENT:
3165 loop->dimen = ss->data.info.dimen;
3166 loop->codimen = ss->data.info.codimen;
3169 /* As usual, lbound and ubound are exceptions!. */
3170 case GFC_SS_INTRINSIC:
3171 switch (ss->expr->value.function.isym->id)
3173 case GFC_ISYM_LBOUND:
3174 case GFC_ISYM_UBOUND:
3175 loop->dimen = ss->data.info.dimen;
3179 case GFC_ISYM_LCOBOUND:
3180 case GFC_ISYM_UCOBOUND:
3181 case GFC_ISYM_THIS_IMAGE:
3182 loop->dimen = ss->data.info.dimen;
3183 loop->codimen = ss->data.info.codimen;
3195 /* We should have determined the rank of the expression by now. If
3196 not, that's bad news. */
3197 gcc_assert (loop->dimen + loop->codimen != 0);
3199 /* Loop over all the SS in the chain. */
3200 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3202 if (ss->expr && ss->expr->shape && !ss->shape)
3203 ss->shape = ss->expr->shape;
3207 case GFC_SS_SECTION:
3208 /* Get the descriptor for the array. */
3209 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3211 for (n = 0; n < ss->data.info.dimen; n++)
3212 gfc_conv_section_startstride (loop, ss, ss->data.info.dim[n],
3214 for (n = ss->data.info.dimen;
3215 n < ss->data.info.dimen + ss->data.info.codimen; n++)
3216 gfc_conv_section_startstride (loop, ss, ss->data.info.dim[n], true,
3217 n == ss->data.info.dimen
3218 + ss->data.info.codimen -1);
3222 case GFC_SS_INTRINSIC:
3223 switch (ss->expr->value.function.isym->id)
3225 /* Fall through to supply start and stride. */
3226 case GFC_ISYM_LBOUND:
3227 case GFC_ISYM_UBOUND:
3228 case GFC_ISYM_LCOBOUND:
3229 case GFC_ISYM_UCOBOUND:
3230 case GFC_ISYM_THIS_IMAGE:
3237 case GFC_SS_CONSTRUCTOR:
3238 case GFC_SS_FUNCTION:
3239 for (n = 0; n < ss->data.info.dimen; n++)
3241 ss->data.info.start[n] = gfc_index_zero_node;
3242 ss->data.info.end[n] = gfc_index_zero_node;
3243 ss->data.info.stride[n] = gfc_index_one_node;
3252 /* The rest is just runtime bound checking. */
3253 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3256 tree lbound, ubound;
3258 tree size[GFC_MAX_DIMENSIONS];
3259 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3264 gfc_start_block (&block);
3266 for (n = 0; n < loop->dimen; n++)
3267 size[n] = NULL_TREE;
3269 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3273 if (ss->type != GFC_SS_SECTION)
3276 /* Catch allocatable lhs in f2003. */
3277 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3280 gfc_start_block (&inner);
3282 /* TODO: range checking for mapped dimensions. */
3283 info = &ss->data.info;
3285 /* This code only checks ranges. Elemental and vector
3286 dimensions are checked later. */
3287 for (n = 0; n < loop->dimen; n++)
3292 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
3295 if (dim == info->ref->u.ar.dimen - 1
3296 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3297 check_upper = false;
3301 /* Zero stride is not allowed. */
3302 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3303 info->stride[dim], gfc_index_zero_node);
3304 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3305 "of array '%s'", dim + 1, ss->expr->symtree->name);
3306 gfc_trans_runtime_check (true, false, tmp, &inner,
3307 &ss->expr->where, msg);
3310 desc = ss->data.info.descriptor;
3312 /* This is the run-time equivalent of resolve.c's
3313 check_dimension(). The logical is more readable there
3314 than it is here, with all the trees. */
3315 lbound = gfc_conv_array_lbound (desc, dim);
3316 end = info->end[dim];
3318 ubound = gfc_conv_array_ubound (desc, dim);
3322 /* non_zerosized is true when the selected range is not
3324 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3325 boolean_type_node, info->stride[dim],
3326 gfc_index_zero_node);
3327 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3328 info->start[dim], end);
3329 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3330 boolean_type_node, stride_pos, tmp);
3332 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3334 info->stride[dim], gfc_index_zero_node);
3335 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3336 info->start[dim], end);
3337 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3340 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3342 stride_pos, stride_neg);
3344 /* Check the start of the range against the lower and upper
3345 bounds of the array, if the range is not empty.
3346 If upper bound is present, include both bounds in the
3350 tmp = fold_build2_loc (input_location, LT_EXPR,
3352 info->start[dim], lbound);
3353 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3355 non_zerosized, tmp);
3356 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3358 info->start[dim], ubound);
3359 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3361 non_zerosized, tmp2);
3362 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3363 "outside of expected range (%%ld:%%ld)",
3364 dim + 1, ss->expr->symtree->name);
3365 gfc_trans_runtime_check (true, false, tmp, &inner,
3366 &ss->expr->where, msg,
3367 fold_convert (long_integer_type_node, info->start[dim]),
3368 fold_convert (long_integer_type_node, lbound),
3369 fold_convert (long_integer_type_node, ubound));
3370 gfc_trans_runtime_check (true, false, tmp2, &inner,
3371 &ss->expr->where, msg,
3372 fold_convert (long_integer_type_node, info->start[dim]),
3373 fold_convert (long_integer_type_node, lbound),
3374 fold_convert (long_integer_type_node, ubound));
3379 tmp = fold_build2_loc (input_location, LT_EXPR,
3381 info->start[dim], lbound);
3382 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3383 boolean_type_node, non_zerosized, tmp);
3384 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3385 "below lower bound of %%ld",
3386 dim + 1, ss->expr->symtree->name);
3387 gfc_trans_runtime_check (true, false, tmp, &inner,
3388 &ss->expr->where, msg,
3389 fold_convert (long_integer_type_node, info->start[dim]),
3390 fold_convert (long_integer_type_node, lbound));
3394 /* Compute the last element of the range, which is not
3395 necessarily "end" (think 0:5:3, which doesn't contain 5)
3396 and check it against both lower and upper bounds. */
3398 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3399 gfc_array_index_type, end,
3401 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3402 gfc_array_index_type, tmp,
3404 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3405 gfc_array_index_type, end, tmp);
3406 tmp2 = fold_build2_loc (input_location, LT_EXPR,
3407 boolean_type_node, tmp, lbound);
3408 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3409 boolean_type_node, non_zerosized, tmp2);
3412 tmp3 = fold_build2_loc (input_location, GT_EXPR,
3413 boolean_type_node, tmp, ubound);
3414 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3415 boolean_type_node, non_zerosized, tmp3);
3416 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3417 "outside of expected range (%%ld:%%ld)",
3418 dim + 1, ss->expr->symtree->name);
3419 gfc_trans_runtime_check (true, false, tmp2, &inner,
3420 &ss->expr->where, msg,
3421 fold_convert (long_integer_type_node, tmp),
3422 fold_convert (long_integer_type_node, ubound),
3423 fold_convert (long_integer_type_node, lbound));
3424 gfc_trans_runtime_check (true, false, tmp3, &inner,
3425 &ss->expr->where, msg,
3426 fold_convert (long_integer_type_node, tmp),
3427 fold_convert (long_integer_type_node, ubound),
3428 fold_convert (long_integer_type_node, lbound));
3433 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3434 "below lower bound of %%ld",
3435 dim + 1, ss->expr->symtree->name);
3436 gfc_trans_runtime_check (true, false, tmp2, &inner,
3437 &ss->expr->where, msg,
3438 fold_convert (long_integer_type_node, tmp),
3439 fold_convert (long_integer_type_node, lbound));
3443 /* Check the section sizes match. */
3444 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3445 gfc_array_index_type, end,
3447 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3448 gfc_array_index_type, tmp,
3450 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3451 gfc_array_index_type,
3452 gfc_index_one_node, tmp);
3453 tmp = fold_build2_loc (input_location, MAX_EXPR,
3454 gfc_array_index_type, tmp,
3455 build_int_cst (gfc_array_index_type, 0));
3456 /* We remember the size of the first section, and check all the
3457 others against this. */
3460 tmp3 = fold_build2_loc (input_location, NE_EXPR,
3461 boolean_type_node, tmp, size[n]);
3462 asprintf (&msg, "Array bound mismatch for dimension %d "
3463 "of array '%s' (%%ld/%%ld)",
3464 dim + 1, ss->expr->symtree->name);
3466 gfc_trans_runtime_check (true, false, tmp3, &inner,
3467 &ss->expr->where, msg,
3468 fold_convert (long_integer_type_node, tmp),
3469 fold_convert (long_integer_type_node, size[n]));
3474 size[n] = gfc_evaluate_now (tmp, &inner);
3477 tmp = gfc_finish_block (&inner);
3479 /* For optional arguments, only check bounds if the argument is
3481 if (ss->expr->symtree->n.sym->attr.optional
3482 || ss->expr->symtree->n.sym->attr.not_always_present)
3483 tmp = build3_v (COND_EXPR,
3484 gfc_conv_expr_present (ss->expr->symtree->n.sym),
3485 tmp, build_empty_stmt (input_location));
3487 gfc_add_expr_to_block (&block, tmp);
3491 tmp = gfc_finish_block (&block);
3492 gfc_add_expr_to_block (&loop->pre, tmp);
3496 /* Return true if both symbols could refer to the same data object. Does
3497 not take account of aliasing due to equivalence statements. */
3500 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
3501 bool lsym_target, bool rsym_pointer, bool rsym_target)
3503 /* Aliasing isn't possible if the symbols have different base types. */
3504 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
3507 /* Pointers can point to other pointers and target objects. */
3509 if ((lsym_pointer && (rsym_pointer || rsym_target))
3510 || (rsym_pointer && (lsym_pointer || lsym_target)))
3513 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
3514 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
3516 if (lsym_target && rsym_target
3517 && ((lsym->attr.dummy && !lsym->attr.contiguous
3518 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
3519 || (rsym->attr.dummy && !rsym->attr.contiguous
3520 && (!rsym->attr.dimension
3521 || rsym->as->type == AS_ASSUMED_SHAPE))))
3528 /* Return true if the two SS could be aliased, i.e. both point to the same data
3530 /* TODO: resolve aliases based on frontend expressions. */
3533 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
3539 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
3541 lsym = lss->expr->symtree->n.sym;
3542 rsym = rss->expr->symtree->n.sym;
3544 lsym_pointer = lsym->attr.pointer;
3545 lsym_target = lsym->attr.target;
3546 rsym_pointer = rsym->attr.pointer;
3547 rsym_target = rsym->attr.target;
3549 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
3550 rsym_pointer, rsym_target))
3553 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
3554 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
3557 /* For derived types we must check all the component types. We can ignore
3558 array references as these will have the same base type as the previous
3560 for (lref = lss->expr->ref; lref != lss->data.info.ref; lref = lref->next)
3562 if (lref->type != REF_COMPONENT)
3565 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
3566 lsym_target = lsym_target || lref->u.c.sym->attr.target;
3568 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
3569 rsym_pointer, rsym_target))
3572 if ((lsym_pointer && (rsym_pointer || rsym_target))
3573 || (rsym_pointer && (lsym_pointer || lsym_target)))
3575 if (gfc_compare_types (&lref->u.c.component->ts,
3580 for (rref = rss->expr->ref; rref != rss->data.info.ref;
3583 if (rref->type != REF_COMPONENT)
3586 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3587 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3589 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
3590 lsym_pointer, lsym_target,
3591 rsym_pointer, rsym_target))
3594 if ((lsym_pointer && (rsym_pointer || rsym_target))
3595 || (rsym_pointer && (lsym_pointer || lsym_target)))
3597 if (gfc_compare_types (&lref->u.c.component->ts,
3598 &rref->u.c.sym->ts))
3600 if (gfc_compare_types (&lref->u.c.sym->ts,
3601 &rref->u.c.component->ts))
3603 if (gfc_compare_types (&lref->u.c.component->ts,
3604 &rref->u.c.component->ts))
3610 lsym_pointer = lsym->attr.pointer;
3611 lsym_target = lsym->attr.target;
3612 lsym_pointer = lsym->attr.pointer;
3613 lsym_target = lsym->attr.target;
3615 for (rref = rss->expr->ref; rref != rss->data.info.ref; rref = rref->next)
3617 if (rref->type != REF_COMPONENT)
3620 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3621 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3623 if (symbols_could_alias (rref->u.c.sym, lsym,
3624 lsym_pointer, lsym_target,
3625 rsym_pointer, rsym_target))
3628 if ((lsym_pointer && (rsym_pointer || rsym_target))
3629 || (rsym_pointer && (lsym_pointer || lsym_target)))
3631 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
3640 /* Resolve array data dependencies. Creates a temporary if required. */
3641 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
3645 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
3654 loop->temp_ss = NULL;
3656 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
3658 if (ss->type != GFC_SS_SECTION)
3661 if (dest->expr->symtree->n.sym != ss->expr->symtree->n.sym)
3663 if (gfc_could_be_alias (dest, ss)
3664 || gfc_are_equivalenced_arrays (dest->expr, ss->expr))
3672 lref = dest->expr->ref;
3673 rref = ss->expr->ref;
3675 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
3680 for (i = 0; i < dest->data.info.dimen; i++)
3681 for (j = 0; j < ss->data.info.dimen; j++)
3683 && dest->data.info.dim[i] == ss->data.info.dim[j])
3685 /* If we don't access array elements in the same order,
3686 there is a dependency. */
3691 /* TODO : loop shifting. */
3694 /* Mark the dimensions for LOOP SHIFTING */
3695 for (n = 0; n < loop->dimen; n++)
3697 int dim = dest->data.info.dim[n];
3699 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3701 else if (! gfc_is_same_range (&lref->u.ar,
3702 &rref->u.ar, dim, 0))
3706 /* Put all the dimensions with dependencies in the
3709 for (n = 0; n < loop->dimen; n++)
3711 gcc_assert (loop->order[n] == n);
3713 loop->order[dim++] = n;
3715 for (n = 0; n < loop->dimen; n++)
3718 loop->order[dim++] = n;
3721 gcc_assert (dim == loop->dimen);
3732 tree base_type = gfc_typenode_for_spec (&dest->expr->ts);
3733 if (GFC_ARRAY_TYPE_P (base_type)
3734 || GFC_DESCRIPTOR_TYPE_P (base_type))
3735 base_type = gfc_get_element_type (base_type);
3736 loop->temp_ss = gfc_get_ss ();
3737 loop->temp_ss->type = GFC_SS_TEMP;
3738 loop->temp_ss->data.temp.type = base_type;
3739 loop->temp_ss->string_length = dest->string_length;
3740 loop->temp_ss->data.temp.dimen = loop->dimen;
3741 loop->temp_ss->data.temp.codimen = loop->codimen;
3742 loop->temp_ss->next = gfc_ss_terminator;
3743 gfc_add_ss_to_loop (loop, loop->temp_ss);
3746 loop->temp_ss = NULL;
3750 /* Initialize the scalarization loop. Creates the loop variables. Determines
3751 the range of the loop variables. Creates a temporary if required.
3752 Calculates how to transform from loop variables to array indices for each
3753 expression. Also generates code for scalar expressions which have been
3754 moved outside the loop. */
3757 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
3759 int n, dim, spec_dim;
3761 gfc_ss_info *specinfo;
3764 gfc_ss *loopspec[GFC_MAX_DIMENSIONS];
3765 bool dynamic[GFC_MAX_DIMENSIONS];
3770 for (n = 0; n < loop->dimen + loop->codimen; n++)
3774 /* We use one SS term, and use that to determine the bounds of the
3775 loop for this dimension. We try to pick the simplest term. */
3776 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3778 if (ss->type == GFC_SS_SCALAR || ss->type == GFC_SS_REFERENCE)
3781 info = &ss->data.info;
3784 if (loopspec[n] != NULL)
3786 specinfo = &loopspec[n]->data.info;
3787 spec_dim = specinfo->dim[n];
3791 /* Silence unitialized warnings. */
3798 gcc_assert (ss->shape[dim]);
3799 /* The frontend has worked out the size for us. */
3801 || !loopspec[n]->shape
3802 || !integer_zerop (specinfo->start[spec_dim]))
3803 /* Prefer zero-based descriptors if possible. */
3808 if (ss->type == GFC_SS_CONSTRUCTOR)
3810 gfc_constructor_base base;
3811 /* An unknown size constructor will always be rank one.
3812 Higher rank constructors will either have known shape,
3813 or still be wrapped in a call to reshape. */
3814 gcc_assert (loop->dimen == 1);
3816 /* Always prefer to use the constructor bounds if the size
3817 can be determined at compile time. Prefer not to otherwise,
3818 since the general case involves realloc, and it's better to
3819 avoid that overhead if possible. */
3820 base = ss->expr->value.constructor;
3821 dynamic[n] = gfc_get_array_constructor_size (&i, base);
3822 if (!dynamic[n] || !loopspec[n])
3827 /* TODO: Pick the best bound if we have a choice between a
3828 function and something else. */
3829 if (ss->type == GFC_SS_FUNCTION)
3835 /* Avoid using an allocatable lhs in an assignment, since
3836 there might be a reallocation coming. */
3837 if (loopspec[n] && ss->is_alloc_lhs)
3840 if (ss->type != GFC_SS_SECTION)
3845 /* Criteria for choosing a loop specifier (most important first):
3846 doesn't need realloc
3852 else if ((loopspec[n]->type == GFC_SS_CONSTRUCTOR && dynamic[n])
3853 || n >= loop->dimen)
3855 else if (integer_onep (info->stride[dim])
3856 && !integer_onep (specinfo->stride[spec_dim]))
3858 else if (INTEGER_CST_P (info->stride[dim])
3859 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
3861 else if (INTEGER_CST_P (info->start[dim])
3862 && !INTEGER_CST_P (specinfo->start[spec_dim]))
3864 /* We don't work out the upper bound.
3865 else if (INTEGER_CST_P (info->finish[n])
3866 && ! INTEGER_CST_P (specinfo->finish[n]))
3867 loopspec[n] = ss; */
3870 /* We should have found the scalarization loop specifier. If not,
3872 gcc_assert (loopspec[n]);
3874 info = &loopspec[n]->data.info;
3877 /* Set the extents of this range. */
3878 cshape = loopspec[n]->shape;
3879 if (n < loop->dimen && cshape && INTEGER_CST_P (info->start[dim])
3880 && INTEGER_CST_P (info->stride[dim]))
3882 loop->from[n] = info->start[dim];
3883 mpz_set (i, cshape[get_array_ref_dim (info, n)]);
3884 mpz_sub_ui (i, i, 1);
3885 /* To = from + (size - 1) * stride. */
3886 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
3887 if (!integer_onep (info->stride[dim]))
3888 tmp = fold_build2_loc (input_location, MULT_EXPR,
3889 gfc_array_index_type, tmp,
3891 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
3892 gfc_array_index_type,
3893 loop->from[n], tmp);
3897 loop->from[n] = info->start[dim];
3898 switch (loopspec[n]->type)
3900 case GFC_SS_CONSTRUCTOR:
3901 /* The upper bound is calculated when we expand the
3903 gcc_assert (loop->to[n] == NULL_TREE);
3906 case GFC_SS_SECTION:
3907 /* Use the end expression if it exists and is not constant,
3908 so that it is only evaluated once. */
3909 loop->to[n] = info->end[dim];
3912 case GFC_SS_FUNCTION:
3913 /* The loop bound will be set when we generate the call. */
3914 gcc_assert (loop->to[n] == NULL_TREE);
3922 /* Transform everything so we have a simple incrementing variable. */
3923 if (n < loop->dimen && integer_onep (info->stride[dim]))
3924 info->delta[dim] = gfc_index_zero_node;
3925 else if (n < loop->dimen)
3927 /* Set the delta for this section. */
3928 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
3929 /* Number of iterations is (end - start + step) / step.
3930 with start = 0, this simplifies to
3932 for (i = 0; i<=last; i++){...}; */
3933 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3934 gfc_array_index_type, loop->to[n],
3936 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3937 gfc_array_index_type, tmp, info->stride[dim]);
3938 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
3939 tmp, build_int_cst (gfc_array_index_type, -1));
3940 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
3941 /* Make the loop variable start at 0. */
3942 loop->from[n] = gfc_index_zero_node;
3946 /* Add all the scalar code that can be taken out of the loops.
3947 This may include calculating the loop bounds, so do it before
3948 allocating the temporary. */
3949 gfc_add_loop_ss_code (loop, loop->ss, false, where);
3951 /* If we want a temporary then create it. */
3952 if (loop->temp_ss != NULL)
3954 gcc_assert (loop->temp_ss->type == GFC_SS_TEMP);
3956 /* Make absolutely sure that this is a complete type. */
3957 if (loop->temp_ss->string_length)
3958 loop->temp_ss->data.temp.type
3959 = gfc_get_character_type_len_for_eltype
3960 (TREE_TYPE (loop->temp_ss->data.temp.type),
3961 loop->temp_ss->string_length);
3963 tmp = loop->temp_ss->data.temp.type;
3964 n = loop->temp_ss->data.temp.dimen;
3965 memset (&loop->temp_ss->data.info, 0, sizeof (gfc_ss_info));
3966 loop->temp_ss->type = GFC_SS_SECTION;
3967 loop->temp_ss->data.info.dimen = n;
3969 gcc_assert (loop->temp_ss->data.info.dimen != 0);
3970 for (n = 0; n < loop->temp_ss->data.info.dimen; n++)
3971 loop->temp_ss->data.info.dim[n] = n;
3973 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
3974 &loop->temp_ss->data.info, tmp, NULL_TREE,
3975 false, true, false, where);
3978 for (n = 0; n < loop->temp_dim; n++)
3979 loopspec[loop->order[n]] = NULL;
3983 /* For array parameters we don't have loop variables, so don't calculate the
3985 if (loop->array_parameter)
3988 /* Calculate the translation from loop variables to array indices. */
3989 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3991 if (ss->type != GFC_SS_SECTION && ss->type != GFC_SS_COMPONENT
3992 && ss->type != GFC_SS_CONSTRUCTOR)
3996 info = &ss->data.info;
3998 for (n = 0; n < info->dimen; n++)
4000 /* If we are specifying the range the delta is already set. */
4001 if (loopspec[n] != ss)
4003 dim = ss->data.info.dim[n];
4005 /* Calculate the offset relative to the loop variable.
4006 First multiply by the stride. */
4007 tmp = loop->from[n];
4008 if (!integer_onep (info->stride[dim]))
4009 tmp = fold_build2_loc (input_location, MULT_EXPR,
4010 gfc_array_index_type,
4011 tmp, info->stride[dim]);
4013 /* Then subtract this from our starting value. */
4014 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4015 gfc_array_index_type,
4016 info->start[dim], tmp);
4018 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4025 /* Calculate the size of a given array dimension from the bounds. This
4026 is simply (ubound - lbound + 1) if this expression is positive
4027 or 0 if it is negative (pick either one if it is zero). Optionally
4028 (if or_expr is present) OR the (expression != 0) condition to it. */
4031 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4036 /* Calculate (ubound - lbound + 1). */
4037 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4039 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4040 gfc_index_one_node);
4042 /* Check whether the size for this dimension is negative. */
4043 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4044 gfc_index_zero_node);
4045 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4046 gfc_index_zero_node, res);
4048 /* Build OR expression. */
4050 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4051 boolean_type_node, *or_expr, cond);
4057 /* For an array descriptor, get the total number of elements. This is just
4058 the product of the extents along from_dim to to_dim. */
4061 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4066 res = gfc_index_one_node;
4068 for (dim = from_dim; dim < to_dim; ++dim)
4074 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4075 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4077 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4078 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4086 /* Full size of an array. */
4089 gfc_conv_descriptor_size (tree desc, int rank)
4091 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4095 /* Size of a coarray for all dimensions but the last. */
4098 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4100 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4104 /* Helper function for marking a boolean expression tree as unlikely. */
4107 gfc_unlikely (tree cond)
4111 cond = fold_convert (long_integer_type_node, cond);
4112 tmp = build_zero_cst (long_integer_type_node);
4113 cond = build_call_expr_loc (input_location,
4114 built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
4115 cond = fold_convert (boolean_type_node, cond);
4119 /* Fills in an array descriptor, and returns the size of the array.
4120 The size will be a simple_val, ie a variable or a constant. Also
4121 calculates the offset of the base. The pointer argument overflow,
4122 which should be of integer type, will increase in value if overflow
4123 occurs during the size calculation. Returns the size of the array.
4127 for (n = 0; n < rank; n++)
4129 a.lbound[n] = specified_lower_bound;
4130 offset = offset + a.lbond[n] * stride;
4132 a.ubound[n] = specified_upper_bound;
4133 a.stride[n] = stride;
4134 size = siz >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4135 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4136 stride = stride * size;
4138 element_size = sizeof (array element);
4139 stride = (size_t) stride;
4140 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4141 stride = stride * element_size;
4147 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4148 gfc_expr ** lower, gfc_expr ** upper,
4149 stmtblock_t * pblock, tree * overflow)
4162 stmtblock_t thenblock;
4163 stmtblock_t elseblock;
4168 type = TREE_TYPE (descriptor);
4170 stride = gfc_index_one_node;
4171 offset = gfc_index_zero_node;
4173 /* Set the dtype. */
4174 tmp = gfc_conv_descriptor_dtype (descriptor);
4175 gfc_add_modify (pblock, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
4177 or_expr = boolean_false_node;
4179 for (n = 0; n < rank; n++)
4184 /* We have 3 possibilities for determining the size of the array:
4185 lower == NULL => lbound = 1, ubound = upper[n]
4186 upper[n] = NULL => lbound = 1, ubound = lower[n]
4187 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4190 /* Set lower bound. */
4191 gfc_init_se (&se, NULL);
4193 se.expr = gfc_index_one_node;
4196 gcc_assert (lower[n]);
4199 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4200 gfc_add_block_to_block (pblock, &se.pre);
4204 se.expr = gfc_index_one_node;
4208 gfc_conv_descriptor_lbound_set (pblock, descriptor, gfc_rank_cst[n],
4210 conv_lbound = se.expr;
4212 /* Work out the offset for this component. */
4213 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4215 offset = fold_build2_loc (input_location, MINUS_EXPR,
4216 gfc_array_index_type, offset, tmp);
4218 /* Set upper bound. */
4219 gfc_init_se (&se, NULL);
4220 gcc_assert (ubound);
4221 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4222 gfc_add_block_to_block (pblock, &se.pre);
4224 gfc_conv_descriptor_ubound_set (pblock, descriptor,
4225 gfc_rank_cst[n], se.expr);
4226 conv_ubound = se.expr;
4228 /* Store the stride. */
4229 gfc_conv_descriptor_stride_set (pblock, descriptor,
4230 gfc_rank_cst[n], stride);
4232 /* Calculate size and check whether extent is negative. */
4233 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4234 size = gfc_evaluate_now (size, pblock);
4236 /* Check whether multiplying the stride by the number of
4237 elements in this dimension would overflow. We must also check
4238 whether the current dimension has zero size in order to avoid
4241 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4242 gfc_array_index_type,
4243 fold_convert (gfc_array_index_type,
4244 TYPE_MAX_VALUE (gfc_array_index_type)),
4246 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4247 boolean_type_node, tmp, stride));
4248 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4249 integer_one_node, integer_zero_node);
4250 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4251 boolean_type_node, size,
4252 gfc_index_zero_node));
4253 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4254 integer_zero_node, tmp);
4255 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4257 *overflow = gfc_evaluate_now (tmp, pblock);
4259 /* Multiply the stride by the number of elements in this dimension. */
4260 stride = fold_build2_loc (input_location, MULT_EXPR,
4261 gfc_array_index_type, stride, size);
4262 stride = gfc_evaluate_now (stride, pblock);
4265 for (n = rank; n < rank + corank; n++)
4269 /* Set lower bound. */
4270 gfc_init_se (&se, NULL);
4271 if (lower == NULL || lower[n] == NULL)
4273 gcc_assert (n == rank + corank - 1);
4274 se.expr = gfc_index_one_node;
4278 if (ubound || n == rank + corank - 1)
4280 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4281 gfc_add_block_to_block (pblock, &se.pre);
4285 se.expr = gfc_index_one_node;
4289 gfc_conv_descriptor_lbound_set (pblock, descriptor, gfc_rank_cst[n],
4292 if (n < rank + corank - 1)
4294 gfc_init_se (&se, NULL);
4295 gcc_assert (ubound);
4296 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4297 gfc_add_block_to_block (pblock, &se.pre);
4298 gfc_conv_descriptor_ubound_set (pblock, descriptor,
4299 gfc_rank_cst[n], se.expr);
4303 /* The stride is the number of elements in the array, so multiply by the
4304 size of an element to get the total size. */
4305 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4306 /* Convert to size_t. */
4307 element_size = fold_convert (size_type_node, tmp);
4308 stride = fold_convert (size_type_node, stride);
4310 /* First check for overflow. Since an array of type character can
4311 have zero element_size, we must check for that before
4313 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4315 TYPE_MAX_VALUE (size_type_node), element_size);
4316 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4317 boolean_type_node, tmp, stride));
4318 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4319 integer_one_node, integer_zero_node);
4320 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4321 boolean_type_node, element_size,
4322 build_int_cst (size_type_node, 0)));
4323 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4324 integer_zero_node, tmp);
4325 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4327 *overflow = gfc_evaluate_now (tmp, pblock);
4329 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
4330 stride, element_size);
4332 if (poffset != NULL)
4334 offset = gfc_evaluate_now (offset, pblock);
4338 if (integer_zerop (or_expr))
4340 if (integer_onep (or_expr))
4341 return build_int_cst (size_type_node, 0);
4343 var = gfc_create_var (TREE_TYPE (size), "size");
4344 gfc_start_block (&thenblock);
4345 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
4346 thencase = gfc_finish_block (&thenblock);
4348 gfc_start_block (&elseblock);
4349 gfc_add_modify (&elseblock, var, size);
4350 elsecase = gfc_finish_block (&elseblock);
4352 tmp = gfc_evaluate_now (or_expr, pblock);
4353 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
4354 gfc_add_expr_to_block (pblock, tmp);
4360 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
4361 the work for an ALLOCATE statement. */
4365 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree pstat)
4373 tree overflow; /* Boolean storing whether size calculation overflows. */
4376 stmtblock_t elseblock;
4379 gfc_ref *ref, *prev_ref = NULL;
4380 bool allocatable_array, coarray;
4384 /* Find the last reference in the chain. */
4385 while (ref && ref->next != NULL)
4387 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
4388 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
4393 if (ref == NULL || ref->type != REF_ARRAY)
4398 allocatable_array = expr->symtree->n.sym->attr.allocatable;
4399 coarray = expr->symtree->n.sym->attr.codimension;
4403 allocatable_array = prev_ref->u.c.component->attr.allocatable;
4404 coarray = prev_ref->u.c.component->attr.codimension;
4407 /* Return if this is a scalar coarray. */
4408 if ((!prev_ref && !expr->symtree->n.sym->attr.dimension)
4409 || (prev_ref && !prev_ref->u.c.component->attr.dimension))
4411 gcc_assert (coarray);
4415 /* Figure out the size of the array. */
4416 switch (ref->u.ar.type)
4422 upper = ref->u.ar.start;
4428 lower = ref->u.ar.start;
4429 upper = ref->u.ar.end;
4433 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
4435 lower = ref->u.ar.as->lower;
4436 upper = ref->u.ar.as->upper;
4444 overflow = integer_zero_node;
4445 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
4446 ref->u.ar.as->corank, &offset, lower, upper,
4447 &se->pre, &overflow);
4449 var_overflow = gfc_create_var (integer_type_node, "overflow");
4450 gfc_add_modify (&se->pre, var_overflow, overflow);
4452 /* Generate the block of code handling overflow. */
4453 msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
4454 ("Integer overflow when calculating the amount of "
4455 "memory to allocate"));
4456 error = build_call_expr_loc (input_location,
4457 gfor_fndecl_runtime_error, 1, msg);
4459 if (pstat != NULL_TREE && !integer_zerop (pstat))
4461 /* Set the status variable if it's present. */
4462 stmtblock_t set_status_block;
4463 tree status_type = pstat ? TREE_TYPE (TREE_TYPE (pstat)) : NULL_TREE;
4465 gfc_start_block (&set_status_block);
4466 gfc_add_modify (&set_status_block,
4467 fold_build1_loc (input_location, INDIRECT_REF,
4468 status_type, pstat),
4469 build_int_cst (status_type, LIBERROR_ALLOCATION));
4471 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
4472 pstat, build_int_cst (TREE_TYPE (pstat), 0));
4473 error = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
4474 error, gfc_finish_block (&set_status_block));
4477 gfc_start_block (&elseblock);
4479 /* Allocate memory to store the data. */
4480 pointer = gfc_conv_descriptor_data_get (se->expr);
4481 STRIP_NOPS (pointer);
4483 /* The allocate_array variants take the old pointer as first argument. */
4484 if (allocatable_array)
4485 tmp = gfc_allocate_array_with_status (&elseblock, pointer, size, pstat, expr);
4487 tmp = gfc_allocate_with_status (&elseblock, size, pstat);
4488 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node, pointer,
4491 gfc_add_expr_to_block (&elseblock, tmp);
4493 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
4494 var_overflow, integer_zero_node));
4495 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
4496 error, gfc_finish_block (&elseblock));
4498 gfc_add_expr_to_block (&se->pre, tmp);
4500 gfc_conv_descriptor_offset_set (&se->pre, se->expr, offset);
4502 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
4503 && expr->ts.u.derived->attr.alloc_comp)
4505 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
4506 ref->u.ar.as->rank);
4507 gfc_add_expr_to_block (&se->pre, tmp);
4514 /* Deallocate an array variable. Also used when an allocated variable goes
4519 gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
4525 gfc_start_block (&block);
4526 /* Get a pointer to the data. */
4527 var = gfc_conv_descriptor_data_get (descriptor);
4530 /* Parameter is the address of the data component. */
4531 tmp = gfc_deallocate_with_status (var, pstat, false, expr);
4532 gfc_add_expr_to_block (&block, tmp);
4534 /* Zero the data pointer. */
4535 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4536 var, build_int_cst (TREE_TYPE (var), 0));
4537 gfc_add_expr_to_block (&block, tmp);
4539 return gfc_finish_block (&block);
4543 /* Create an array constructor from an initialization expression.
4544 We assume the frontend already did any expansions and conversions. */
4547 gfc_conv_array_initializer (tree type, gfc_expr * expr)
4553 unsigned HOST_WIDE_INT lo;
4555 VEC(constructor_elt,gc) *v = NULL;
4557 switch (expr->expr_type)
4560 case EXPR_STRUCTURE:
4561 /* A single scalar or derived type value. Create an array with all
4562 elements equal to that value. */
4563 gfc_init_se (&se, NULL);
4565 if (expr->expr_type == EXPR_CONSTANT)
4566 gfc_conv_constant (&se, expr);
4568 gfc_conv_structure (&se, expr, 1);
4570 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4571 gcc_assert (tmp && INTEGER_CST_P (tmp));
4572 hi = TREE_INT_CST_HIGH (tmp);
4573 lo = TREE_INT_CST_LOW (tmp);
4577 /* This will probably eat buckets of memory for large arrays. */
4578 while (hi != 0 || lo != 0)
4580 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
4588 /* Create a vector of all the elements. */
4589 for (c = gfc_constructor_first (expr->value.constructor);
4590 c; c = gfc_constructor_next (c))
4594 /* Problems occur when we get something like
4595 integer :: a(lots) = (/(i, i=1, lots)/) */
4596 gfc_fatal_error ("The number of elements in the array constructor "
4597 "at %L requires an increase of the allowed %d "
4598 "upper limit. See -fmax-array-constructor "
4599 "option", &expr->where,
4600 gfc_option.flag_max_array_constructor);
4603 if (mpz_cmp_si (c->offset, 0) != 0)
4604 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4608 gfc_init_se (&se, NULL);
4609 switch (c->expr->expr_type)
4612 gfc_conv_constant (&se, c->expr);
4613 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4616 case EXPR_STRUCTURE:
4617 gfc_conv_structure (&se, c->expr, 1);
4618 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4623 /* Catch those occasional beasts that do not simplify
4624 for one reason or another, assuming that if they are
4625 standard defying the frontend will catch them. */
4626 gfc_conv_expr (&se, c->expr);
4627 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4634 return gfc_build_null_descriptor (type);
4640 /* Create a constructor from the list of elements. */
4641 tmp = build_constructor (type, v);
4642 TREE_CONSTANT (tmp) = 1;
4647 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
4648 returns the size (in elements) of the array. */
4651 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
4652 stmtblock_t * pblock)
4667 size = gfc_index_one_node;
4668 offset = gfc_index_zero_node;
4669 for (dim = 0; dim < as->rank; dim++)
4671 /* Evaluate non-constant array bound expressions. */
4672 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4673 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4675 gfc_init_se (&se, NULL);
4676 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4677 gfc_add_block_to_block (pblock, &se.pre);
4678 gfc_add_modify (pblock, lbound, se.expr);
4680 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4681 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4683 gfc_init_se (&se, NULL);
4684 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4685 gfc_add_block_to_block (pblock, &se.pre);
4686 gfc_add_modify (pblock, ubound, se.expr);
4688 /* The offset of this dimension. offset = offset - lbound * stride. */
4689 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4691 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4694 /* The size of this dimension, and the stride of the next. */
4695 if (dim + 1 < as->rank)
4696 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
4698 stride = GFC_TYPE_ARRAY_SIZE (type);
4700 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
4702 /* Calculate stride = size * (ubound + 1 - lbound). */
4703 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4704 gfc_array_index_type,
4705 gfc_index_one_node, lbound);
4706 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4707 gfc_array_index_type, ubound, tmp);
4708 tmp = fold_build2_loc (input_location, MULT_EXPR,
4709 gfc_array_index_type, size, tmp);
4711 gfc_add_modify (pblock, stride, tmp);
4713 stride = gfc_evaluate_now (tmp, pblock);
4715 /* Make sure that negative size arrays are translated
4716 to being zero size. */
4717 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4718 stride, gfc_index_zero_node);
4719 tmp = fold_build3_loc (input_location, COND_EXPR,
4720 gfc_array_index_type, tmp,
4721 stride, gfc_index_zero_node);
4722 gfc_add_modify (pblock, stride, tmp);
4727 for (dim = as->rank; dim < as->rank + as->corank; dim++)
4729 /* Evaluate non-constant array bound expressions. */
4730 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4731 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4733 gfc_init_se (&se, NULL);
4734 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4735 gfc_add_block_to_block (pblock, &se.pre);
4736 gfc_add_modify (pblock, lbound, se.expr);
4738 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4739 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4741 gfc_init_se (&se, NULL);
4742 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4743 gfc_add_block_to_block (pblock, &se.pre);
4744 gfc_add_modify (pblock, ubound, se.expr);
4747 gfc_trans_vla_type_sizes (sym, pblock);
4754 /* Generate code to initialize/allocate an array variable. */
4757 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
4758 gfc_wrapped_block * block)
4767 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
4769 /* Do nothing for USEd variables. */
4770 if (sym->attr.use_assoc)
4773 type = TREE_TYPE (decl);
4774 gcc_assert (GFC_ARRAY_TYPE_P (type));
4775 onstack = TREE_CODE (type) != POINTER_TYPE;
4777 gfc_start_block (&init);
4779 /* Evaluate character string length. */
4780 if (sym->ts.type == BT_CHARACTER
4781 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4783 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4785 gfc_trans_vla_type_sizes (sym, &init);
4787 /* Emit a DECL_EXPR for this variable, which will cause the
4788 gimplifier to allocate storage, and all that good stuff. */
4789 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
4790 gfc_add_expr_to_block (&init, tmp);
4795 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
4799 type = TREE_TYPE (type);
4801 gcc_assert (!sym->attr.use_assoc);
4802 gcc_assert (!TREE_STATIC (decl));
4803 gcc_assert (!sym->module);
4805 if (sym->ts.type == BT_CHARACTER
4806 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4807 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4809 size = gfc_trans_array_bounds (type, sym, &offset, &init);
4811 /* Don't actually allocate space for Cray Pointees. */
4812 if (sym->attr.cray_pointee)
4814 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4815 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4817 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
4821 /* The size is the number of elements in the array, so multiply by the
4822 size of an element to get the total size. */
4823 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4824 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4825 size, fold_convert (gfc_array_index_type, tmp));
4827 /* Allocate memory to hold the data. */
4828 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
4829 gfc_add_modify (&init, decl, tmp);
4831 /* Set offset of the array. */
4832 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4833 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4835 /* Automatic arrays should not have initializers. */
4836 gcc_assert (!sym->value);
4838 /* Free the temporary. */
4839 tmp = gfc_call_free (convert (pvoid_type_node, decl));
4841 gfc_add_init_cleanup (block, gfc_finish_block (&init), tmp);
4845 /* Generate entry and exit code for g77 calling convention arrays. */
4848 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
4858 gfc_save_backend_locus (&loc);
4859 gfc_set_backend_locus (&sym->declared_at);
4861 /* Descriptor type. */
4862 parm = sym->backend_decl;
4863 type = TREE_TYPE (parm);
4864 gcc_assert (GFC_ARRAY_TYPE_P (type));
4866 gfc_start_block (&init);
4868 if (sym->ts.type == BT_CHARACTER
4869 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
4870 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4872 /* Evaluate the bounds of the array. */
4873 gfc_trans_array_bounds (type, sym, &offset, &init);
4875 /* Set the offset. */
4876 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4877 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4879 /* Set the pointer itself if we aren't using the parameter directly. */
4880 if (TREE_CODE (parm) != PARM_DECL)
4882 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
4883 gfc_add_modify (&init, parm, tmp);
4885 stmt = gfc_finish_block (&init);
4887 gfc_restore_backend_locus (&loc);
4889 /* Add the initialization code to the start of the function. */
4891 if (sym->attr.optional || sym->attr.not_always_present)
4893 tmp = gfc_conv_expr_present (sym);
4894 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
4897 gfc_add_init_cleanup (block, stmt, NULL_TREE);
4901 /* Modify the descriptor of an array parameter so that it has the
4902 correct lower bound. Also move the upper bound accordingly.
4903 If the array is not packed, it will be copied into a temporary.
4904 For each dimension we set the new lower and upper bounds. Then we copy the
4905 stride and calculate the offset for this dimension. We also work out
4906 what the stride of a packed array would be, and see it the two match.
4907 If the array need repacking, we set the stride to the values we just
4908 calculated, recalculate the offset and copy the array data.
4909 Code is also added to copy the data back at the end of the function.
4913 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
4914 gfc_wrapped_block * block)
4921 tree stmtInit, stmtCleanup;
4928 tree stride, stride2;
4938 /* Do nothing for pointer and allocatable arrays. */
4939 if (sym->attr.pointer || sym->attr.allocatable)
4942 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
4944 gfc_trans_g77_array (sym, block);
4948 gfc_save_backend_locus (&loc);
4949 gfc_set_backend_locus (&sym->declared_at);
4951 /* Descriptor type. */
4952 type = TREE_TYPE (tmpdesc);
4953 gcc_assert (GFC_ARRAY_TYPE_P (type));
4954 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
4955 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
4956 gfc_start_block (&init);
4958 if (sym->ts.type == BT_CHARACTER
4959 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
4960 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4962 checkparm = (sym->as->type == AS_EXPLICIT
4963 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
4965 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
4966 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
4968 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
4970 /* For non-constant shape arrays we only check if the first dimension
4971 is contiguous. Repacking higher dimensions wouldn't gain us
4972 anything as we still don't know the array stride. */
4973 partial = gfc_create_var (boolean_type_node, "partial");
4974 TREE_USED (partial) = 1;
4975 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
4976 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
4977 gfc_index_one_node);
4978 gfc_add_modify (&init, partial, tmp);
4981 partial = NULL_TREE;
4983 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
4984 here, however I think it does the right thing. */
4987 /* Set the first stride. */
4988 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
4989 stride = gfc_evaluate_now (stride, &init);
4991 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
4992 stride, gfc_index_zero_node);
4993 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4994 tmp, gfc_index_one_node, stride);
4995 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
4996 gfc_add_modify (&init, stride, tmp);
4998 /* Allow the user to disable array repacking. */
4999 stmt_unpacked = NULL_TREE;
5003 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5004 /* A library call to repack the array if necessary. */
5005 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5006 stmt_unpacked = build_call_expr_loc (input_location,
5007 gfor_fndecl_in_pack, 1, tmp);
5009 stride = gfc_index_one_node;
5011 if (gfc_option.warn_array_temp)
5012 gfc_warning ("Creating array temporary at %L", &loc);
5015 /* This is for the case where the array data is used directly without
5016 calling the repack function. */
5017 if (no_repack || partial != NULL_TREE)
5018 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5020 stmt_packed = NULL_TREE;
5022 /* Assign the data pointer. */
5023 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5025 /* Don't repack unknown shape arrays when the first stride is 1. */
5026 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5027 partial, stmt_packed, stmt_unpacked);
5030 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5031 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5033 offset = gfc_index_zero_node;
5034 size = gfc_index_one_node;
5036 /* Evaluate the bounds of the array. */
5037 for (n = 0; n < sym->as->rank; n++)
5039 if (checkparm || !sym->as->upper[n])
5041 /* Get the bounds of the actual parameter. */
5042 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5043 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5047 dubound = NULL_TREE;
5048 dlbound = NULL_TREE;
5051 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5052 if (!INTEGER_CST_P (lbound))
5054 gfc_init_se (&se, NULL);
5055 gfc_conv_expr_type (&se, sym->as->lower[n],
5056 gfc_array_index_type);
5057 gfc_add_block_to_block (&init, &se.pre);
5058 gfc_add_modify (&init, lbound, se.expr);
5061 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5062 /* Set the desired upper bound. */
5063 if (sym->as->upper[n])
5065 /* We know what we want the upper bound to be. */
5066 if (!INTEGER_CST_P (ubound))
5068 gfc_init_se (&se, NULL);
5069 gfc_conv_expr_type (&se, sym->as->upper[n],
5070 gfc_array_index_type);
5071 gfc_add_block_to_block (&init, &se.pre);
5072 gfc_add_modify (&init, ubound, se.expr);
5075 /* Check the sizes match. */
5078 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5082 temp = fold_build2_loc (input_location, MINUS_EXPR,
5083 gfc_array_index_type, ubound, lbound);
5084 temp = fold_build2_loc (input_location, PLUS_EXPR,
5085 gfc_array_index_type,
5086 gfc_index_one_node, temp);
5087 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5088 gfc_array_index_type, dubound,
5090 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5091 gfc_array_index_type,
5092 gfc_index_one_node, stride2);
5093 tmp = fold_build2_loc (input_location, NE_EXPR,
5094 gfc_array_index_type, temp, stride2);
5095 asprintf (&msg, "Dimension %d of array '%s' has extent "
5096 "%%ld instead of %%ld", n+1, sym->name);
5098 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5099 fold_convert (long_integer_type_node, temp),
5100 fold_convert (long_integer_type_node, stride2));
5107 /* For assumed shape arrays move the upper bound by the same amount
5108 as the lower bound. */
5109 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5110 gfc_array_index_type, dubound, dlbound);
5111 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5112 gfc_array_index_type, tmp, lbound);
5113 gfc_add_modify (&init, ubound, tmp);
5115 /* The offset of this dimension. offset = offset - lbound * stride. */
5116 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5118 offset = fold_build2_loc (input_location, MINUS_EXPR,
5119 gfc_array_index_type, offset, tmp);
5121 /* The size of this dimension, and the stride of the next. */
5122 if (n + 1 < sym->as->rank)
5124 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5126 if (no_repack || partial != NULL_TREE)
5128 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5130 /* Figure out the stride if not a known constant. */
5131 if (!INTEGER_CST_P (stride))
5134 stmt_packed = NULL_TREE;
5137 /* Calculate stride = size * (ubound + 1 - lbound). */
5138 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5139 gfc_array_index_type,
5140 gfc_index_one_node, lbound);
5141 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5142 gfc_array_index_type, ubound, tmp);
5143 size = fold_build2_loc (input_location, MULT_EXPR,
5144 gfc_array_index_type, size, tmp);
5148 /* Assign the stride. */
5149 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5150 tmp = fold_build3_loc (input_location, COND_EXPR,
5151 gfc_array_index_type, partial,
5152 stmt_unpacked, stmt_packed);
5154 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5155 gfc_add_modify (&init, stride, tmp);
5160 stride = GFC_TYPE_ARRAY_SIZE (type);
5162 if (stride && !INTEGER_CST_P (stride))
5164 /* Calculate size = stride * (ubound + 1 - lbound). */
5165 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5166 gfc_array_index_type,
5167 gfc_index_one_node, lbound);
5168 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5169 gfc_array_index_type,
5171 tmp = fold_build2_loc (input_location, MULT_EXPR,
5172 gfc_array_index_type,
5173 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5174 gfc_add_modify (&init, stride, tmp);
5179 /* Set the offset. */
5180 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5181 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5183 gfc_trans_vla_type_sizes (sym, &init);
5185 stmtInit = gfc_finish_block (&init);
5187 /* Only do the entry/initialization code if the arg is present. */
5188 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5189 optional_arg = (sym->attr.optional
5190 || (sym->ns->proc_name->attr.entry_master
5191 && sym->attr.dummy));
5194 tmp = gfc_conv_expr_present (sym);
5195 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
5196 build_empty_stmt (input_location));
5201 stmtCleanup = NULL_TREE;
5204 stmtblock_t cleanup;
5205 gfc_start_block (&cleanup);
5207 if (sym->attr.intent != INTENT_IN)
5209 /* Copy the data back. */
5210 tmp = build_call_expr_loc (input_location,
5211 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
5212 gfc_add_expr_to_block (&cleanup, tmp);
5215 /* Free the temporary. */
5216 tmp = gfc_call_free (tmpdesc);
5217 gfc_add_expr_to_block (&cleanup, tmp);
5219 stmtCleanup = gfc_finish_block (&cleanup);
5221 /* Only do the cleanup if the array was repacked. */
5222 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
5223 tmp = gfc_conv_descriptor_data_get (tmp);
5224 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
5226 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5227 build_empty_stmt (input_location));
5231 tmp = gfc_conv_expr_present (sym);
5232 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5233 build_empty_stmt (input_location));
5237 /* We don't need to free any memory allocated by internal_pack as it will
5238 be freed at the end of the function by pop_context. */
5239 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
5241 gfc_restore_backend_locus (&loc);
5245 /* Calculate the overall offset, including subreferences. */
5247 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
5248 bool subref, gfc_expr *expr)
5258 /* If offset is NULL and this is not a subreferenced array, there is
5260 if (offset == NULL_TREE)
5263 offset = gfc_index_zero_node;
5268 tmp = gfc_conv_array_data (desc);
5269 tmp = build_fold_indirect_ref_loc (input_location,
5271 tmp = gfc_build_array_ref (tmp, offset, NULL);
5273 /* Offset the data pointer for pointer assignments from arrays with
5274 subreferences; e.g. my_integer => my_type(:)%integer_component. */
5277 /* Go past the array reference. */
5278 for (ref = expr->ref; ref; ref = ref->next)
5279 if (ref->type == REF_ARRAY &&
5280 ref->u.ar.type != AR_ELEMENT)
5286 /* Calculate the offset for each subsequent subreference. */
5287 for (; ref; ref = ref->next)
5292 field = ref->u.c.component->backend_decl;
5293 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
5294 tmp = fold_build3_loc (input_location, COMPONENT_REF,
5296 tmp, field, NULL_TREE);
5300 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
5301 gfc_init_se (&start, NULL);
5302 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
5303 gfc_add_block_to_block (block, &start.pre);
5304 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
5308 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
5309 && ref->u.ar.type == AR_ELEMENT);
5311 /* TODO - Add bounds checking. */
5312 stride = gfc_index_one_node;
5313 index = gfc_index_zero_node;
5314 for (n = 0; n < ref->u.ar.dimen; n++)
5319 /* Update the index. */
5320 gfc_init_se (&start, NULL);
5321 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
5322 itmp = gfc_evaluate_now (start.expr, block);
5323 gfc_init_se (&start, NULL);
5324 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
5325 jtmp = gfc_evaluate_now (start.expr, block);
5326 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5327 gfc_array_index_type, itmp, jtmp);
5328 itmp = fold_build2_loc (input_location, MULT_EXPR,
5329 gfc_array_index_type, itmp, stride);
5330 index = fold_build2_loc (input_location, PLUS_EXPR,
5331 gfc_array_index_type, itmp, index);
5332 index = gfc_evaluate_now (index, block);
5334 /* Update the stride. */
5335 gfc_init_se (&start, NULL);
5336 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
5337 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5338 gfc_array_index_type, start.expr,
5340 itmp = fold_build2_loc (input_location, PLUS_EXPR,
5341 gfc_array_index_type,
5342 gfc_index_one_node, itmp);
5343 stride = fold_build2_loc (input_location, MULT_EXPR,
5344 gfc_array_index_type, stride, itmp);
5345 stride = gfc_evaluate_now (stride, block);
5348 /* Apply the index to obtain the array element. */
5349 tmp = gfc_build_array_ref (tmp, index, NULL);
5359 /* Set the target data pointer. */
5360 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
5361 gfc_conv_descriptor_data_set (block, parm, offset);
5365 /* gfc_conv_expr_descriptor needs the string length an expression
5366 so that the size of the temporary can be obtained. This is done
5367 by adding up the string lengths of all the elements in the
5368 expression. Function with non-constant expressions have their
5369 string lengths mapped onto the actual arguments using the
5370 interface mapping machinery in trans-expr.c. */
5372 get_array_charlen (gfc_expr *expr, gfc_se *se)
5374 gfc_interface_mapping mapping;
5375 gfc_formal_arglist *formal;
5376 gfc_actual_arglist *arg;
5379 if (expr->ts.u.cl->length
5380 && gfc_is_constant_expr (expr->ts.u.cl->length))
5382 if (!expr->ts.u.cl->backend_decl)
5383 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5387 switch (expr->expr_type)
5390 get_array_charlen (expr->value.op.op1, se);
5392 /* For parentheses the expression ts.u.cl is identical. */
5393 if (expr->value.op.op == INTRINSIC_PARENTHESES)
5396 expr->ts.u.cl->backend_decl =
5397 gfc_create_var (gfc_charlen_type_node, "sln");
5399 if (expr->value.op.op2)
5401 get_array_charlen (expr->value.op.op2, se);
5403 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
5405 /* Add the string lengths and assign them to the expression
5406 string length backend declaration. */
5407 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5408 fold_build2_loc (input_location, PLUS_EXPR,
5409 gfc_charlen_type_node,
5410 expr->value.op.op1->ts.u.cl->backend_decl,
5411 expr->value.op.op2->ts.u.cl->backend_decl));
5414 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5415 expr->value.op.op1->ts.u.cl->backend_decl);
5419 if (expr->value.function.esym == NULL
5420 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
5422 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5426 /* Map expressions involving the dummy arguments onto the actual
5427 argument expressions. */
5428 gfc_init_interface_mapping (&mapping);
5429 formal = expr->symtree->n.sym->formal;
5430 arg = expr->value.function.actual;
5432 /* Set se = NULL in the calls to the interface mapping, to suppress any
5434 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
5439 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
5442 gfc_init_se (&tse, NULL);
5444 /* Build the expression for the character length and convert it. */
5445 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
5447 gfc_add_block_to_block (&se->pre, &tse.pre);
5448 gfc_add_block_to_block (&se->post, &tse.post);
5449 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
5450 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
5451 gfc_charlen_type_node, tse.expr,
5452 build_int_cst (gfc_charlen_type_node, 0));
5453 expr->ts.u.cl->backend_decl = tse.expr;
5454 gfc_free_interface_mapping (&mapping);
5458 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5463 /* Helper function to check dimensions. */
5465 dim_ok (gfc_ss_info *info)
5468 for (n = 0; n < info->dimen; n++)
5469 if (info->dim[n] != n)
5474 /* Convert an array for passing as an actual argument. Expressions and
5475 vector subscripts are evaluated and stored in a temporary, which is then
5476 passed. For whole arrays the descriptor is passed. For array sections
5477 a modified copy of the descriptor is passed, but using the original data.
5479 This function is also used for array pointer assignments, and there
5482 - se->want_pointer && !se->direct_byref
5483 EXPR is an actual argument. On exit, se->expr contains a
5484 pointer to the array descriptor.
5486 - !se->want_pointer && !se->direct_byref
5487 EXPR is an actual argument to an intrinsic function or the
5488 left-hand side of a pointer assignment. On exit, se->expr
5489 contains the descriptor for EXPR.
5491 - !se->want_pointer && se->direct_byref
5492 EXPR is the right-hand side of a pointer assignment and
5493 se->expr is the descriptor for the previously-evaluated
5494 left-hand side. The function creates an assignment from
5498 The se->force_tmp flag disables the non-copying descriptor optimization
5499 that is used for transpose. It may be used in cases where there is an
5500 alias between the transpose argument and another argument in the same
5504 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
5516 bool subref_array_target = false;
5519 gcc_assert (ss != NULL);
5520 gcc_assert (ss != gfc_ss_terminator);
5522 /* Special case things we know we can pass easily. */
5523 switch (expr->expr_type)
5526 /* If we have a linear array section, we can pass it directly.
5527 Otherwise we need to copy it into a temporary. */
5529 gcc_assert (ss->type == GFC_SS_SECTION);
5530 gcc_assert (ss->expr == expr);
5531 info = &ss->data.info;
5533 /* Get the descriptor for the array. */
5534 gfc_conv_ss_descriptor (&se->pre, ss, 0);
5535 desc = info->descriptor;
5537 subref_array_target = se->direct_byref && is_subref_array (expr);
5538 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
5539 && !subref_array_target;
5546 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5548 /* Create a new descriptor if the array doesn't have one. */
5551 else if (info->ref->u.ar.type == AR_FULL)
5553 else if (se->direct_byref)
5556 full = gfc_full_array_ref_p (info->ref, NULL);
5558 if (full && dim_ok (info))
5560 if (se->direct_byref && !se->byref_noassign)
5562 /* Copy the descriptor for pointer assignments. */
5563 gfc_add_modify (&se->pre, se->expr, desc);
5565 /* Add any offsets from subreferences. */
5566 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
5567 subref_array_target, expr);
5569 else if (se->want_pointer)
5571 /* We pass full arrays directly. This means that pointers and
5572 allocatable arrays should also work. */
5573 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5580 if (expr->ts.type == BT_CHARACTER)
5581 se->string_length = gfc_get_expr_charlen (expr);
5589 /* We don't need to copy data in some cases. */
5590 arg = gfc_get_noncopying_intrinsic_argument (expr);
5593 /* This is a call to transpose... */
5594 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
5595 /* ... which has already been handled by the scalarizer, so
5596 that we just need to get its argument's descriptor. */
5597 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
5601 /* A transformational function return value will be a temporary
5602 array descriptor. We still need to go through the scalarizer
5603 to create the descriptor. Elemental functions ar handled as
5604 arbitrary expressions, i.e. copy to a temporary. */
5606 if (se->direct_byref)
5608 gcc_assert (ss->type == GFC_SS_FUNCTION && ss->expr == expr);
5610 /* For pointer assignments pass the descriptor directly. */
5614 gcc_assert (se->ss == ss);
5615 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
5616 gfc_conv_expr (se, expr);
5620 if (ss->expr != expr || ss->type != GFC_SS_FUNCTION)
5622 if (ss->expr != expr)
5623 /* Elemental function. */
5624 gcc_assert ((expr->value.function.esym != NULL
5625 && expr->value.function.esym->attr.elemental)
5626 || (expr->value.function.isym != NULL
5627 && expr->value.function.isym->elemental));
5629 gcc_assert (ss->type == GFC_SS_INTRINSIC);
5632 if (expr->ts.type == BT_CHARACTER
5633 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
5634 get_array_charlen (expr, se);
5640 /* Transformational function. */
5641 info = &ss->data.info;
5647 /* Constant array constructors don't need a temporary. */
5648 if (ss->type == GFC_SS_CONSTRUCTOR
5649 && expr->ts.type != BT_CHARACTER
5650 && gfc_constant_array_constructor_p (expr->value.constructor))
5653 info = &ss->data.info;
5663 /* Something complicated. Copy it into a temporary. */
5669 /* If we are creating a temporary, we don't need to bother about aliases
5674 gfc_init_loopinfo (&loop);
5676 /* Associate the SS with the loop. */
5677 gfc_add_ss_to_loop (&loop, ss);
5679 /* Tell the scalarizer not to bother creating loop variables, etc. */
5681 loop.array_parameter = 1;
5683 /* The right-hand side of a pointer assignment mustn't use a temporary. */
5684 gcc_assert (!se->direct_byref);
5686 /* Setup the scalarizing loops and bounds. */
5687 gfc_conv_ss_startstride (&loop);
5691 /* Tell the scalarizer to make a temporary. */
5692 loop.temp_ss = gfc_get_ss ();
5693 loop.temp_ss->type = GFC_SS_TEMP;
5694 loop.temp_ss->next = gfc_ss_terminator;
5696 if (expr->ts.type == BT_CHARACTER
5697 && !expr->ts.u.cl->backend_decl)
5698 get_array_charlen (expr, se);
5700 loop.temp_ss->data.temp.type = gfc_typenode_for_spec (&expr->ts);
5702 if (expr->ts.type == BT_CHARACTER)
5703 loop.temp_ss->string_length = expr->ts.u.cl->backend_decl;
5705 loop.temp_ss->string_length = NULL;
5707 se->string_length = loop.temp_ss->string_length;
5708 loop.temp_ss->data.temp.dimen = loop.dimen;
5709 loop.temp_ss->data.temp.codimen = loop.codimen;
5710 gfc_add_ss_to_loop (&loop, loop.temp_ss);
5713 gfc_conv_loop_setup (&loop, & expr->where);
5717 /* Copy into a temporary and pass that. We don't need to copy the data
5718 back because expressions and vector subscripts must be INTENT_IN. */
5719 /* TODO: Optimize passing function return values. */
5723 /* Start the copying loops. */
5724 gfc_mark_ss_chain_used (loop.temp_ss, 1);
5725 gfc_mark_ss_chain_used (ss, 1);
5726 gfc_start_scalarized_body (&loop, &block);
5728 /* Copy each data element. */
5729 gfc_init_se (&lse, NULL);
5730 gfc_copy_loopinfo_to_se (&lse, &loop);
5731 gfc_init_se (&rse, NULL);
5732 gfc_copy_loopinfo_to_se (&rse, &loop);
5734 lse.ss = loop.temp_ss;
5737 gfc_conv_scalarized_array_ref (&lse, NULL);
5738 if (expr->ts.type == BT_CHARACTER)
5740 gfc_conv_expr (&rse, expr);
5741 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
5742 rse.expr = build_fold_indirect_ref_loc (input_location,
5746 gfc_conv_expr_val (&rse, expr);
5748 gfc_add_block_to_block (&block, &rse.pre);
5749 gfc_add_block_to_block (&block, &lse.pre);
5751 lse.string_length = rse.string_length;
5752 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
5753 expr->expr_type == EXPR_VARIABLE, true);
5754 gfc_add_expr_to_block (&block, tmp);
5756 /* Finish the copying loops. */
5757 gfc_trans_scalarizing_loops (&loop, &block);
5759 desc = loop.temp_ss->data.info.descriptor;
5761 else if (expr->expr_type == EXPR_FUNCTION && dim_ok (info))
5763 desc = info->descriptor;
5764 se->string_length = ss->string_length;
5768 /* We pass sections without copying to a temporary. Make a new
5769 descriptor and point it at the section we want. The loop variable
5770 limits will be the limits of the section.
5771 A function may decide to repack the array to speed up access, but
5772 we're not bothered about that here. */
5773 int dim, ndim, codim;
5781 /* Set the string_length for a character array. */
5782 if (expr->ts.type == BT_CHARACTER)
5783 se->string_length = gfc_get_expr_charlen (expr);
5785 desc = info->descriptor;
5786 if (se->direct_byref && !se->byref_noassign)
5788 /* For pointer assignments we fill in the destination. */
5790 parmtype = TREE_TYPE (parm);
5794 /* Otherwise make a new one. */
5795 parmtype = gfc_get_element_type (TREE_TYPE (desc));
5796 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
5797 loop.codimen, loop.from,
5799 GFC_ARRAY_UNKNOWN, false);
5800 parm = gfc_create_var (parmtype, "parm");
5803 offset = gfc_index_zero_node;
5805 /* The following can be somewhat confusing. We have two
5806 descriptors, a new one and the original array.
5807 {parm, parmtype, dim} refer to the new one.
5808 {desc, type, n, loop} refer to the original, which maybe
5809 a descriptorless array.
5810 The bounds of the scalarization are the bounds of the section.
5811 We don't have to worry about numeric overflows when calculating
5812 the offsets because all elements are within the array data. */
5814 /* Set the dtype. */
5815 tmp = gfc_conv_descriptor_dtype (parm);
5816 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
5818 /* Set offset for assignments to pointer only to zero if it is not
5820 if (se->direct_byref
5821 && info->ref && info->ref->u.ar.type != AR_FULL)
5822 base = gfc_index_zero_node;
5823 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5824 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
5828 ndim = info->ref ? info->ref->u.ar.dimen : info->dimen;
5829 codim = info->codimen;
5830 for (n = 0; n < ndim; n++)
5832 stride = gfc_conv_array_stride (desc, n);
5834 /* Work out the offset. */
5836 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
5838 gcc_assert (info->subscript[n]
5839 && info->subscript[n]->type == GFC_SS_SCALAR);
5840 start = info->subscript[n]->data.scalar.expr;
5844 /* Evaluate and remember the start of the section. */
5845 start = info->start[n];
5846 stride = gfc_evaluate_now (stride, &loop.pre);
5849 tmp = gfc_conv_array_lbound (desc, n);
5850 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
5852 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
5854 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
5858 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
5860 /* For elemental dimensions, we only need the offset. */
5864 /* Vector subscripts need copying and are handled elsewhere. */
5866 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
5868 /* look for the corresponding scalarizer dimension: dim. */
5869 for (dim = 0; dim < ndim; dim++)
5870 if (info->dim[dim] == n)
5873 /* loop exited early: the DIM being looked for has been found. */
5874 gcc_assert (dim < ndim);
5876 /* Set the new lower bound. */
5877 from = loop.from[dim];
5880 /* If we have an array section or are assigning make sure that
5881 the lower bound is 1. References to the full
5882 array should otherwise keep the original bounds. */
5884 || info->ref->u.ar.type != AR_FULL)
5885 && !integer_onep (from))
5887 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5888 gfc_array_index_type, gfc_index_one_node,
5890 to = fold_build2_loc (input_location, PLUS_EXPR,
5891 gfc_array_index_type, to, tmp);
5892 from = gfc_index_one_node;
5894 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
5895 gfc_rank_cst[dim], from);
5897 /* Set the new upper bound. */
5898 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
5899 gfc_rank_cst[dim], to);
5901 /* Multiply the stride by the section stride to get the
5903 stride = fold_build2_loc (input_location, MULT_EXPR,
5904 gfc_array_index_type,
5905 stride, info->stride[n]);
5907 if (se->direct_byref
5909 && info->ref->u.ar.type != AR_FULL)
5911 base = fold_build2_loc (input_location, MINUS_EXPR,
5912 TREE_TYPE (base), base, stride);
5914 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5916 tmp = gfc_conv_array_lbound (desc, n);
5917 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5918 TREE_TYPE (base), tmp, loop.from[dim]);
5919 tmp = fold_build2_loc (input_location, MULT_EXPR,
5920 TREE_TYPE (base), tmp,
5921 gfc_conv_array_stride (desc, n));
5922 base = fold_build2_loc (input_location, PLUS_EXPR,
5923 TREE_TYPE (base), tmp, base);
5926 /* Store the new stride. */
5927 gfc_conv_descriptor_stride_set (&loop.pre, parm,
5928 gfc_rank_cst[dim], stride);
5931 for (n = ndim; n < ndim + codim; n++)
5933 /* look for the corresponding scalarizer dimension: dim. */
5934 for (dim = 0; dim < ndim + codim; dim++)
5935 if (info->dim[dim] == n)
5938 /* loop exited early: the DIM being looked for has been found. */
5939 gcc_assert (dim < ndim + codim);
5941 from = loop.from[dim];
5943 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
5944 gfc_rank_cst[dim], from);
5945 if (n < ndim + codim - 1)
5946 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
5947 gfc_rank_cst[dim], to);
5951 if (se->data_not_needed)
5952 gfc_conv_descriptor_data_set (&loop.pre, parm,
5953 gfc_index_zero_node);
5955 /* Point the data pointer at the 1st element in the section. */
5956 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
5957 subref_array_target, expr);
5959 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5960 && !se->data_not_needed)
5962 /* Set the offset. */
5963 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
5967 /* Only the callee knows what the correct offset it, so just set
5969 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
5974 if (!se->direct_byref || se->byref_noassign)
5976 /* Get a pointer to the new descriptor. */
5977 if (se->want_pointer)
5978 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5983 gfc_add_block_to_block (&se->pre, &loop.pre);
5984 gfc_add_block_to_block (&se->post, &loop.post);
5986 /* Cleanup the scalarizer. */
5987 gfc_cleanup_loop (&loop);
5990 /* Helper function for gfc_conv_array_parameter if array size needs to be
5994 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
5997 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5998 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
5999 else if (expr->rank > 1)
6000 *size = build_call_expr_loc (input_location,
6001 gfor_fndecl_size0, 1,
6002 gfc_build_addr_expr (NULL, desc));
6005 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6006 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6008 *size = fold_build2_loc (input_location, MINUS_EXPR,
6009 gfc_array_index_type, ubound, lbound);
6010 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6011 *size, gfc_index_one_node);
6012 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6013 *size, gfc_index_zero_node);
6015 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6016 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6017 *size, fold_convert (gfc_array_index_type, elem));
6020 /* Convert an array for passing as an actual parameter. */
6021 /* TODO: Optimize passing g77 arrays. */
6024 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6025 const gfc_symbol *fsym, const char *proc_name,
6030 tree tmp = NULL_TREE;
6032 tree parent = DECL_CONTEXT (current_function_decl);
6033 bool full_array_var;
6034 bool this_array_result;
6037 bool array_constructor;
6038 bool good_allocatable;
6039 bool ultimate_ptr_comp;
6040 bool ultimate_alloc_comp;
6045 ultimate_ptr_comp = false;
6046 ultimate_alloc_comp = false;
6048 for (ref = expr->ref; ref; ref = ref->next)
6050 if (ref->next == NULL)
6053 if (ref->type == REF_COMPONENT)
6055 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6056 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6060 full_array_var = false;
6063 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6064 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6066 sym = full_array_var ? expr->symtree->n.sym : NULL;
6068 /* The symbol should have an array specification. */
6069 gcc_assert (!sym || sym->as || ref->u.ar.as);
6071 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6073 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6074 expr->ts.u.cl->backend_decl = tmp;
6075 se->string_length = tmp;
6078 /* Is this the result of the enclosing procedure? */
6079 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6080 if (this_array_result
6081 && (sym->backend_decl != current_function_decl)
6082 && (sym->backend_decl != parent))
6083 this_array_result = false;
6085 /* Passing address of the array if it is not pointer or assumed-shape. */
6086 if (full_array_var && g77 && !this_array_result)
6088 tmp = gfc_get_symbol_decl (sym);
6090 if (sym->ts.type == BT_CHARACTER)
6091 se->string_length = sym->ts.u.cl->backend_decl;
6093 if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6095 gfc_conv_expr_descriptor (se, expr, ss);
6096 se->expr = gfc_conv_array_data (se->expr);
6100 if (!sym->attr.pointer
6102 && sym->as->type != AS_ASSUMED_SHAPE
6103 && !sym->attr.allocatable)
6105 /* Some variables are declared directly, others are declared as
6106 pointers and allocated on the heap. */
6107 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6110 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6112 array_parameter_size (tmp, expr, size);
6116 if (sym->attr.allocatable)
6118 if (sym->attr.dummy || sym->attr.result)
6120 gfc_conv_expr_descriptor (se, expr, ss);
6124 array_parameter_size (tmp, expr, size);
6125 se->expr = gfc_conv_array_data (tmp);
6130 /* A convenient reduction in scope. */
6131 contiguous = g77 && !this_array_result && contiguous;
6133 /* There is no need to pack and unpack the array, if it is contiguous
6134 and not a deferred- or assumed-shape array, or if it is simply
6136 no_pack = ((sym && sym->as
6137 && !sym->attr.pointer
6138 && sym->as->type != AS_DEFERRED
6139 && sym->as->type != AS_ASSUMED_SHAPE)
6141 (ref && ref->u.ar.as
6142 && ref->u.ar.as->type != AS_DEFERRED
6143 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6145 gfc_is_simply_contiguous (expr, false));
6147 no_pack = contiguous && no_pack;
6149 /* Array constructors are always contiguous and do not need packing. */
6150 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6152 /* Same is true of contiguous sections from allocatable variables. */
6153 good_allocatable = contiguous
6155 && expr->symtree->n.sym->attr.allocatable;
6157 /* Or ultimate allocatable components. */
6158 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
6160 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6162 gfc_conv_expr_descriptor (se, expr, ss);
6163 if (expr->ts.type == BT_CHARACTER)
6164 se->string_length = expr->ts.u.cl->backend_decl;
6166 array_parameter_size (se->expr, expr, size);
6167 se->expr = gfc_conv_array_data (se->expr);
6171 if (this_array_result)
6173 /* Result of the enclosing function. */
6174 gfc_conv_expr_descriptor (se, expr, ss);
6176 array_parameter_size (se->expr, expr, size);
6177 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6179 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
6180 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
6181 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
6188 /* Every other type of array. */
6189 se->want_pointer = 1;
6190 gfc_conv_expr_descriptor (se, expr, ss);
6192 array_parameter_size (build_fold_indirect_ref_loc (input_location,
6197 /* Deallocate the allocatable components of structures that are
6199 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
6200 && expr->ts.u.derived->attr.alloc_comp
6201 && expr->expr_type != EXPR_VARIABLE)
6203 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
6204 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
6206 /* The components shall be deallocated before their containing entity. */
6207 gfc_prepend_expr_to_block (&se->post, tmp);
6210 if (g77 || (fsym && fsym->attr.contiguous
6211 && !gfc_is_simply_contiguous (expr, false)))
6213 tree origptr = NULL_TREE;
6217 /* For contiguous arrays, save the original value of the descriptor. */
6220 origptr = gfc_create_var (pvoid_type_node, "origptr");
6221 tmp = build_fold_indirect_ref_loc (input_location, desc);
6222 tmp = gfc_conv_array_data (tmp);
6223 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6224 TREE_TYPE (origptr), origptr,
6225 fold_convert (TREE_TYPE (origptr), tmp));
6226 gfc_add_expr_to_block (&se->pre, tmp);
6229 /* Repack the array. */
6230 if (gfc_option.warn_array_temp)
6233 gfc_warning ("Creating array temporary at %L for argument '%s'",
6234 &expr->where, fsym->name);
6236 gfc_warning ("Creating array temporary at %L", &expr->where);
6239 ptr = build_call_expr_loc (input_location,
6240 gfor_fndecl_in_pack, 1, desc);
6242 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6244 tmp = gfc_conv_expr_present (sym);
6245 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
6246 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
6247 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
6250 ptr = gfc_evaluate_now (ptr, &se->pre);
6252 /* Use the packed data for the actual argument, except for contiguous arrays,
6253 where the descriptor's data component is set. */
6258 tmp = build_fold_indirect_ref_loc (input_location, desc);
6259 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
6262 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
6266 if (fsym && proc_name)
6267 asprintf (&msg, "An array temporary was created for argument "
6268 "'%s' of procedure '%s'", fsym->name, proc_name);
6270 asprintf (&msg, "An array temporary was created");
6272 tmp = build_fold_indirect_ref_loc (input_location,
6274 tmp = gfc_conv_array_data (tmp);
6275 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6276 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6278 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6279 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6281 gfc_conv_expr_present (sym), tmp);
6283 gfc_trans_runtime_check (false, true, tmp, &se->pre,
6288 gfc_start_block (&block);
6290 /* Copy the data back. */
6291 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
6293 tmp = build_call_expr_loc (input_location,
6294 gfor_fndecl_in_unpack, 2, desc, ptr);
6295 gfc_add_expr_to_block (&block, tmp);
6298 /* Free the temporary. */
6299 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
6300 gfc_add_expr_to_block (&block, tmp);
6302 stmt = gfc_finish_block (&block);
6304 gfc_init_block (&block);
6305 /* Only if it was repacked. This code needs to be executed before the
6306 loop cleanup code. */
6307 tmp = build_fold_indirect_ref_loc (input_location,
6309 tmp = gfc_conv_array_data (tmp);
6310 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6311 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6313 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6314 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6316 gfc_conv_expr_present (sym), tmp);
6318 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6320 gfc_add_expr_to_block (&block, tmp);
6321 gfc_add_block_to_block (&block, &se->post);
6323 gfc_init_block (&se->post);
6325 /* Reset the descriptor pointer. */
6328 tmp = build_fold_indirect_ref_loc (input_location, desc);
6329 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
6332 gfc_add_block_to_block (&se->post, &block);
6337 /* Generate code to deallocate an array, if it is allocated. */
6340 gfc_trans_dealloc_allocated (tree descriptor)
6346 gfc_start_block (&block);
6348 var = gfc_conv_descriptor_data_get (descriptor);
6351 /* Call array_deallocate with an int * present in the second argument.
6352 Although it is ignored here, it's presence ensures that arrays that
6353 are already deallocated are ignored. */
6354 tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
6355 gfc_add_expr_to_block (&block, tmp);
6357 /* Zero the data pointer. */
6358 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6359 var, build_int_cst (TREE_TYPE (var), 0));
6360 gfc_add_expr_to_block (&block, tmp);
6362 return gfc_finish_block (&block);
6366 /* This helper function calculates the size in words of a full array. */
6369 get_full_array_size (stmtblock_t *block, tree decl, int rank)
6374 idx = gfc_rank_cst[rank - 1];
6375 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
6376 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
6377 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
6379 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6380 tmp, gfc_index_one_node);
6381 tmp = gfc_evaluate_now (tmp, block);
6383 nelems = gfc_conv_descriptor_stride_get (decl, idx);
6384 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6386 return gfc_evaluate_now (tmp, block);
6390 /* Allocate dest to the same size as src, and copy src -> dest.
6391 If no_malloc is set, only the copy is done. */
6394 duplicate_allocatable (tree dest, tree src, tree type, int rank,
6404 /* If the source is null, set the destination to null. Then,
6405 allocate memory to the destination. */
6406 gfc_init_block (&block);
6410 tmp = null_pointer_node;
6411 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
6412 gfc_add_expr_to_block (&block, tmp);
6413 null_data = gfc_finish_block (&block);
6415 gfc_init_block (&block);
6416 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
6419 tmp = gfc_call_malloc (&block, type, size);
6420 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6421 dest, fold_convert (type, tmp));
6422 gfc_add_expr_to_block (&block, tmp);
6425 tmp = built_in_decls[BUILT_IN_MEMCPY];
6426 tmp = build_call_expr_loc (input_location, tmp, 3,
6431 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
6432 null_data = gfc_finish_block (&block);
6434 gfc_init_block (&block);
6435 nelems = get_full_array_size (&block, src, rank);
6436 tmp = fold_convert (gfc_array_index_type,
6437 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
6438 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6442 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
6443 tmp = gfc_call_malloc (&block, tmp, size);
6444 gfc_conv_descriptor_data_set (&block, dest, tmp);
6447 /* We know the temporary and the value will be the same length,
6448 so can use memcpy. */
6449 tmp = built_in_decls[BUILT_IN_MEMCPY];
6450 tmp = build_call_expr_loc (input_location,
6451 tmp, 3, gfc_conv_descriptor_data_get (dest),
6452 gfc_conv_descriptor_data_get (src), size);
6455 gfc_add_expr_to_block (&block, tmp);
6456 tmp = gfc_finish_block (&block);
6458 /* Null the destination if the source is null; otherwise do
6459 the allocate and copy. */
6463 null_cond = gfc_conv_descriptor_data_get (src);
6465 null_cond = convert (pvoid_type_node, null_cond);
6466 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6467 null_cond, null_pointer_node);
6468 return build3_v (COND_EXPR, null_cond, tmp, null_data);
6472 /* Allocate dest to the same size as src, and copy data src -> dest. */
6475 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
6477 return duplicate_allocatable (dest, src, type, rank, false);
6481 /* Copy data src -> dest. */
6484 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
6486 return duplicate_allocatable (dest, src, type, rank, true);
6490 /* Recursively traverse an object of derived type, generating code to
6491 deallocate, nullify or copy allocatable components. This is the work horse
6492 function for the functions named in this enum. */
6494 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
6495 COPY_ONLY_ALLOC_COMP};
6498 structure_alloc_comps (gfc_symbol * der_type, tree decl,
6499 tree dest, int rank, int purpose)
6503 stmtblock_t fnblock;
6504 stmtblock_t loopbody;
6515 tree null_cond = NULL_TREE;
6517 gfc_init_block (&fnblock);
6519 decl_type = TREE_TYPE (decl);
6521 if ((POINTER_TYPE_P (decl_type) && rank != 0)
6522 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
6524 decl = build_fold_indirect_ref_loc (input_location,
6527 /* Just in case in gets dereferenced. */
6528 decl_type = TREE_TYPE (decl);
6530 /* If this an array of derived types with allocatable components
6531 build a loop and recursively call this function. */
6532 if (TREE_CODE (decl_type) == ARRAY_TYPE
6533 || GFC_DESCRIPTOR_TYPE_P (decl_type))
6535 tmp = gfc_conv_array_data (decl);
6536 var = build_fold_indirect_ref_loc (input_location,
6539 /* Get the number of elements - 1 and set the counter. */
6540 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
6542 /* Use the descriptor for an allocatable array. Since this
6543 is a full array reference, we only need the descriptor
6544 information from dimension = rank. */
6545 tmp = get_full_array_size (&fnblock, decl, rank);
6546 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6547 gfc_array_index_type, tmp,
6548 gfc_index_one_node);
6550 null_cond = gfc_conv_descriptor_data_get (decl);
6551 null_cond = fold_build2_loc (input_location, NE_EXPR,
6552 boolean_type_node, null_cond,
6553 build_int_cst (TREE_TYPE (null_cond), 0));
6557 /* Otherwise use the TYPE_DOMAIN information. */
6558 tmp = array_type_nelts (decl_type);
6559 tmp = fold_convert (gfc_array_index_type, tmp);
6562 /* Remember that this is, in fact, the no. of elements - 1. */
6563 nelems = gfc_evaluate_now (tmp, &fnblock);
6564 index = gfc_create_var (gfc_array_index_type, "S");
6566 /* Build the body of the loop. */
6567 gfc_init_block (&loopbody);
6569 vref = gfc_build_array_ref (var, index, NULL);
6571 if (purpose == COPY_ALLOC_COMP)
6573 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
6575 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
6576 gfc_add_expr_to_block (&fnblock, tmp);
6578 tmp = build_fold_indirect_ref_loc (input_location,
6579 gfc_conv_array_data (dest));
6580 dref = gfc_build_array_ref (tmp, index, NULL);
6581 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
6583 else if (purpose == COPY_ONLY_ALLOC_COMP)
6585 tmp = build_fold_indirect_ref_loc (input_location,
6586 gfc_conv_array_data (dest));
6587 dref = gfc_build_array_ref (tmp, index, NULL);
6588 tmp = structure_alloc_comps (der_type, vref, dref, rank,
6592 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
6594 gfc_add_expr_to_block (&loopbody, tmp);
6596 /* Build the loop and return. */
6597 gfc_init_loopinfo (&loop);
6599 loop.from[0] = gfc_index_zero_node;
6600 loop.loopvar[0] = index;
6601 loop.to[0] = nelems;
6602 gfc_trans_scalarizing_loops (&loop, &loopbody);
6603 gfc_add_block_to_block (&fnblock, &loop.pre);
6605 tmp = gfc_finish_block (&fnblock);
6606 if (null_cond != NULL_TREE)
6607 tmp = build3_v (COND_EXPR, null_cond, tmp,
6608 build_empty_stmt (input_location));
6613 /* Otherwise, act on the components or recursively call self to
6614 act on a chain of components. */
6615 for (c = der_type->components; c; c = c->next)
6617 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
6618 || c->ts.type == BT_CLASS)
6619 && c->ts.u.derived->attr.alloc_comp;
6620 cdecl = c->backend_decl;
6621 ctype = TREE_TYPE (cdecl);
6625 case DEALLOCATE_ALLOC_COMP:
6626 if (c->attr.allocatable && c->attr.dimension)
6628 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6629 decl, cdecl, NULL_TREE);
6630 if (cmp_has_alloc_comps && !c->attr.pointer)
6632 /* Do not deallocate the components of ultimate pointer
6634 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6635 c->as->rank, purpose);
6636 gfc_add_expr_to_block (&fnblock, tmp);
6638 tmp = gfc_trans_dealloc_allocated (comp);
6639 gfc_add_expr_to_block (&fnblock, tmp);
6641 else if (c->attr.allocatable)
6643 /* Allocatable scalar components. */
6644 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6645 decl, cdecl, NULL_TREE);
6647 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6649 gfc_add_expr_to_block (&fnblock, tmp);
6651 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6652 void_type_node, comp,
6653 build_int_cst (TREE_TYPE (comp), 0));
6654 gfc_add_expr_to_block (&fnblock, tmp);
6656 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6658 /* Allocatable scalar CLASS components. */
6659 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6660 decl, cdecl, NULL_TREE);
6662 /* Add reference to '_data' component. */
6663 tmp = CLASS_DATA (c)->backend_decl;
6664 comp = fold_build3_loc (input_location, COMPONENT_REF,
6665 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6667 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6668 CLASS_DATA (c)->ts);
6669 gfc_add_expr_to_block (&fnblock, tmp);
6671 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6672 void_type_node, comp,
6673 build_int_cst (TREE_TYPE (comp), 0));
6674 gfc_add_expr_to_block (&fnblock, tmp);
6678 case NULLIFY_ALLOC_COMP:
6679 if (c->attr.pointer)
6681 else if (c->attr.allocatable && c->attr.dimension)
6683 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6684 decl, cdecl, NULL_TREE);
6685 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
6687 else if (c->attr.allocatable)
6689 /* Allocatable scalar components. */
6690 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6691 decl, cdecl, NULL_TREE);
6692 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6693 void_type_node, comp,
6694 build_int_cst (TREE_TYPE (comp), 0));
6695 gfc_add_expr_to_block (&fnblock, tmp);
6697 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6699 /* Allocatable scalar CLASS components. */
6700 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6701 decl, cdecl, NULL_TREE);
6702 /* Add reference to '_data' component. */
6703 tmp = CLASS_DATA (c)->backend_decl;
6704 comp = fold_build3_loc (input_location, COMPONENT_REF,
6705 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6706 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6707 void_type_node, comp,
6708 build_int_cst (TREE_TYPE (comp), 0));
6709 gfc_add_expr_to_block (&fnblock, tmp);
6711 else if (cmp_has_alloc_comps)
6713 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6714 decl, cdecl, NULL_TREE);
6715 rank = c->as ? c->as->rank : 0;
6716 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6718 gfc_add_expr_to_block (&fnblock, tmp);
6722 case COPY_ALLOC_COMP:
6723 if (c->attr.pointer)
6726 /* We need source and destination components. */
6727 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
6729 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
6731 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
6733 if (c->attr.allocatable && !cmp_has_alloc_comps)
6735 rank = c->as ? c->as->rank : 0;
6736 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
6737 gfc_add_expr_to_block (&fnblock, tmp);
6740 if (cmp_has_alloc_comps)
6742 rank = c->as ? c->as->rank : 0;
6743 tmp = fold_convert (TREE_TYPE (dcmp), comp);
6744 gfc_add_modify (&fnblock, dcmp, tmp);
6745 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
6747 gfc_add_expr_to_block (&fnblock, tmp);
6757 return gfc_finish_block (&fnblock);
6760 /* Recursively traverse an object of derived type, generating code to
6761 nullify allocatable components. */
6764 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6766 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6767 NULLIFY_ALLOC_COMP);
6771 /* Recursively traverse an object of derived type, generating code to
6772 deallocate allocatable components. */
6775 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6777 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6778 DEALLOCATE_ALLOC_COMP);
6782 /* Recursively traverse an object of derived type, generating code to
6783 copy it and its allocatable components. */
6786 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
6788 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
6792 /* Recursively traverse an object of derived type, generating code to
6793 copy only its allocatable components. */
6796 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
6798 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
6802 /* Returns the value of LBOUND for an expression. This could be broken out
6803 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
6804 called by gfc_alloc_allocatable_for_assignment. */
6806 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
6811 tree cond, cond1, cond3, cond4;
6815 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
6817 tmp = gfc_rank_cst[dim];
6818 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
6819 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
6820 stride = gfc_conv_descriptor_stride_get (desc, tmp);
6821 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
6823 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
6824 stride, gfc_index_zero_node);
6825 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6826 boolean_type_node, cond3, cond1);
6827 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
6828 stride, gfc_index_zero_node);
6830 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
6831 tmp, build_int_cst (gfc_array_index_type,
6834 cond = boolean_false_node;
6836 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
6837 boolean_type_node, cond3, cond4);
6838 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
6839 boolean_type_node, cond, cond1);
6841 return fold_build3_loc (input_location, COND_EXPR,
6842 gfc_array_index_type, cond,
6843 lbound, gfc_index_one_node);
6845 else if (expr->expr_type == EXPR_VARIABLE)
6847 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
6848 for (ref = expr->ref; ref; ref = ref->next)
6850 if (ref->type == REF_COMPONENT
6851 && ref->u.c.component->as
6853 && ref->next->u.ar.type == AR_FULL)
6854 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
6856 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
6858 else if (expr->expr_type == EXPR_FUNCTION)
6860 /* A conversion function, so use the argument. */
6861 expr = expr->value.function.actual->expr;
6862 if (expr->expr_type != EXPR_VARIABLE)
6863 return gfc_index_one_node;
6864 desc = TREE_TYPE (expr->symtree->n.sym->backend_decl);
6865 return get_std_lbound (expr, desc, dim, assumed_size);
6868 return gfc_index_one_node;
6872 /* Returns true if an expression represents an lhs that can be reallocated
6876 gfc_is_reallocatable_lhs (gfc_expr *expr)
6883 /* An allocatable variable. */
6884 if (expr->symtree->n.sym->attr.allocatable
6886 && expr->ref->type == REF_ARRAY
6887 && expr->ref->u.ar.type == AR_FULL)
6890 /* All that can be left are allocatable components. */
6891 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
6892 && expr->symtree->n.sym->ts.type != BT_CLASS)
6893 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
6896 /* Find a component ref followed by an array reference. */
6897 for (ref = expr->ref; ref; ref = ref->next)
6899 && ref->type == REF_COMPONENT
6900 && ref->next->type == REF_ARRAY
6901 && !ref->next->next)
6907 /* Return true if valid reallocatable lhs. */
6908 if (ref->u.c.component->attr.allocatable
6909 && ref->next->u.ar.type == AR_FULL)
6916 /* Allocate the lhs of an assignment to an allocatable array, otherwise
6920 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
6924 stmtblock_t realloc_block;
6925 stmtblock_t alloc_block;
6948 gfc_array_spec * as;
6950 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
6951 Find the lhs expression in the loop chain and set expr1 and
6952 expr2 accordingly. */
6953 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
6956 /* Find the ss for the lhs. */
6958 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
6959 if (lss->expr && lss->expr->expr_type == EXPR_VARIABLE)
6961 if (lss == gfc_ss_terminator)
6966 /* Bail out if this is not a valid allocate on assignment. */
6967 if (!gfc_is_reallocatable_lhs (expr1)
6968 || (expr2 && !expr2->rank))
6971 /* Find the ss for the lhs. */
6973 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
6974 if (lss->expr == expr1)
6977 if (lss == gfc_ss_terminator)
6980 /* Find an ss for the rhs. For operator expressions, we see the
6981 ss's for the operands. Any one of these will do. */
6983 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
6984 if (rss->expr != expr1 && rss != loop->temp_ss)
6987 if (expr2 && rss == gfc_ss_terminator)
6990 gfc_start_block (&fblock);
6992 /* Since the lhs is allocatable, this must be a descriptor type.
6993 Get the data and array size. */
6994 desc = lss->data.info.descriptor;
6995 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
6996 array1 = gfc_conv_descriptor_data_get (desc);
6998 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
6999 deallocated if expr is an array of different shape or any of the
7000 corresponding length type parameter values of variable and expr
7001 differ." This assures F95 compatibility. */
7002 jump_label1 = gfc_build_label_decl (NULL_TREE);
7003 jump_label2 = gfc_build_label_decl (NULL_TREE);
7005 /* Allocate if data is NULL. */
7006 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7007 array1, build_int_cst (TREE_TYPE (array1), 0));
7008 tmp = build3_v (COND_EXPR, cond,
7009 build1_v (GOTO_EXPR, jump_label1),
7010 build_empty_stmt (input_location));
7011 gfc_add_expr_to_block (&fblock, tmp);
7013 /* Get arrayspec if expr is a full array. */
7014 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7015 && expr2->value.function.isym
7016 && expr2->value.function.isym->conversion)
7018 /* For conversion functions, take the arg. */
7019 gfc_expr *arg = expr2->value.function.actual->expr;
7020 as = gfc_get_full_arrayspec_from_expr (arg);
7023 as = gfc_get_full_arrayspec_from_expr (expr2);
7027 /* If the lhs shape is not the same as the rhs jump to setting the
7028 bounds and doing the reallocation....... */
7029 for (n = 0; n < expr1->rank; n++)
7031 /* Check the shape. */
7032 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7033 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7034 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7035 gfc_array_index_type,
7036 loop->to[n], loop->from[n]);
7037 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7038 gfc_array_index_type,
7040 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7041 gfc_array_index_type,
7043 cond = fold_build2_loc (input_location, NE_EXPR,
7045 tmp, gfc_index_zero_node);
7046 tmp = build3_v (COND_EXPR, cond,
7047 build1_v (GOTO_EXPR, jump_label1),
7048 build_empty_stmt (input_location));
7049 gfc_add_expr_to_block (&fblock, tmp);
7052 /* ....else jump past the (re)alloc code. */
7053 tmp = build1_v (GOTO_EXPR, jump_label2);
7054 gfc_add_expr_to_block (&fblock, tmp);
7056 /* Add the label to start automatic (re)allocation. */
7057 tmp = build1_v (LABEL_EXPR, jump_label1);
7058 gfc_add_expr_to_block (&fblock, tmp);
7060 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7062 /* Get the rhs size. Fix both sizes. */
7064 desc2 = rss->data.info.descriptor;
7067 size2 = gfc_index_one_node;
7068 for (n = 0; n < expr2->rank; n++)
7070 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7071 gfc_array_index_type,
7072 loop->to[n], loop->from[n]);
7073 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7074 gfc_array_index_type,
7075 tmp, gfc_index_one_node);
7076 size2 = fold_build2_loc (input_location, MULT_EXPR,
7077 gfc_array_index_type,
7081 size1 = gfc_evaluate_now (size1, &fblock);
7082 size2 = gfc_evaluate_now (size2, &fblock);
7084 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7086 neq_size = gfc_evaluate_now (cond, &fblock);
7089 /* Now modify the lhs descriptor and the associated scalarizer
7090 variables. F2003 7.4.1.3: "If variable is or becomes an
7091 unallocated allocatable variable, then it is allocated with each
7092 deferred type parameter equal to the corresponding type parameters
7093 of expr , with the shape of expr , and with each lower bound equal
7094 to the corresponding element of LBOUND(expr)."
7095 Reuse size1 to keep a dimension-by-dimension track of the
7096 stride of the new array. */
7097 size1 = gfc_index_one_node;
7098 offset = gfc_index_zero_node;
7100 for (n = 0; n < expr2->rank; n++)
7102 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7103 gfc_array_index_type,
7104 loop->to[n], loop->from[n]);
7105 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7106 gfc_array_index_type,
7107 tmp, gfc_index_one_node);
7109 lbound = gfc_index_one_node;
7114 lbd = get_std_lbound (expr2, desc2, n,
7115 as->type == AS_ASSUMED_SIZE);
7116 ubound = fold_build2_loc (input_location,
7118 gfc_array_index_type,
7120 ubound = fold_build2_loc (input_location,
7122 gfc_array_index_type,
7127 gfc_conv_descriptor_lbound_set (&fblock, desc,
7130 gfc_conv_descriptor_ubound_set (&fblock, desc,
7133 gfc_conv_descriptor_stride_set (&fblock, desc,
7136 lbound = gfc_conv_descriptor_lbound_get (desc,
7138 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
7139 gfc_array_index_type,
7141 offset = fold_build2_loc (input_location, MINUS_EXPR,
7142 gfc_array_index_type,
7144 size1 = fold_build2_loc (input_location, MULT_EXPR,
7145 gfc_array_index_type,
7149 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
7150 the array offset is saved and the info.offset is used for a
7151 running offset. Use the saved_offset instead. */
7152 tmp = gfc_conv_descriptor_offset (desc);
7153 gfc_add_modify (&fblock, tmp, offset);
7154 if (lss->data.info.saved_offset
7155 && TREE_CODE (lss->data.info.saved_offset) == VAR_DECL)
7156 gfc_add_modify (&fblock, lss->data.info.saved_offset, tmp);
7158 /* Now set the deltas for the lhs. */
7159 for (n = 0; n < expr1->rank; n++)
7161 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7162 dim = lss->data.info.dim[n];
7163 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7164 gfc_array_index_type, tmp,
7166 if (lss->data.info.delta[dim]
7167 && TREE_CODE (lss->data.info.delta[dim]) == VAR_DECL)
7168 gfc_add_modify (&fblock, lss->data.info.delta[dim], tmp);
7171 /* Get the new lhs size in bytes. */
7172 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
7174 tmp = expr2->ts.u.cl->backend_decl;
7175 gcc_assert (expr1->ts.u.cl->backend_decl);
7176 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
7177 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
7179 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
7181 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
7182 tmp = fold_build2_loc (input_location, MULT_EXPR,
7183 gfc_array_index_type, tmp,
7184 expr1->ts.u.cl->backend_decl);
7187 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
7188 tmp = fold_convert (gfc_array_index_type, tmp);
7189 size2 = fold_build2_loc (input_location, MULT_EXPR,
7190 gfc_array_index_type,
7192 size2 = fold_convert (size_type_node, size2);
7193 size2 = gfc_evaluate_now (size2, &fblock);
7195 /* Realloc expression. Note that the scalarizer uses desc.data
7196 in the array reference - (*desc.data)[<element>]. */
7197 gfc_init_block (&realloc_block);
7198 tmp = build_call_expr_loc (input_location,
7199 built_in_decls[BUILT_IN_REALLOC], 2,
7200 fold_convert (pvoid_type_node, array1),
7202 gfc_conv_descriptor_data_set (&realloc_block,
7204 realloc_expr = gfc_finish_block (&realloc_block);
7206 /* Only reallocate if sizes are different. */
7207 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
7208 build_empty_stmt (input_location));
7212 /* Malloc expression. */
7213 gfc_init_block (&alloc_block);
7214 tmp = build_call_expr_loc (input_location,
7215 built_in_decls[BUILT_IN_MALLOC], 1,
7217 gfc_conv_descriptor_data_set (&alloc_block,
7219 tmp = gfc_conv_descriptor_dtype (desc);
7220 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
7221 alloc_expr = gfc_finish_block (&alloc_block);
7223 /* Malloc if not allocated; realloc otherwise. */
7224 tmp = build_int_cst (TREE_TYPE (array1), 0);
7225 cond = fold_build2_loc (input_location, EQ_EXPR,
7228 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
7229 gfc_add_expr_to_block (&fblock, tmp);
7231 /* Make sure that the scalarizer data pointer is updated. */
7232 if (lss->data.info.data
7233 && TREE_CODE (lss->data.info.data) == VAR_DECL)
7235 tmp = gfc_conv_descriptor_data_get (desc);
7236 gfc_add_modify (&fblock, lss->data.info.data, tmp);
7239 /* Add the exit label. */
7240 tmp = build1_v (LABEL_EXPR, jump_label2);
7241 gfc_add_expr_to_block (&fblock, tmp);
7243 return gfc_finish_block (&fblock);
7247 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
7248 Do likewise, recursively if necessary, with the allocatable components of
7252 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
7258 stmtblock_t cleanup;
7261 bool sym_has_alloc_comp;
7263 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
7264 || sym->ts.type == BT_CLASS)
7265 && sym->ts.u.derived->attr.alloc_comp;
7267 /* Make sure the frontend gets these right. */
7268 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
7269 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
7270 "allocatable attribute or derived type without allocatable "
7273 gfc_save_backend_locus (&loc);
7274 gfc_set_backend_locus (&sym->declared_at);
7275 gfc_init_block (&init);
7277 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
7278 || TREE_CODE (sym->backend_decl) == PARM_DECL);
7280 if (sym->ts.type == BT_CHARACTER
7281 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
7283 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
7284 gfc_trans_vla_type_sizes (sym, &init);
7287 /* Dummy, use associated and result variables don't need anything special. */
7288 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
7290 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7291 gfc_restore_backend_locus (&loc);
7295 descriptor = sym->backend_decl;
7297 /* Although static, derived types with default initializers and
7298 allocatable components must not be nulled wholesale; instead they
7299 are treated component by component. */
7300 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
7302 /* SAVEd variables are not freed on exit. */
7303 gfc_trans_static_array_pointer (sym);
7305 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7306 gfc_restore_backend_locus (&loc);
7310 /* Get the descriptor type. */
7311 type = TREE_TYPE (sym->backend_decl);
7313 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
7316 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
7318 if (sym->value == NULL
7319 || !gfc_has_default_initializer (sym->ts.u.derived))
7321 rank = sym->as ? sym->as->rank : 0;
7322 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
7324 gfc_add_expr_to_block (&init, tmp);
7327 gfc_init_default_dt (sym, &init, false);
7330 else if (!GFC_DESCRIPTOR_TYPE_P (type))
7332 /* If the backend_decl is not a descriptor, we must have a pointer
7334 descriptor = build_fold_indirect_ref_loc (input_location,
7336 type = TREE_TYPE (descriptor);
7339 /* NULLIFY the data pointer. */
7340 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
7341 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
7343 gfc_restore_backend_locus (&loc);
7344 gfc_init_block (&cleanup);
7346 /* Allocatable arrays need to be freed when they go out of scope.
7347 The allocatable components of pointers must not be touched. */
7348 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
7349 && !sym->attr.pointer && !sym->attr.save)
7352 rank = sym->as ? sym->as->rank : 0;
7353 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
7354 gfc_add_expr_to_block (&cleanup, tmp);
7357 if (sym->attr.allocatable && sym->attr.dimension
7358 && !sym->attr.save && !sym->attr.result)
7360 tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
7361 gfc_add_expr_to_block (&cleanup, tmp);
7364 gfc_add_init_cleanup (block, gfc_finish_block (&init),
7365 gfc_finish_block (&cleanup));
7368 /************ Expression Walking Functions ******************/
7370 /* Walk a variable reference.
7372 Possible extension - multiple component subscripts.
7373 x(:,:) = foo%a(:)%b(:)
7375 forall (i=..., j=...)
7376 x(i,j) = foo%a(j)%b(i)
7378 This adds a fair amount of complexity because you need to deal with more
7379 than one ref. Maybe handle in a similar manner to vector subscripts.
7380 Maybe not worth the effort. */
7384 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
7391 for (ref = expr->ref; ref; ref = ref->next)
7392 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
7395 for (; ref; ref = ref->next)
7397 if (ref->type == REF_SUBSTRING)
7399 newss = gfc_get_ss ();
7400 newss->type = GFC_SS_SCALAR;
7401 newss->expr = ref->u.ss.start;
7405 newss = gfc_get_ss ();
7406 newss->type = GFC_SS_SCALAR;
7407 newss->expr = ref->u.ss.end;
7412 /* We're only interested in array sections from now on. */
7413 if (ref->type != REF_ARRAY)
7418 if (ar->as->rank == 0)
7420 /* Scalar coarray. */
7427 for (n = 0; n < ar->dimen + ar->codimen; n++)
7429 newss = gfc_get_ss ();
7430 newss->type = GFC_SS_SCALAR;
7431 newss->expr = ar->start[n];
7438 newss = gfc_get_ss ();
7439 newss->type = GFC_SS_SECTION;
7442 newss->data.info.dimen = ar->as->rank;
7443 newss->data.info.codimen = 0;
7444 newss->data.info.ref = ref;
7446 /* Make sure array is the same as array(:,:), this way
7447 we don't need to special case all the time. */
7448 ar->dimen = ar->as->rank;
7450 for (n = 0; n < ar->dimen; n++)
7452 newss->data.info.dim[n] = n;
7453 ar->dimen_type[n] = DIMEN_RANGE;
7455 gcc_assert (ar->start[n] == NULL);
7456 gcc_assert (ar->end[n] == NULL);
7457 gcc_assert (ar->stride[n] == NULL);
7459 for (n = ar->dimen; n < ar->dimen + ar->as->corank; n++)
7461 newss->data.info.dim[n] = n;
7462 ar->dimen_type[n] = DIMEN_RANGE;
7464 gcc_assert (ar->start[n] == NULL);
7465 gcc_assert (ar->end[n] == NULL);
7471 newss = gfc_get_ss ();
7472 newss->type = GFC_SS_SECTION;
7475 newss->data.info.dimen = 0;
7476 newss->data.info.codimen = 0;
7477 newss->data.info.ref = ref;
7479 /* We add SS chains for all the subscripts in the section. */
7480 for (n = 0; n < ar->dimen + ar->codimen; n++)
7484 switch (ar->dimen_type[n])
7486 case DIMEN_THIS_IMAGE:
7489 /* Add SS for elemental (scalar) subscripts. */
7490 gcc_assert (ar->start[n]);
7491 indexss = gfc_get_ss ();
7492 indexss->type = GFC_SS_SCALAR;
7493 indexss->expr = ar->start[n];
7494 indexss->next = gfc_ss_terminator;
7495 indexss->loop_chain = gfc_ss_terminator;
7496 newss->data.info.subscript[n] = indexss;
7500 /* We don't add anything for sections, just remember this
7501 dimension for later. */
7502 newss->data.info.dim[newss->data.info.dimen
7503 + newss->data.info.codimen] = n;
7505 newss->data.info.dimen++;
7509 /* Create a GFC_SS_VECTOR index in which we can store
7510 the vector's descriptor. */
7511 indexss = gfc_get_ss ();
7512 indexss->type = GFC_SS_VECTOR;
7513 indexss->expr = ar->start[n];
7514 indexss->next = gfc_ss_terminator;
7515 indexss->loop_chain = gfc_ss_terminator;
7516 newss->data.info.subscript[n] = indexss;
7517 newss->data.info.dim[newss->data.info.dimen
7518 + newss->data.info.codimen] = n;
7520 newss->data.info.dimen++;
7524 /* We should know what sort of section it is by now. */
7528 /* We should have at least one non-elemental dimension. */
7529 gcc_assert (newss->data.info.dimen > 0);
7534 /* We should know what sort of section it is by now. */
7543 /* Walk an expression operator. If only one operand of a binary expression is
7544 scalar, we must also add the scalar term to the SS chain. */
7547 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
7553 head = gfc_walk_subexpr (ss, expr->value.op.op1);
7554 if (expr->value.op.op2 == NULL)
7557 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
7559 /* All operands are scalar. Pass back and let the caller deal with it. */
7563 /* All operands require scalarization. */
7564 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
7567 /* One of the operands needs scalarization, the other is scalar.
7568 Create a gfc_ss for the scalar expression. */
7569 newss = gfc_get_ss ();
7570 newss->type = GFC_SS_SCALAR;
7573 /* First operand is scalar. We build the chain in reverse order, so
7574 add the scalar SS after the second operand. */
7576 while (head && head->next != ss)
7578 /* Check we haven't somehow broken the chain. */
7582 newss->expr = expr->value.op.op1;
7584 else /* head2 == head */
7586 gcc_assert (head2 == head);
7587 /* Second operand is scalar. */
7588 newss->next = head2;
7590 newss->expr = expr->value.op.op2;
7597 /* Reverse a SS chain. */
7600 gfc_reverse_ss (gfc_ss * ss)
7605 gcc_assert (ss != NULL);
7607 head = gfc_ss_terminator;
7608 while (ss != gfc_ss_terminator)
7611 /* Check we didn't somehow break the chain. */
7612 gcc_assert (next != NULL);
7622 /* Walk the arguments of an elemental function. */
7625 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
7633 head = gfc_ss_terminator;
7636 for (; arg; arg = arg->next)
7641 newss = gfc_walk_subexpr (head, arg->expr);
7644 /* Scalar argument. */
7645 newss = gfc_get_ss ();
7647 newss->expr = arg->expr;
7657 while (tail->next != gfc_ss_terminator)
7664 /* If all the arguments are scalar we don't need the argument SS. */
7665 gfc_free_ss_chain (head);
7670 /* Add it onto the existing chain. */
7676 /* Walk a function call. Scalar functions are passed back, and taken out of
7677 scalarization loops. For elemental functions we walk their arguments.
7678 The result of functions returning arrays is stored in a temporary outside
7679 the loop, so that the function is only called once. Hence we do not need
7680 to walk their arguments. */
7683 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
7686 gfc_intrinsic_sym *isym;
7688 gfc_component *comp = NULL;
7691 isym = expr->value.function.isym;
7693 /* Handle intrinsic functions separately. */
7695 return gfc_walk_intrinsic_function (ss, expr, isym);
7697 sym = expr->value.function.esym;
7699 sym = expr->symtree->n.sym;
7701 /* A function that returns arrays. */
7702 gfc_is_proc_ptr_comp (expr, &comp);
7703 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
7704 || (comp && comp->attr.dimension))
7706 newss = gfc_get_ss ();
7707 newss->type = GFC_SS_FUNCTION;
7710 newss->data.info.dimen = expr->rank;
7711 for (n = 0; n < newss->data.info.dimen; n++)
7712 newss->data.info.dim[n] = n;
7716 /* Walk the parameters of an elemental function. For now we always pass
7718 if (sym->attr.elemental)
7719 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
7722 /* Scalar functions are OK as these are evaluated outside the scalarization
7723 loop. Pass back and let the caller deal with it. */
7728 /* An array temporary is constructed for array constructors. */
7731 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
7736 newss = gfc_get_ss ();
7737 newss->type = GFC_SS_CONSTRUCTOR;
7740 newss->data.info.dimen = expr->rank;
7741 for (n = 0; n < expr->rank; n++)
7742 newss->data.info.dim[n] = n;
7748 /* Walk an expression. Add walked expressions to the head of the SS chain.
7749 A wholly scalar expression will not be added. */
7752 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
7756 switch (expr->expr_type)
7759 head = gfc_walk_variable_expr (ss, expr);
7763 head = gfc_walk_op_expr (ss, expr);
7767 head = gfc_walk_function_expr (ss, expr);
7772 case EXPR_STRUCTURE:
7773 /* Pass back and let the caller deal with it. */
7777 head = gfc_walk_array_constructor (ss, expr);
7780 case EXPR_SUBSTRING:
7781 /* Pass back and let the caller deal with it. */
7785 internal_error ("bad expression type during walk (%d)",
7792 /* Entry point for expression walking.
7793 A return value equal to the passed chain means this is
7794 a scalar expression. It is up to the caller to take whatever action is
7795 necessary to translate these. */
7798 gfc_walk_expr (gfc_expr * expr)
7802 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
7803 return gfc_reverse_ss (res);