1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* trans-array.c-- Various array related code, including scalarization,
25 allocation, initialization and other support routines. */
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
39 If the expression is an assignment, we must then resolve any dependencies.
40 In fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this gfc_make_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
67 For assignment expressions requiring a temporary two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "diagnostic-core.h"	/* For internal_error/fatal_error.  */
#include "flags.h"
#include "gfortran.h"
#include "constructor.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "dependency.h"
96 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
98 /* The contents of this structure aren't actually used, just the address. */
99 static gfc_ss gfc_ss_terminator_var;
100 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
104 gfc_array_dataptr_type (tree desc)
106 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
110 /* Build expressions to access the members of an array descriptor.
111 It's surprisingly easy to mess up here, so never access
112 an array descriptor by "brute force", always use these
113 functions. This also avoids problems if we change the format
114 of an array descriptor.
116 To understand these magic numbers, look at the comments
117 before gfc_build_array_type() in trans-types.c.
119 The code within these defines should be the only code which knows the format
120 of an array descriptor.
   Any code just needing to obtain the bounds of an array should use
   gfc_conv_array_* rather than the following functions, as these will
   return known constant values, and work with arrays which do not have
   descriptors.
126 Don't forget to #undef these! */
/* Indices of the fields of an array descriptor record, and of the
   sub-fields of each per-dimension record.  DATA_FIELD must be 0; the
   accessors below assert this.  */
#define DATA_FIELD 0
#define OFFSET_FIELD 1
#define DTYPE_FIELD 2
#define DIMENSION_FIELD 3

#define STRIDE_SUBFIELD 0
#define LBOUND_SUBFIELD 1
#define UBOUND_SUBFIELD 2
137 /* This provides READ-ONLY access to the data field. The field itself
138 doesn't have the proper type. */
141 gfc_conv_descriptor_data_get (tree desc)
145 type = TREE_TYPE (desc);
146 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
148 field = TYPE_FIELDS (type);
149 gcc_assert (DATA_FIELD == 0);
151 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
153 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
158 /* This provides WRITE access to the data field.
160 TUPLES_P is true if we are generating tuples.
162 This function gets called through the following macros:
163 gfc_conv_descriptor_data_set
164 gfc_conv_descriptor_data_set. */
167 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
171 type = TREE_TYPE (desc);
172 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
174 field = TYPE_FIELDS (type);
175 gcc_assert (DATA_FIELD == 0);
177 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
179 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
183 /* This provides address access to the data field. This should only be
184 used by array allocation, passing this on to the runtime. */
187 gfc_conv_descriptor_data_addr (tree desc)
191 type = TREE_TYPE (desc);
192 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
194 field = TYPE_FIELDS (type);
195 gcc_assert (DATA_FIELD == 0);
197 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
199 return gfc_build_addr_expr (NULL_TREE, t);
203 gfc_conv_descriptor_offset (tree desc)
208 type = TREE_TYPE (desc);
209 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
211 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
212 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
214 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
215 desc, field, NULL_TREE);
219 gfc_conv_descriptor_offset_get (tree desc)
221 return gfc_conv_descriptor_offset (desc);
225 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
228 tree t = gfc_conv_descriptor_offset (desc);
229 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
234 gfc_conv_descriptor_dtype (tree desc)
239 type = TREE_TYPE (desc);
240 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
242 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
243 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
245 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
246 desc, field, NULL_TREE);
250 gfc_conv_descriptor_dimension (tree desc, tree dim)
256 type = TREE_TYPE (desc);
257 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
259 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
260 gcc_assert (field != NULL_TREE
261 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
262 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
264 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
265 desc, field, NULL_TREE);
266 tmp = gfc_build_array_ref (tmp, dim, NULL);
271 gfc_conv_descriptor_stride (tree desc, tree dim)
276 tmp = gfc_conv_descriptor_dimension (desc, dim);
277 field = TYPE_FIELDS (TREE_TYPE (tmp));
278 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
279 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
281 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
282 tmp, field, NULL_TREE);
287 gfc_conv_descriptor_stride_get (tree desc, tree dim)
289 tree type = TREE_TYPE (desc);
290 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
291 if (integer_zerop (dim)
292 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
293 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
294 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
295 return gfc_index_one_node;
297 return gfc_conv_descriptor_stride (desc, dim);
301 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
302 tree dim, tree value)
304 tree t = gfc_conv_descriptor_stride (desc, dim);
305 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
309 gfc_conv_descriptor_lbound (tree desc, tree dim)
314 tmp = gfc_conv_descriptor_dimension (desc, dim);
315 field = TYPE_FIELDS (TREE_TYPE (tmp));
316 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
317 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
319 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
320 tmp, field, NULL_TREE);
325 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
327 return gfc_conv_descriptor_lbound (desc, dim);
331 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
332 tree dim, tree value)
334 tree t = gfc_conv_descriptor_lbound (desc, dim);
335 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
339 gfc_conv_descriptor_ubound (tree desc, tree dim)
344 tmp = gfc_conv_descriptor_dimension (desc, dim);
345 field = TYPE_FIELDS (TREE_TYPE (tmp));
346 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
347 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
349 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
350 tmp, field, NULL_TREE);
355 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
357 return gfc_conv_descriptor_ubound (desc, dim);
361 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
362 tree dim, tree value)
364 tree t = gfc_conv_descriptor_ubound (desc, dim);
365 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
368 /* Build a null array descriptor constructor. */
371 gfc_build_null_descriptor (tree type)
376 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
377 gcc_assert (DATA_FIELD == 0);
378 field = TYPE_FIELDS (type);
380 /* Set a NULL data pointer. */
381 tmp = build_constructor_single (type, field, null_pointer_node);
382 TREE_CONSTANT (tmp) = 1;
383 /* All other fields are ignored. */
389 /* Modify a descriptor such that the lbound of a given dimension is the value
390 specified. This also updates ubound and offset accordingly. */
393 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
394 int dim, tree new_lbound)
396 tree offs, ubound, lbound, stride;
397 tree diff, offs_diff;
399 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
401 offs = gfc_conv_descriptor_offset_get (desc);
402 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
403 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
404 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
406 /* Get difference (new - old) by which to shift stuff. */
407 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
410 /* Shift ubound and offset accordingly. This has to be done before
411 updating the lbound, as they depend on the lbound expression! */
412 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
414 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
415 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
417 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
419 gfc_conv_descriptor_offset_set (block, desc, offs);
421 /* Finally set lbound to value we want. */
422 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
/* Cleanup those #defines.  */

#undef DATA_FIELD
#undef OFFSET_FIELD
#undef DTYPE_FIELD
#undef DIMENSION_FIELD
#undef STRIDE_SUBFIELD
#undef LBOUND_SUBFIELD
#undef UBOUND_SUBFIELD
437 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
438 flags & 1 = Main loop body.
439 flags & 2 = temp copy loop. */
442 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
444 for (; ss != gfc_ss_terminator; ss = ss->next)
445 ss->useflags = flags;
448 static void gfc_free_ss (gfc_ss *);
451 /* Free a gfc_ss chain. */
454 gfc_free_ss_chain (gfc_ss * ss)
458 while (ss != gfc_ss_terminator)
460 gcc_assert (ss != NULL);
471 gfc_free_ss (gfc_ss * ss)
478 for (n = 0; n < ss->data.info.dimen; n++)
480 if (ss->data.info.subscript[ss->data.info.dim[n]])
481 gfc_free_ss_chain (ss->data.info.subscript[ss->data.info.dim[n]]);
493 /* Free all the SS associated with a loop. */
496 gfc_cleanup_loop (gfc_loopinfo * loop)
502 while (ss != gfc_ss_terminator)
504 gcc_assert (ss != NULL);
505 next = ss->loop_chain;
512 /* Associate a SS chain with a loop. */
515 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
519 if (head == gfc_ss_terminator)
523 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
525 if (ss->next == gfc_ss_terminator)
526 ss->loop_chain = loop->ss;
528 ss->loop_chain = ss->next;
530 gcc_assert (ss == gfc_ss_terminator);
535 /* Generate an initializer for a static pointer or allocatable array. */
538 gfc_trans_static_array_pointer (gfc_symbol * sym)
542 gcc_assert (TREE_STATIC (sym->backend_decl));
543 /* Just zero the data member. */
544 type = TREE_TYPE (sym->backend_decl);
545 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
549 /* If the bounds of SE's loop have not yet been set, see if they can be
550 determined from array spec AS, which is the array spec of a called
551 function. MAPPING maps the callee's dummy arguments to the values
552 that the caller is passing. Add any initialization and finalization
556 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
557 gfc_se * se, gfc_array_spec * as)
565 if (as && as->type == AS_EXPLICIT)
566 for (n = 0; n < se->loop->dimen + se->loop->codimen; n++)
568 dim = se->ss->data.info.dim[n];
569 gcc_assert (dim < as->rank);
570 gcc_assert (se->loop->dimen == as->rank);
571 if (se->loop->to[n] == NULL_TREE)
573 /* Evaluate the lower bound. */
574 gfc_init_se (&tmpse, NULL);
575 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
576 gfc_add_block_to_block (&se->pre, &tmpse.pre);
577 gfc_add_block_to_block (&se->post, &tmpse.post);
578 lower = fold_convert (gfc_array_index_type, tmpse.expr);
580 if (se->loop->codimen == 0
581 || n < se->loop->dimen + se->loop->codimen - 1)
583 /* ...and the upper bound. */
584 gfc_init_se (&tmpse, NULL);
585 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
586 gfc_add_block_to_block (&se->pre, &tmpse.pre);
587 gfc_add_block_to_block (&se->post, &tmpse.post);
588 upper = fold_convert (gfc_array_index_type, tmpse.expr);
590 /* Set the upper bound of the loop to UPPER - LOWER. */
591 tmp = fold_build2_loc (input_location, MINUS_EXPR,
592 gfc_array_index_type, upper, lower);
593 tmp = gfc_evaluate_now (tmp, &se->pre);
594 se->loop->to[n] = tmp;
601 /* Generate code to allocate an array temporary, or create a variable to
602 hold the data. If size is NULL, zero the descriptor so that the
603 callee will allocate the array. If DEALLOC is true, also generate code to
604 free the array afterwards.
606 If INITIAL is not NULL, it is packed using internal_pack and the result used
607 as data instead of allocating a fresh, unitialized area of memory.
609 Initialization code is added to PRE and finalization code to POST.
610 DYNAMIC is true if the caller may want to extend the array later
611 using realloc. This prevents us from putting the array on the stack. */
614 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
615 gfc_ss_info * info, tree size, tree nelem,
616 tree initial, bool dynamic, bool dealloc)
622 desc = info->descriptor;
623 info->offset = gfc_index_zero_node;
624 if (size == NULL_TREE || integer_zerop (size))
626 /* A callee allocated array. */
627 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
632 /* Allocate the temporary. */
633 onstack = !dynamic && initial == NULL_TREE
634 && (gfc_option.flag_stack_arrays
635 || gfc_can_put_var_on_stack (size));
639 /* Make a temporary variable to hold the data. */
640 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
641 nelem, gfc_index_one_node);
642 tmp = gfc_evaluate_now (tmp, pre);
643 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
645 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
647 tmp = gfc_create_var (tmp, "A");
648 /* If we're here only because of -fstack-arrays we have to
649 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
650 if (!gfc_can_put_var_on_stack (size))
651 gfc_add_expr_to_block (pre,
652 fold_build1_loc (input_location,
653 DECL_EXPR, TREE_TYPE (tmp),
655 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
656 gfc_conv_descriptor_data_set (pre, desc, tmp);
660 /* Allocate memory to hold the data or call internal_pack. */
661 if (initial == NULL_TREE)
663 tmp = gfc_call_malloc (pre, NULL, size);
664 tmp = gfc_evaluate_now (tmp, pre);
671 stmtblock_t do_copying;
673 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
674 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
675 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
676 tmp = gfc_get_element_type (tmp);
677 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
678 packed = gfc_create_var (build_pointer_type (tmp), "data");
680 tmp = build_call_expr_loc (input_location,
681 gfor_fndecl_in_pack, 1, initial);
682 tmp = fold_convert (TREE_TYPE (packed), tmp);
683 gfc_add_modify (pre, packed, tmp);
685 tmp = build_fold_indirect_ref_loc (input_location,
687 source_data = gfc_conv_descriptor_data_get (tmp);
689 /* internal_pack may return source->data without any allocation
690 or copying if it is already packed. If that's the case, we
691 need to allocate and copy manually. */
693 gfc_start_block (&do_copying);
694 tmp = gfc_call_malloc (&do_copying, NULL, size);
695 tmp = fold_convert (TREE_TYPE (packed), tmp);
696 gfc_add_modify (&do_copying, packed, tmp);
697 tmp = gfc_build_memcpy_call (packed, source_data, size);
698 gfc_add_expr_to_block (&do_copying, tmp);
700 was_packed = fold_build2_loc (input_location, EQ_EXPR,
701 boolean_type_node, packed,
703 tmp = gfc_finish_block (&do_copying);
704 tmp = build3_v (COND_EXPR, was_packed, tmp,
705 build_empty_stmt (input_location));
706 gfc_add_expr_to_block (pre, tmp);
708 tmp = fold_convert (pvoid_type_node, packed);
711 gfc_conv_descriptor_data_set (pre, desc, tmp);
714 info->data = gfc_conv_descriptor_data_get (desc);
716 /* The offset is zero because we create temporaries with a zero
718 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
720 if (dealloc && !onstack)
722 /* Free the temporary. */
723 tmp = gfc_conv_descriptor_data_get (desc);
724 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
725 gfc_add_expr_to_block (post, tmp);
730 /* Get the array reference dimension corresponding to the given loop dimension.
731 It is different from the true array dimension given by the dim array in
732 the case of a partial array reference
733 It is different from the loop dimension in the case of a transposed array.
737 get_array_ref_dim (gfc_ss_info *info, int loop_dim)
739 int n, array_dim, array_ref_dim;
742 array_dim = info->dim[loop_dim];
744 for (n = 0; n < info->dimen; n++)
745 if (n != loop_dim && info->dim[n] < array_dim)
748 return array_ref_dim;
752 /* Generate code to create and initialize the descriptor for a temporary
753 array. This is used for both temporaries needed by the scalarizer, and
754 functions returning arrays. Adjusts the loop variables to be
755 zero-based, and calculates the loop bounds for callee allocated arrays.
756 Allocate the array unless it's callee allocated (we have a callee
757 allocated array if 'callee_alloc' is true, or if loop->to[n] is
758 NULL_TREE for any n). Also fills in the descriptor, data and offset
759 fields of info if known. Returns the size of the array, or NULL for a
760 callee allocated array.
762 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
763 gfc_trans_allocate_array_storage.
767 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
768 gfc_loopinfo * loop, gfc_ss_info * info,
769 tree eltype, tree initial, bool dynamic,
770 bool dealloc, bool callee_alloc, locus * where)
772 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
782 memset (from, 0, sizeof (from));
783 memset (to, 0, sizeof (to));
785 gcc_assert (info->dimen > 0);
786 gcc_assert (loop->dimen == info->dimen);
788 if (gfc_option.warn_array_temp && where)
789 gfc_warning ("Creating array temporary at %L", where);
791 /* Set the lower bound to zero. */
792 for (n = 0; n < loop->dimen; n++)
796 /* Callee allocated arrays may not have a known bound yet. */
798 loop->to[n] = gfc_evaluate_now (
799 fold_build2_loc (input_location, MINUS_EXPR,
800 gfc_array_index_type,
801 loop->to[n], loop->from[n]),
803 loop->from[n] = gfc_index_zero_node;
805 /* We are constructing the temporary's descriptor based on the loop
806 dimensions. As the dimensions may be accessed in arbitrary order
807 (think of transpose) the size taken from the n'th loop may not map
808 to the n'th dimension of the array. We need to reconstruct loop infos
809 in the right order before using it to set the descriptor
811 tmp_dim = get_array_ref_dim (info, n);
812 from[tmp_dim] = loop->from[n];
813 to[tmp_dim] = loop->to[n];
815 info->delta[dim] = gfc_index_zero_node;
816 info->start[dim] = gfc_index_zero_node;
817 info->end[dim] = gfc_index_zero_node;
818 info->stride[dim] = gfc_index_one_node;
821 /* Initialize the descriptor. */
823 gfc_get_array_type_bounds (eltype, info->dimen, 0, from, to, 1,
824 GFC_ARRAY_UNKNOWN, true);
825 desc = gfc_create_var (type, "atmp");
826 GFC_DECL_PACKED_ARRAY (desc) = 1;
828 info->descriptor = desc;
829 size = gfc_index_one_node;
831 /* Fill in the array dtype. */
832 tmp = gfc_conv_descriptor_dtype (desc);
833 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
836 Fill in the bounds and stride. This is a packed array, so:
839 for (n = 0; n < rank; n++)
842 delta = ubound[n] + 1 - lbound[n];
845 size = size * sizeof(element);
850 /* If there is at least one null loop->to[n], it is a callee allocated
852 for (n = 0; n < loop->dimen; n++)
853 if (loop->to[n] == NULL_TREE)
859 for (n = 0; n < loop->dimen; n++)
863 if (size == NULL_TREE)
865 /* For a callee allocated array express the loop bounds in terms
866 of the descriptor fields. */
867 tmp = fold_build2_loc (input_location,
868 MINUS_EXPR, gfc_array_index_type,
869 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
870 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
875 /* Store the stride and bound components in the descriptor. */
876 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
878 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
879 gfc_index_zero_node);
881 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n],
884 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
885 to[n], gfc_index_one_node);
887 /* Check whether the size for this dimension is negative. */
888 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, tmp,
889 gfc_index_zero_node);
890 cond = gfc_evaluate_now (cond, pre);
895 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
896 boolean_type_node, or_expr, cond);
898 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
900 size = gfc_evaluate_now (size, pre);
902 for (n = info->dimen; n < info->dimen + info->codimen; n++)
904 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
905 gfc_index_zero_node);
906 if (n < info->dimen + info->codimen - 1)
907 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], loop->to[n]);
910 /* Get the size of the array. */
912 if (size && !callee_alloc)
914 /* If or_expr is true, then the extent in at least one
915 dimension is zero and the size is set to zero. */
916 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
917 or_expr, gfc_index_zero_node, size);
920 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
922 fold_convert (gfc_array_index_type,
923 TYPE_SIZE_UNIT (gfc_get_element_type (type))));
931 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
934 if (info->dimen > loop->temp_dim)
935 loop->temp_dim = info->dimen;
941 /* Return the number of iterations in a loop that starts at START,
942 ends at END, and has step STEP. */
945 gfc_get_iteration_count (tree start, tree end, tree step)
950 type = TREE_TYPE (step);
951 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
952 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
953 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
954 build_int_cst (type, 1));
955 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
956 build_int_cst (type, 0));
957 return fold_convert (gfc_array_index_type, tmp);
961 /* Extend the data in array DESC by EXTRA elements. */
964 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
971 if (integer_zerop (extra))
974 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
976 /* Add EXTRA to the upper bound. */
977 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
979 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
981 /* Get the value of the current data pointer. */
982 arg0 = gfc_conv_descriptor_data_get (desc);
984 /* Calculate the new array size. */
985 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
986 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
987 ubound, gfc_index_one_node);
988 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
989 fold_convert (size_type_node, tmp),
990 fold_convert (size_type_node, size));
992 /* Call the realloc() function. */
993 tmp = gfc_call_realloc (pblock, arg0, arg1);
994 gfc_conv_descriptor_data_set (pblock, desc, tmp);
998 /* Return true if the bounds of iterator I can only be determined
1002 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1004 return (i->start->expr_type != EXPR_CONSTANT
1005 || i->end->expr_type != EXPR_CONSTANT
1006 || i->step->expr_type != EXPR_CONSTANT);
1010 /* Split the size of constructor element EXPR into the sum of two terms,
1011 one of which can be determined at compile time and one of which must
1012 be calculated at run time. Set *SIZE to the former and return true
1013 if the latter might be nonzero. */
1016 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1018 if (expr->expr_type == EXPR_ARRAY)
1019 return gfc_get_array_constructor_size (size, expr->value.constructor);
1020 else if (expr->rank > 0)
1022 /* Calculate everything at run time. */
1023 mpz_set_ui (*size, 0);
1028 /* A single element. */
1029 mpz_set_ui (*size, 1);
1035 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1036 of array constructor C. */
1039 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1047 mpz_set_ui (*size, 0);
1052 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1055 if (i && gfc_iterator_has_dynamic_bounds (i))
1059 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1062 /* Multiply the static part of the element size by the
1063 number of iterations. */
1064 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1065 mpz_fdiv_q (val, val, i->step->value.integer);
1066 mpz_add_ui (val, val, 1);
1067 if (mpz_sgn (val) > 0)
1068 mpz_mul (len, len, val);
1070 mpz_set_ui (len, 0);
1072 mpz_add (*size, *size, len);
1081 /* Make sure offset is a variable. */
1084 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1087 /* We should have already created the offset variable. We cannot
1088 create it here because we may be in an inner scope. */
1089 gcc_assert (*offsetvar != NULL_TREE);
1090 gfc_add_modify (pblock, *offsetvar, *poffset);
1091 *poffset = *offsetvar;
1092 TREE_USED (*offsetvar) = 1;
1096 /* Variables needed for bounds-checking. */
1097 static bool first_len;
1098 static tree first_len_val;
1099 static bool typespec_chararray_ctor;
1102 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1103 tree offset, gfc_se * se, gfc_expr * expr)
1107 gfc_conv_expr (se, expr);
1109 /* Store the value. */
1110 tmp = build_fold_indirect_ref_loc (input_location,
1111 gfc_conv_descriptor_data_get (desc));
1112 tmp = gfc_build_array_ref (tmp, offset, NULL);
1114 if (expr->ts.type == BT_CHARACTER)
1116 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1119 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1120 esize = fold_convert (gfc_charlen_type_node, esize);
1121 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1122 gfc_charlen_type_node, esize,
1123 build_int_cst (gfc_charlen_type_node,
1124 gfc_character_kinds[i].bit_size / 8));
1126 gfc_conv_string_parameter (se);
1127 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1129 /* The temporary is an array of pointers. */
1130 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1131 gfc_add_modify (&se->pre, tmp, se->expr);
1135 /* The temporary is an array of string values. */
1136 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1137 /* We know the temporary and the value will be the same length,
1138 so can use memcpy. */
1139 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1140 se->string_length, se->expr, expr->ts.kind);
1142 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1146 gfc_add_modify (&se->pre, first_len_val,
1152 /* Verify that all constructor elements are of the same
1154 tree cond = fold_build2_loc (input_location, NE_EXPR,
1155 boolean_type_node, first_len_val,
1157 gfc_trans_runtime_check
1158 (true, false, cond, &se->pre, &expr->where,
1159 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1160 fold_convert (long_integer_type_node, first_len_val),
1161 fold_convert (long_integer_type_node, se->string_length));
1167 /* TODO: Should the frontend already have done this conversion? */
1168 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1169 gfc_add_modify (&se->pre, tmp, se->expr);
1172 gfc_add_block_to_block (pblock, &se->pre);
1173 gfc_add_block_to_block (pblock, &se->post);
1177 /* Add the contents of array expression EXPR to the constructor temporary
1178 described by DESC; *POFFSET / *OFFSETVAR track the next free element
slot in the temporary. DYNAMIC is as for
gfc_trans_array_constructor_value. */
1181 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1182 tree type ATTRIBUTE_UNUSED,
1183 tree desc, gfc_expr * expr,
1184 tree * poffset, tree * offsetvar,
1195 /* We need this to be a variable so we can increment it. */
1196 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1198 gfc_init_se (&se, NULL);
1200 /* Walk the array expression. */
1201 ss = gfc_walk_expr (expr);
1202 gcc_assert (ss != gfc_ss_terminator);
1204 /* Initialize the scalarizer. */
1205 gfc_init_loopinfo (&loop);
1206 gfc_add_ss_to_loop (&loop, ss);
1208 /* Initialize the loop. */
1209 gfc_conv_ss_startstride (&loop);
1210 gfc_conv_loop_setup (&loop, &expr->where);
1212 /* Make sure the constructed array has room for the new data. */
1215 /* Set SIZE to the total number of elements in the subarray. */
1216 size = gfc_index_one_node;
1217 for (n = 0; n < loop.dimen; n++)
1219 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1220 gfc_index_one_node);
1221 size = fold_build2_loc (input_location, MULT_EXPR,
1222 gfc_array_index_type, size, tmp);
1225 /* Grow the constructed array by SIZE elements. */
1226 gfc_grow_array (&loop.pre, desc, size);
1229 /* Make the loop body. */
1230 gfc_mark_ss_chain_used (ss, 1);
1231 gfc_start_scalarized_body (&loop, &body);
1232 gfc_copy_loopinfo_to_se (&se, &loop);
1235 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1236 gcc_assert (se.ss == gfc_ss_terminator);
1238 /* Increment the offset. */
1239 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1240 *poffset, gfc_index_one_node);
1241 gfc_add_modify (&body, *poffset, tmp);
1243 /* Finish the loop. */
1244 gfc_trans_scalarizing_loops (&loop, &body);
1245 gfc_add_block_to_block (&loop.pre, &loop.post);
1246 tmp = gfc_finish_block (&loop.pre);
1247 gfc_add_expr_to_block (pblock, tmp);
1249 gfc_cleanup_loop (&loop);
1253 /* Assign the values to the elements of an array constructor. DYNAMIC
1254 is true if descriptor DESC only contains enough data for the static
1255 size calculated by gfc_get_array_constructor_size. When true, memory
1256 for the dynamic parts must be allocated using realloc. */
1259 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1260 tree desc, gfc_constructor_base base,
1261 tree * poffset, tree * offsetvar,
1270 tree shadow_loopvar = NULL_TREE;
1271 gfc_saved_var saved_loopvar;
1274 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1276 /* If this is an iterator or an array, the offset must be a variable. */
1277 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1278 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1280 /* Shadowing the iterator avoids changing its value and saves us from
1281 keeping track of it. Further, it makes sure that there's always a
1282 backend-decl for the symbol, even if there wasn't one before,
1283 e.g. in the case of an iterator that appears in a specification
1284 expression in an interface mapping. */
1287 gfc_symbol *sym = c->iterator->var->symtree->n.sym;
1288 tree type = gfc_typenode_for_spec (&sym->ts);
1290 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1291 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1294 gfc_start_block (&body);
1296 if (c->expr->expr_type == EXPR_ARRAY)
1298 /* Array constructors can be nested. */
1299 gfc_trans_array_constructor_value (&body, type, desc,
1300 c->expr->value.constructor,
1301 poffset, offsetvar, dynamic);
1303 else if (c->expr->rank > 0)
1305 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1306 poffset, offsetvar, dynamic);
1310 /* This code really upsets the gimplifier so don't bother for now. */
1317 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1319 p = gfc_constructor_next (p);
1324 /* Scalar values. */
1325 gfc_init_se (&se, NULL);
1326 gfc_trans_array_ctor_element (&body, desc, *poffset,
1329 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1330 gfc_array_index_type,
1331 *poffset, gfc_index_one_node);
1335 /* Collect multiple scalar constants into a constructor. */
1336 VEC(constructor_elt,gc) *v = NULL;
1340 HOST_WIDE_INT idx = 0;
1343 /* Count the number of consecutive scalar constants. */
1344 while (p && !(p->iterator
1345 || p->expr->expr_type != EXPR_CONSTANT))
1347 gfc_init_se (&se, NULL);
1348 gfc_conv_constant (&se, p->expr);
1350 if (c->expr->ts.type != BT_CHARACTER)
1351 se.expr = fold_convert (type, se.expr);
1352 /* For constant character array constructors we build
1353 an array of pointers. */
1354 else if (POINTER_TYPE_P (type))
1355 se.expr = gfc_build_addr_expr
1356 (gfc_get_pchar_type (p->expr->ts.kind),
1359 CONSTRUCTOR_APPEND_ELT (v,
1360 build_int_cst (gfc_array_index_type,
1364 p = gfc_constructor_next (p);
1367 bound = build_int_cst (NULL_TREE, n - 1);
1368 /* Create an array type to hold them. */
1369 tmptype = build_range_type (gfc_array_index_type,
1370 gfc_index_zero_node, bound);
1371 tmptype = build_array_type (type, tmptype);
1373 init = build_constructor (tmptype, v);
1374 TREE_CONSTANT (init) = 1;
1375 TREE_STATIC (init) = 1;
1376 /* Create a static variable to hold the data. */
1377 tmp = gfc_create_var (tmptype, "data");
1378 TREE_STATIC (tmp) = 1;
1379 TREE_CONSTANT (tmp) = 1;
1380 TREE_READONLY (tmp) = 1;
1381 DECL_INITIAL (tmp) = init;
1384 /* Use BUILTIN_MEMCPY to assign the values. */
1385 tmp = gfc_conv_descriptor_data_get (desc);
1386 tmp = build_fold_indirect_ref_loc (input_location,
1388 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1389 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1390 init = gfc_build_addr_expr (NULL_TREE, init);
1392 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1393 bound = build_int_cst (NULL_TREE, n * size);
1394 tmp = build_call_expr_loc (input_location,
1395 built_in_decls[BUILT_IN_MEMCPY], 3,
1397 gfc_add_expr_to_block (&body, tmp);
1399 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1400 gfc_array_index_type, *poffset,
1401 build_int_cst (gfc_array_index_type, n));
1403 if (!INTEGER_CST_P (*poffset))
1405 gfc_add_modify (&body, *offsetvar, *poffset);
1406 *poffset = *offsetvar;
1410 /* The frontend should already have done any expansions
   at compile time.  TODO(review): confirm wording of the elided
   remainder of this comment. */
1414 /* Pass the code as is. */
1415 tmp = gfc_finish_block (&body);
1416 gfc_add_expr_to_block (pblock, tmp);
1420 /* Build the implied do-loop. */
1421 stmtblock_t implied_do_block;
1429 loopbody = gfc_finish_block (&body);
1431 /* Create a new block that holds the implied-do loop. A temporary
1432 loop-variable is used. */
1433 gfc_start_block(&implied_do_block);
1435 /* Initialize the loop. */
1436 gfc_init_se (&se, NULL);
1437 gfc_conv_expr_val (&se, c->iterator->start);
1438 gfc_add_block_to_block (&implied_do_block, &se.pre);
1439 gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
1441 gfc_init_se (&se, NULL);
1442 gfc_conv_expr_val (&se, c->iterator->end);
1443 gfc_add_block_to_block (&implied_do_block, &se.pre);
1444 end = gfc_evaluate_now (se.expr, &implied_do_block);
1446 gfc_init_se (&se, NULL);
1447 gfc_conv_expr_val (&se, c->iterator->step);
1448 gfc_add_block_to_block (&implied_do_block, &se.pre);
1449 step = gfc_evaluate_now (se.expr, &implied_do_block);
1451 /* If this array expands dynamically, and the number of iterations
1452 is not constant, we won't have allocated space for the static
1453 part of C->EXPR's size. Do that now. */
1454 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1456 /* Get the number of iterations. */
1457 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1459 /* Get the static part of C->EXPR's size. */
1460 gfc_get_array_constructor_element_size (&size, c->expr);
1461 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1463 /* Grow the array by TMP * TMP2 elements. */
1464 tmp = fold_build2_loc (input_location, MULT_EXPR,
1465 gfc_array_index_type, tmp, tmp2);
1466 gfc_grow_array (&implied_do_block, desc, tmp);
1469 /* Generate the loop body. */
1470 exit_label = gfc_build_label_decl (NULL_TREE);
1471 gfc_start_block (&body);
1473 /* Generate the exit condition. Depending on the sign of
1474 the step variable we have to generate the correct comparison. */
1476 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1477 step, build_int_cst (TREE_TYPE (step), 0));
1478 cond = fold_build3_loc (input_location, COND_EXPR,
1479 boolean_type_node, tmp,
1480 fold_build2_loc (input_location, GT_EXPR,
1481 boolean_type_node, shadow_loopvar, end),
1482 fold_build2_loc (input_location, LT_EXPR,
1483 boolean_type_node, shadow_loopvar, end));
1484 tmp = build1_v (GOTO_EXPR, exit_label);
1485 TREE_USED (exit_label) = 1;
1486 tmp = build3_v (COND_EXPR, cond, tmp,
1487 build_empty_stmt (input_location));
1488 gfc_add_expr_to_block (&body, tmp);
1490 /* The main loop body. */
1491 gfc_add_expr_to_block (&body, loopbody);
1493 /* Increase loop variable by step. */
1494 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1495 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1497 gfc_add_modify (&body, shadow_loopvar, tmp);
1499 /* Finish the loop. */
1500 tmp = gfc_finish_block (&body);
1501 tmp = build1_v (LOOP_EXPR, tmp);
1502 gfc_add_expr_to_block (&implied_do_block, tmp);
1504 /* Add the exit label. */
1505 tmp = build1_v (LABEL_EXPR, exit_label);
1506 gfc_add_expr_to_block (&implied_do_block, tmp);
1508 /* Finish the implied-do loop. */
1509 tmp = gfc_finish_block(&implied_do_block);
1510 gfc_add_expr_to_block(pblock, tmp);
1512 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1519 /* A catch-all to obtain the string length for anything that is not a
1520 substring of non-constant length, a constant, array or variable. */
1523 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1528 /* Don't bother if we already know the length is a constant. */
1529 if (*len && INTEGER_CST_P (*len))
1532 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1533 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1536 gfc_conv_const_charlen (e->ts.u.cl);
1537 *len = e->ts.u.cl->backend_decl;
1541 /* Otherwise, be brutal even if inefficient: translate the whole
   expression just to extract its string length. */
1542 ss = gfc_walk_expr (e);
1543 gfc_init_se (&se, NULL);
1545 /* No function call, in case of side effects. */
1546 se.no_function_call = 1;
1547 if (ss == gfc_ss_terminator)
1548 gfc_conv_expr (&se, e);
1550 gfc_conv_expr_descriptor (&se, e, ss);
1552 /* Fix the value. */
1553 *len = gfc_evaluate_now (se.string_length, &se.pre);
1555 gfc_add_block_to_block (block, &se.pre);
1556 gfc_add_block_to_block (block, &se.post);
1558 e->ts.u.cl->backend_decl = *len;
1563 /* Figure out the string length of a variable reference expression.
1564 Used by get_array_ctor_strlen. */
1567 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1573 /* Don't bother if we already know the length is a constant. */
1574 if (*len && INTEGER_CST_P (*len))
1577 ts = &expr->symtree->n.sym->ts;
1578 for (ref = expr->ref; ref; ref = ref->next)
1583 /* Array references don't change the string length. */
1587 /* Use the length of the component. */
1588 ts = &ref->u.c.component->ts;
1592 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1593 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1595 /* Note that this might evaluate expr. */
1596 get_array_ctor_all_strlen (block, expr, len);
/* Both substring bounds are constant: length = end - start + 1. */
1599 mpz_init_set_ui (char_len, 1);
1600 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1601 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1602 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1603 *len = convert (gfc_charlen_type_node, *len);
1604 mpz_clear (char_len);
1612 *len = ts->u.cl->backend_decl;
1616 /* Figure out the string length of a character array constructor.
1617 If len is NULL, don't calculate the length; this happens for recursive calls
1618 when a sub-array-constructor is an element but not at the first position,
1619 so when we're not interested in the length.
1620 Returns TRUE if all elements are character constants. */
1623 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1630 if (gfc_constructor_first (base) == NULL)
1633 *len = build_int_cstu (gfc_charlen_type_node, 0);
1637 /* Loop over all constructor elements to determine IS_CONST, but in LEN we
1638 want to store the length of the first, not the last, element. We can
1639 of course exit the loop as soon as is_const is found to be false. */
1640 for (c = gfc_constructor_first (base);
1641 c && is_const; c = gfc_constructor_next (c))
1643 switch (c->expr->expr_type)
1646 if (len && !(*len && INTEGER_CST_P (*len)))
1647 *len = build_int_cstu (gfc_charlen_type_node,
1648 c->expr->value.character.length);
1652 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1659 get_array_ctor_var_strlen (block, c->expr, len);
1665 get_array_ctor_all_strlen (block, c->expr, len);
1669 /* After the first iteration, we don't want the length modified. */
1676 /* Check whether the array constructor BASE consists entirely of constant
1677 elements, and if so returns the number of those elements, otherwise
1678 return zero. Note, an empty or NULL array constructor returns zero. */
1680 unsigned HOST_WIDE_INT
1681 gfc_constant_array_constructor_p (gfc_constructor_base base)
1683 unsigned HOST_WIDE_INT nelem = 0;
1685 gfc_constructor *c = gfc_constructor_first (base);
1689 || c->expr->rank > 0
1690 || c->expr->expr_type != EXPR_CONSTANT)
1692 c = gfc_constructor_next (c);
1699 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1700 and the tree type of its elements, TYPE, return a static constant
1701 variable that is compile-time initialized. */
1704 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1706 tree tmptype, init, tmp;
1707 HOST_WIDE_INT nelem;
1712 VEC(constructor_elt,gc) *v = NULL;
1714 /* First traverse the constructor list, converting the constants
1715 to tree to build an initializer. */
1717 c = gfc_constructor_first (expr->value.constructor);
1720 gfc_init_se (&se, NULL);
1721 gfc_conv_constant (&se, c->expr);
1722 if (c->expr->ts.type != BT_CHARACTER)
1723 se.expr = fold_convert (type, se.expr);
1724 else if (POINTER_TYPE_P (type))
1725 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
1727 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
1729 c = gfc_constructor_next (c);
1733 /* Next determine the tree type for the array. We use the gfortran
1734 front-end's gfc_get_nodesc_array_type in order to create a suitable
1735 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
1737 memset (&as, 0, sizeof (gfc_array_spec));
1739 as.rank = expr->rank;
1740 as.type = AS_EXPLICIT;
1743 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1744 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
1748 for (i = 0; i < expr->rank; i++)
1750 int tmp = (int) mpz_get_si (expr->shape[i]);
1751 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1752 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
1756 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
1758 /* AS is not needed anymore; free its bound expressions. */
1759 for (i = 0; i < as.rank + as.corank; i++)
1761 gfc_free_expr (as.lower[i]);
1762 gfc_free_expr (as.upper[i]);
1765 init = build_constructor (tmptype, v);
1767 TREE_CONSTANT (init) = 1;
1768 TREE_STATIC (init) = 1;
1770 tmp = gfc_create_var (tmptype, "A");
1771 TREE_STATIC (tmp) = 1;
1772 TREE_CONSTANT (tmp) = 1;
1773 TREE_READONLY (tmp) = 1;
1774 DECL_INITIAL (tmp) = init;
1780 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
1781 This mostly initializes the scalarizer state info structure with the
1782 appropriate values to directly use the array created by the function
1783 gfc_build_constant_array_constructor. */
1786 gfc_trans_constant_array_constructor (gfc_loopinfo * loop,
1787 gfc_ss * ss, tree type)
1793 tmp = gfc_build_constant_array_constructor (ss->expr, type);
1795 info = &ss->data.info;
1797 info->descriptor = tmp;
1798 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
1799 info->offset = gfc_index_zero_node;
/* Each dimension of the static array starts at zero with unit stride. */
1801 for (i = 0; i < info->dimen + info->codimen; i++)
1803 info->delta[i] = gfc_index_zero_node;
1804 info->start[i] = gfc_index_zero_node;
1805 info->end[i] = gfc_index_zero_node;
1806 info->stride[i] = gfc_index_one_node;
1810 if (info->dimen > loop->temp_dim)
1811 loop->temp_dim = info->dimen;
1814 /* Helper routine of gfc_trans_array_constructor to determine if the
1815 bounds of the loop specified by LOOP are constant and simple enough
1816 to use with gfc_trans_constant_array_constructor. Returns the
1817 iteration count of the loop if suitable, and NULL_TREE otherwise. */
1820 constant_array_constructor_loop_size (gfc_loopinfo * loop)
1822 tree size = gfc_index_one_node;
1826 for (i = 0; i < loop->dimen; i++)
1828 /* If the bounds aren't constant, return NULL_TREE. */
1829 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
1831 if (!integer_zerop (loop->from[i]))
1833 /* Only allow nonzero "from" in one-dimensional arrays. */
1834 if (loop->dimen != 1)
1836 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1837 gfc_array_index_type,
1838 loop->to[i], loop->from[i]);
/* Iteration count for this dimension is to - from + 1. */
1842 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1843 tmp, gfc_index_one_node);
1844 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1852 /* Array constructors are handled by constructing a temporary, then using that
1853 within the scalarization loop. This is not optimal, but seems by far the
simplest method. */
1857 gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
1859 gfc_constructor_base c;
1866 bool old_first_len, old_typespec_chararray_ctor;
1867 tree old_first_len_val;
1869 /* Save the old values for nested checking. */
1870 old_first_len = first_len;
1871 old_first_len_val = first_len_val;
1872 old_typespec_chararray_ctor = typespec_chararray_ctor;
1874 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
1875 typespec was given for the array constructor. */
1876 typespec_chararray_ctor = (ss->expr->ts.u.cl
1877 && ss->expr->ts.u.cl->length_from_typespec);
1879 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
1880 && ss->expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
1882 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
1886 ss->data.info.dimen = loop->dimen;
1888 c = ss->expr->value.constructor;
1889 if (ss->expr->ts.type == BT_CHARACTER)
1893 /* get_array_ctor_strlen walks the elements of the constructor, if a
1894 typespec was given, we already know the string length and want the one
specified there. */
1896 if (typespec_chararray_ctor && ss->expr->ts.u.cl->length
1897 && ss->expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
1901 const_string = false;
1902 gfc_init_se (&length_se, NULL);
1903 gfc_conv_expr_type (&length_se, ss->expr->ts.u.cl->length,
1904 gfc_charlen_type_node);
1905 ss->string_length = length_se.expr;
1906 gfc_add_block_to_block (&loop->pre, &length_se.pre);
1907 gfc_add_block_to_block (&loop->post, &length_se.post);
1910 const_string = get_array_ctor_strlen (&loop->pre, c,
1911 &ss->string_length);
1913 /* Complex character array constructors should have been taken care of
1914 and not end up here. */
1915 gcc_assert (ss->string_length);
1917 ss->expr->ts.u.cl->backend_decl = ss->string_length;
1919 type = gfc_get_character_type_len (ss->expr->ts.kind, ss->string_length);
1921 type = build_pointer_type (type);
1924 type = gfc_typenode_for_spec (&ss->expr->ts);
1926 /* See if the constructor determines the loop bounds. */
1929 if (ss->expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
1931 /* We have a multidimensional parameter. */
1933 for (n = 0; n < ss->expr->rank; n++)
1935 loop->from[n] = gfc_index_zero_node;
1936 loop->to[n] = gfc_conv_mpz_to_tree (ss->expr->shape [n],
1937 gfc_index_integer_kind);
1938 loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
1939 gfc_array_index_type,
1940 loop->to[n], gfc_index_one_node);
1944 if (loop->to[0] == NULL_TREE)
1948 /* We should have a 1-dimensional, zero-based loop. */
1949 gcc_assert (loop->dimen == 1);
1950 gcc_assert (integer_zerop (loop->from[0]));
1952 /* Split the constructor size into a static part and a dynamic part.
1953 Allocate the static size up-front and record whether the dynamic
1954 size might be nonzero. */
1956 dynamic = gfc_get_array_constructor_size (&size, c);
1957 mpz_sub_ui (size, size, 1);
1958 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1962 /* Special case constant array constructors. */
1965 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
1968 tree size = constant_array_constructor_loop_size (loop);
1969 if (size && compare_tree_int (size, nelem) == 0)
1971 gfc_trans_constant_array_constructor (loop, ss, type);
1977 if (TREE_CODE (loop->to[0]) == VAR_DECL)
1980 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, &ss->data.info,
1981 type, NULL_TREE, dynamic, true, false, where);
1983 desc = ss->data.info.descriptor;
1984 offset = gfc_index_zero_node;
1985 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
1986 TREE_NO_WARNING (offsetvar) = 1;
1987 TREE_USED (offsetvar) = 0;
1988 gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
1989 &offset, &offsetvar, dynamic);
1991 /* If the array grows dynamically, the upper bound of the loop variable
1992 is determined by the array's final upper bound. */
1995 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1996 gfc_array_index_type,
1997 offsetvar, gfc_index_one_node);
1998 tmp = gfc_evaluate_now (tmp, &loop->pre);
1999 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2000 if (loop->to[0] && TREE_CODE (loop->to[0]) == VAR_DECL)
2001 gfc_add_modify (&loop->pre, loop->to[0], tmp);
2006 if (TREE_USED (offsetvar))
2007 pushdecl (offsetvar);
2009 gcc_assert (INTEGER_CST_P (offset));
2012 /* Disable bound checking for now because it's probably broken. */
2013 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2020 /* Restore old values of globals. */
2021 first_len = old_first_len;
2022 first_len_val = old_first_len_val;
2023 typespec_chararray_ctor = old_typespec_chararray_ctor;
2027 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2028 called after evaluating all of INFO's vector dimensions. Go through
2029 each such vector dimension and see if we can now fill in any missing
loop bounds. */
2033 gfc_set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss_info * info)
2042 for (n = 0; n < loop->dimen + loop->codimen; n++)
2045 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
2046 && loop->to[n] == NULL)
2048 /* Loop variable N indexes vector dimension DIM, and we don't
2049 yet know the upper bound of loop variable N. Set it to the
2050 difference between the vector's upper and lower bounds. */
2051 gcc_assert (loop->from[n] == gfc_index_zero_node);
2052 gcc_assert (info->subscript[dim]
2053 && info->subscript[dim]->type == GFC_SS_VECTOR);
2055 gfc_init_se (&se, NULL);
2056 desc = info->subscript[dim]->data.info.descriptor;
2057 zero = gfc_rank_cst[0];
2058 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2059 gfc_array_index_type,
2060 gfc_conv_descriptor_ubound_get (desc, zero),
2061 gfc_conv_descriptor_lbound_get (desc, zero));
2062 tmp = gfc_evaluate_now (tmp, &loop->pre);
2069 /* Add the pre and post chains for all the scalar expressions in a SS chain
2070 to loop. This is called after the loop parameters have been calculated,
2071 but before the actual scalarizing loops. */
2074 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2080 /* TODO: This can generate bad code if there are ordering dependencies,
2081 e.g., a callee allocated function and an unknown size constructor. */
2082 gcc_assert (ss != NULL);
2084 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2091 /* Scalar expression. Evaluate this now. This includes elemental
2092 dimension indices, but not array section bounds. */
2093 gfc_init_se (&se, NULL);
2094 gfc_conv_expr (&se, ss->expr);
2095 gfc_add_block_to_block (&loop->pre, &se.pre);
2097 if (ss->expr->ts.type != BT_CHARACTER)
2099 /* Move the evaluation of scalar expressions outside the
2100 scalarization loop, except for WHERE assignments. */
2102 se.expr = convert(gfc_array_index_type, se.expr);
2104 se.expr = gfc_evaluate_now (se.expr, &loop->pre);
2105 gfc_add_block_to_block (&loop->pre, &se.post);
2108 gfc_add_block_to_block (&loop->post, &se.post);
2110 ss->data.scalar.expr = se.expr;
2111 ss->string_length = se.string_length;
2114 case GFC_SS_REFERENCE:
2115 /* Scalar argument to elemental procedure. Evaluate this
now. */
2117 gfc_init_se (&se, NULL);
2118 gfc_conv_expr (&se, ss->expr);
2119 gfc_add_block_to_block (&loop->pre, &se.pre);
2120 gfc_add_block_to_block (&loop->post, &se.post);
2122 ss->data.scalar.expr = gfc_evaluate_now (se.expr, &loop->pre);
2123 ss->string_length = se.string_length;
2126 case GFC_SS_SECTION:
2127 /* Add the expressions for scalar and vector subscripts. */
2128 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2129 if (ss->data.info.subscript[n])
2130 gfc_add_loop_ss_code (loop, ss->data.info.subscript[n], true,
2133 gfc_set_vector_loop_bounds (loop, &ss->data.info);
2137 /* Get the vector's descriptor and store it in SS. */
2138 gfc_init_se (&se, NULL);
2139 gfc_conv_expr_descriptor (&se, ss->expr, gfc_walk_expr (ss->expr));
2140 gfc_add_block_to_block (&loop->pre, &se.pre);
2141 gfc_add_block_to_block (&loop->post, &se.post);
2142 ss->data.info.descriptor = se.expr;
2145 case GFC_SS_INTRINSIC:
2146 gfc_add_intrinsic_ss_code (loop, ss);
2149 case GFC_SS_FUNCTION:
2150 /* Array function return value. We call the function and save its
2151 result in a temporary for use inside the loop. */
2152 gfc_init_se (&se, NULL);
2155 gfc_conv_expr (&se, ss->expr);
2156 gfc_add_block_to_block (&loop->pre, &se.pre);
2157 gfc_add_block_to_block (&loop->post, &se.post);
2158 ss->string_length = se.string_length;
2161 case GFC_SS_CONSTRUCTOR:
2162 if (ss->expr->ts.type == BT_CHARACTER
2163 && ss->string_length == NULL
2164 && ss->expr->ts.u.cl
2165 && ss->expr->ts.u.cl->length)
2167 gfc_init_se (&se, NULL);
2168 gfc_conv_expr_type (&se, ss->expr->ts.u.cl->length,
2169 gfc_charlen_type_node);
2170 ss->string_length = se.expr;
2171 gfc_add_block_to_block (&loop->pre, &se.pre);
2172 gfc_add_block_to_block (&loop->post, &se.post);
2174 gfc_trans_array_constructor (loop, ss, where);
2178 case GFC_SS_COMPONENT:
2179 /* Do nothing. These are handled elsewhere. */
2189 /* Translate expressions for the descriptor and data pointer of a SS. */
2193 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2198 /* Get the descriptor for the array to be scalarized. */
2199 gcc_assert (ss->expr->expr_type == EXPR_VARIABLE);
2200 gfc_init_se (&se, NULL);
2201 se.descriptor_only = 1;
2202 gfc_conv_expr_lhs (&se, ss->expr);
2203 gfc_add_block_to_block (block, &se.pre);
2204 ss->data.info.descriptor = se.expr;
2205 ss->string_length = se.string_length;
2209 /* Also the data pointer. */
2210 tmp = gfc_conv_array_data (se.expr);
2211 /* If this is a variable or address of a variable we use it directly.
2212 Otherwise we must evaluate it now to avoid breaking dependency
2213 analysis by pulling the expressions for elemental array indices
inside the loop. */
2216 || (TREE_CODE (tmp) == ADDR_EXPR
2217 && DECL_P (TREE_OPERAND (tmp, 0)))))
2218 tmp = gfc_evaluate_now (tmp, block);
2219 ss->data.info.data = tmp;
2221 tmp = gfc_conv_array_offset (se.expr);
2222 ss->data.info.offset = gfc_evaluate_now (tmp, block);
2224 /* Make absolutely sure that the saved_offset is indeed saved
2225 so that the variable is still accessible after the loops
have been generated. */
2227 ss->data.info.saved_offset = ss->data.info.offset;
2232 /* Initialize a gfc_loopinfo structure. */
2235 gfc_init_loopinfo (gfc_loopinfo * loop)
2239 memset (loop, 0, sizeof (gfc_loopinfo));
2240 gfc_init_block (&loop->pre);
2241 gfc_init_block (&loop->post);
2243 /* Initially scalarize in order and default to no loop reversal. */
2244 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2247 loop->reverse[n] = GFC_CANNOT_REVERSE;
/* The SS chain starts out empty. */
2250 loop->ss = gfc_ss_terminator;
2254 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
chain. */
2258 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2264 /* Return an expression for the data pointer of an array. */
2267 gfc_conv_array_data (tree descriptor)
2271 type = TREE_TYPE (descriptor);
2272 if (GFC_ARRAY_TYPE_P (type))
2274 if (TREE_CODE (type) == POINTER_TYPE)
2278 /* Descriptorless arrays. */
2279 return gfc_build_addr_expr (NULL_TREE, descriptor);
/* Full descriptor: read the data field. */
2283 return gfc_conv_descriptor_data_get (descriptor);
2287 /* Return an expression for the base offset of an array. */
2290 gfc_conv_array_offset (tree descriptor)
2294 type = TREE_TYPE (descriptor);
2295 if (GFC_ARRAY_TYPE_P (type))
/* Descriptorless arrays record the offset in the type. */
2296 return GFC_TYPE_ARRAY_OFFSET (type);
2298 return gfc_conv_descriptor_offset_get (descriptor);
2302 /* Get an expression for the array stride. */
2305 gfc_conv_array_stride (tree descriptor, int dim)
2310 type = TREE_TYPE (descriptor);
2312 /* Descriptorless arrays record the stride in the type itself. */
2313 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2314 if (tmp != NULL_TREE)
/* Otherwise read the stride from the descriptor. */
2317 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2322 /* Like gfc_conv_array_stride, but for the lower bound. */
2325 gfc_conv_array_lbound (tree descriptor, int dim)
2330 type = TREE_TYPE (descriptor);
/* Descriptorless arrays record the bound in the type itself. */
2332 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2333 if (tmp != NULL_TREE)
2336 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2341 /* Like gfc_conv_array_stride, but for the upper bound. */
2344 gfc_conv_array_ubound (tree descriptor, int dim)
2349 type = TREE_TYPE (descriptor);
/* Descriptorless arrays record the bound in the type itself. */
2351 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2352 if (tmp != NULL_TREE)
2355 /* This should only ever happen when passing an assumed shape array
2356 as an actual parameter. The value will never be used. */
2357 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2358 return gfc_index_zero_node;
2360 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2365 /* Generate code to perform an array index bound check on INDEX for
dimension N (0-based) of DESCRIPTOR. CHECK_UPPER selects whether the
upper bound is checked as well as the lower one. Returns the (possibly
re-evaluated) index. */
2368 gfc_trans_array_bound_check (gfc_se * se, tree descriptor, tree index, int n,
2369 locus * where, bool check_upper)
2372 tree tmp_lo, tmp_up;
2374 const char * name = NULL;
2376 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2379 index = gfc_evaluate_now (index, &se->pre);
2381 /* We find a name for the error message. */
2383 name = se->ss->expr->symtree->name;
2385 if (!name && se->loop && se->loop->ss && se->loop->ss->expr
2386 && se->loop->ss->expr->symtree)
2387 name = se->loop->ss->expr->symtree->name;
2389 if (!name && se->loop && se->loop->ss && se->loop->ss->loop_chain
2390 && se->loop->ss->loop_chain->expr
2391 && se->loop->ss->loop_chain->expr->symtree)
2392 name = se->loop->ss->loop_chain->expr->symtree->name;
2394 if (!name && se->loop && se->loop->ss && se->loop->ss->expr)
2396 if (se->loop->ss->expr->expr_type == EXPR_FUNCTION
2397 && se->loop->ss->expr->value.function.name)
2398 name = se->loop->ss->expr->value.function.name;
2400 if (se->loop->ss->type == GFC_SS_CONSTRUCTOR
2401 || se->loop->ss->type == GFC_SS_SCALAR)
2402 name = "unnamed constant";
2405 if (TREE_CODE (descriptor) == VAR_DECL)
2406 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2408 /* If upper bound is present, include both bounds in the error message. */
2411 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2412 tmp_up = gfc_conv_array_ubound (descriptor, n);
2415 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2416 "outside of expected range (%%ld:%%ld)", n+1, name);
2418 asprintf (&msg, "Index '%%ld' of dimension %d "
2419 "outside of expected range (%%ld:%%ld)", n+1);
/* Check against the lower bound (index < lbound). */
2421 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2423 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2424 fold_convert (long_integer_type_node, index),
2425 fold_convert (long_integer_type_node, tmp_lo),
2426 fold_convert (long_integer_type_node, tmp_up));
/* Check against the upper bound (index > ubound). */
2427 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2429 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2430 fold_convert (long_integer_type_node, index),
2431 fold_convert (long_integer_type_node, tmp_lo),
2432 fold_convert (long_integer_type_node, tmp_up));
/* No upper bound available: only check the lower bound. */
2437 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2440 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2441 "below lower bound of %%ld", n+1, name);
2443 asprintf (&msg, "Index '%%ld' of dimension %d "
2444 "below lower bound of %%ld", n+1);
2446 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2448 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2449 fold_convert (long_integer_type_node, index),
2450 fold_convert (long_integer_type_node, tmp_lo));
2458 /* Return the offset for an index. Performs bound checking for elemental
2459 dimensions. Single element references are processed separately.
2460 DIM is the array dimension, I is the loop dimension. */
/* Dispatches on AR's dimension type: an elemental (scalar) subscript reuses
   the value already evaluated outside the loop; a vector subscript indexes
   the vector descriptor to fetch the element index; otherwise the loop
   variable is combined with the dimension's stride and delta.  Any runtime
   bound-check code is appended to SE->pre.  NOTE(review): AR is presumably
   allowed to be NULL for temporaries (gfc_conv_tmp_array_ref passes NULL
   through gfc_conv_scalarized_array_ref) — the guard is not visible in this
   excerpt; confirm before relying on it.  */
2463 gfc_conv_array_index_offset (gfc_se * se, gfc_ss_info * info, int dim, int i,
2464 gfc_array_ref * ar, tree stride)
2470 /* Get the index into the array for this dimension. */
2473 gcc_assert (ar->type != AR_ELEMENT);
2474 switch (ar->dimen_type[dim])
2476 case DIMEN_THIS_IMAGE:
2480 /* Elemental dimension. */
2481 gcc_assert (info->subscript[dim]
2482 && info->subscript[dim]->type == GFC_SS_SCALAR);
2483 /* We've already translated this value outside the loop. */
2484 index = info->subscript[dim]->data.scalar.expr;
/* Skip the upper-bound check for the last dimension of an assumed-size
   array, which has no upper bound.  */
2486 index = gfc_trans_array_bound_check (se, info->descriptor,
2487 index, dim, &ar->where,
2488 ar->as->type != AS_ASSUMED_SIZE
2489 || dim < ar->dimen - 1);
2493 gcc_assert (info && se->loop);
2494 gcc_assert (info->subscript[dim]
2495 && info->subscript[dim]->type == GFC_SS_VECTOR);
2496 desc = info->subscript[dim]->data.info.descriptor;
2498 /* Get a zero-based index into the vector. */
2499 index = fold_build2_loc (input_location, MINUS_EXPR,
2500 gfc_array_index_type,
2501 se->loop->loopvar[i], se->loop->from[i]);
2503 /* Multiply the index by the stride. */
2504 index = fold_build2_loc (input_location, MULT_EXPR,
2505 gfc_array_index_type,
2506 index, gfc_conv_array_stride (desc, 0));
2508 /* Read the vector to get an index into info->descriptor. */
2509 data = build_fold_indirect_ref_loc (input_location,
2510 gfc_conv_array_data (desc));
2511 index = gfc_build_array_ref (data, index, NULL);
2512 index = gfc_evaluate_now (index, &se->pre);
2513 index = fold_convert (gfc_array_index_type, index);
2515 /* Do any bounds checking on the final info->descriptor index. */
2516 index = gfc_trans_array_bound_check (se, info->descriptor,
2517 index, dim, &ar->where,
2518 ar->as->type != AS_ASSUMED_SIZE
2519 || dim < ar->dimen - 1);
2523 /* Scalarized dimension. */
2524 gcc_assert (info && se->loop);
2526 /* Multiply the loop variable by the stride and delta. */
2527 index = se->loop->loopvar[i];
2528 if (!integer_onep (info->stride[dim]))
2529 index = fold_build2_loc (input_location, MULT_EXPR,
2530 gfc_array_index_type, index,
2532 if (!integer_zerop (info->delta[dim]))
2533 index = fold_build2_loc (input_location, PLUS_EXPR,
2534 gfc_array_index_type, index,
2544 /* Temporary array or derived type component. */
2545 gcc_assert (se->loop);
/* Loop dimensions may have been reordered; map through loop->order.  */
2546 index = se->loop->loopvar[se->loop->order[i]];
2547 if (!integer_zerop (info->delta[dim]))
2548 index = fold_build2_loc (input_location, PLUS_EXPR,
2549 gfc_array_index_type, index, info->delta[dim]);
2552 /* Multiply by the stride. */
2553 if (!integer_onep (stride))
2554 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2561 /* Build a scalarized reference to an array. */
/* Uses the outermost loop dimension's index offset plus the offset that was
   accumulated for the other dimensions (info->offset) to form the element
   reference, which is stored in SE->expr.  AR may be NULL when referencing
   a scalarization temporary (see gfc_conv_tmp_array_ref).  */
2564 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
2567 tree decl = NULL_TREE;
2572 info = &se->ss->data.info;
2574 n = se->loop->order[0];
2578 index = gfc_conv_array_index_offset (se, info, info->dim[n], n, ar,
2580 /* Add the offset for this dimension to the stored offset for all other
2582 if (!integer_zerop (info->offset))
2583 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2584 index, info->offset);
/* For subref arrays pass the backing decl so gfc_build_array_ref can apply
   the span — presumably; confirm against gfc_build_array_ref.  */
2586 if (se->ss->expr && is_subref_array (se->ss->expr))
2587 decl = se->ss->expr->symtree->n.sym->backend_decl;
2589 tmp = build_fold_indirect_ref_loc (input_location,
2591 se->expr = gfc_build_array_ref (tmp, index, decl);
2595 /* Translate access of temporary array. */
/* SE->ss must already point at the temporary's SS entry.  Passing a NULL
   array-ref makes gfc_conv_scalarized_array_ref index the temporary with
   the loop variables directly, then the SS chain is advanced.  */
2598 gfc_conv_tmp_array_ref (gfc_se * se)
2600 se->string_length = se->ss->string_length;
2601 gfc_conv_scalarized_array_ref (se, NULL);
2602 gfc_advance_se_ss_chain (se);
2606 /* Build an array reference. se->expr already holds the array descriptor.
2607 This should be either a variable, indirect variable reference or component
2608 reference. For arrays which do not have a descriptor, se->expr will be
2610 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
/* Handles full (AR_ELEMENT) references by summing index*stride over every
   dimension, with optional runtime bound checks; non-element references are
   delegated to the scalarizer.  Generated checks and index code go into
   SE->pre.  */
2613 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
2626 /* Handle scalarized references separately. */
2627 if (ar->type != AR_ELEMENT)
2629 gfc_conv_scalarized_array_ref (se, ar);
2630 gfc_advance_se_ss_chain (se);
2634 index = gfc_index_zero_node;
2636 /* Calculate the offsets from all the dimensions. */
2637 for (n = 0; n < ar->dimen; n++)
2639 /* Calculate the index for this dimension. */
2640 gfc_init_se (&indexse, se);
2641 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
2642 gfc_add_block_to_block (&se->pre, &indexse.pre);
2644 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2646 /* Check array bounds. */
2650 /* Evaluate the indexse.expr only once. */
2651 indexse.expr = save_expr (indexse.expr);
/* Lower bound: for compiler temporaries the descriptor bounds are not
   meaningful, so re-evaluate the declared bound expression instead.  */
2654 tmp = gfc_conv_array_lbound (se->expr, n);
2655 if (sym->attr.temporary)
2657 gfc_init_se (&tmpse, se);
2658 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
2659 gfc_array_index_type);
2660 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2664 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2666 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2667 "below lower bound of %%ld", n+1, sym->name);
2668 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2669 fold_convert (long_integer_type_node,
2671 fold_convert (long_integer_type_node, tmp));
2674 /* Upper bound, but not for the last dimension of assumed-size
2676 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
2678 tmp = gfc_conv_array_ubound (se->expr, n);
2679 if (sym->attr.temporary)
2681 gfc_init_se (&tmpse, se);
2682 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
2683 gfc_array_index_type);
2684 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2688 cond = fold_build2_loc (input_location, GT_EXPR,
2689 boolean_type_node, indexse.expr, tmp);
2690 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2691 "above upper bound of %%ld", n+1, sym->name);
2692 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2693 fold_convert (long_integer_type_node,
2695 fold_convert (long_integer_type_node, tmp));
2700 /* Multiply the index by the stride. */
2701 stride = gfc_conv_array_stride (se->expr, n);
2702 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2703 indexse.expr, stride);
2705 /* And add it to the total. */
2706 index = fold_build2_loc (input_location, PLUS_EXPR,
2707 gfc_array_index_type, index, tmp);
/* Fold in the descriptor's base offset, if any.  */
2710 tmp = gfc_conv_array_offset (se->expr);
2711 if (!integer_zerop (tmp))
2712 index = fold_build2_loc (input_location, PLUS_EXPR,
2713 gfc_array_index_type, index, tmp);
2715 /* Access the calculated element. */
2716 tmp = gfc_conv_array_data (se->expr);
2717 tmp = build_fold_indirect_ref (tmp);
2718 se->expr = gfc_build_array_ref (tmp, index, sym->backend_decl);
2722 /* Generate the code to be executed immediately before entering a
2723 scalarization loop. */
/* For each SS active in this loop (selected by FLAG against ss->useflags),
   hoists loop-invariant work into PBLOCK: for the outermost dimension the
   elemental-subscript contributions are folded into info->offset; for
   inner dimensions the previous loop level's index*stride is accumulated.
   The innermost stride is cached in info->stride0.  */
2726 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
2727 stmtblock_t * pblock)
2736 /* This code will be executed before entering the scalarization loop
2737 for this dimension. */
2738 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
2740 if ((ss->useflags & flag) == 0)
2743 if (ss->type != GFC_SS_SECTION
2744 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
2745 && ss->type != GFC_SS_COMPONENT)
2748 info = &ss->data.info;
2750 if (dim >= info->dimen)
2753 if (dim == info->dimen - 1)
2755 /* For the outermost loop calculate the offset due to any
2756 elemental dimensions. It will have been initialized with the
2757 base offset of the array. */
2760 for (i = 0; i < info->ref->u.ar.dimen; i++)
2762 if (info->ref->u.ar.dimen_type[i] != DIMEN_ELEMENT)
2765 gfc_init_se (&se, NULL);
2767 se.expr = info->descriptor;
2768 stride = gfc_conv_array_stride (info->descriptor, i);
/* I == -1: elemental dimensions have no loop variable.  */
2769 index = gfc_conv_array_index_offset (&se, info, i, -1,
2772 gfc_add_block_to_block (pblock, &se.pre);
2774 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2775 gfc_array_index_type,
2776 info->offset, index);
2777 info->offset = gfc_evaluate_now (info->offset, pblock);
2782 /* For the time being, the innermost loop is unconditionally on
2783 the first dimension of the scalarization loop. */
2784 gcc_assert (i == 0);
2785 stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
2787 /* Calculate the stride of the innermost loop. Hopefully this will
2788 allow the backend optimizers to do their stuff more effectively.
2790 info->stride0 = gfc_evaluate_now (stride, pblock);
2794 /* Add the offset for the previous loop dimension. */
2799 ar = &info->ref->u.ar;
2800 i = loop->order[dim + 1];
2808 gfc_init_se (&se, NULL);
2810 se.expr = info->descriptor;
2811 stride = gfc_conv_array_stride (info->descriptor, info->dim[i]);
2812 index = gfc_conv_array_index_offset (&se, info, info->dim[i], i,
2814 gfc_add_block_to_block (pblock, &se.pre);
2815 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2816 gfc_array_index_type, info->offset,
2818 info->offset = gfc_evaluate_now (info->offset, pblock);
2821 /* Remember this offset for the second loop. */
2822 if (dim == loop->temp_dim - 1)
2823 info->saved_offset = info->offset;
2828 /* Start a scalarized expression. Creates a scope and declares loop
/* Walks the loop dimensions from outermost to innermost (per loop->order),
   opening a statement block and creating an index variable ("S") for each,
   then runs the preloop setup.  PBODY receives the innermost body block.
   The matching loop generation happens in gfc_trans_scalarizing_loops.  */
2832 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
2838 gcc_assert (!loop->array_parameter);
2840 for (dim = loop->dimen + loop->codimen - 1; dim >= 0; dim--)
2842 n = loop->order[dim];
2844 gfc_start_block (&loop->code[n]);
2846 /* Create the loop variable. */
2847 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
2849 if (dim < loop->temp_dim)
2853 /* Calculate values that will be constant within this loop. */
2854 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
2856 gfc_start_block (pbody);
2860 /* Generates the actual loop code for a scalarization loop. */
/* Closes loop dimension N: either emits an OMP_FOR when the outermost loop
   sits inside an OpenMP WORKSHARE being scalarized, or builds a plain
   LOOP_EXPR with an explicit exit test.  The finished loop is appended to
   loop->code[n].  */
2863 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
2864 stmtblock_t * pbody)
2875 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
2876 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
2877 && n == loop->dimen - 1)
2879 /* We create an OMP_FOR construct for the outermost scalarized loop. */
2880 init = make_tree_vec (1);
2881 cond = make_tree_vec (1);
2882 incr = make_tree_vec (1);
2884 /* Cycle statement is implemented with a goto. Exit statement must not
2885 be present for this loop. */
2886 exit_label = gfc_build_label_decl (NULL_TREE);
2887 TREE_USED (exit_label) = 1;
2889 /* Label for cycle statements (if needed). */
2890 tmp = build1_v (LABEL_EXPR, exit_label);
2891 gfc_add_expr_to_block (pbody, tmp);
2893 stmt = make_node (OMP_FOR);
2895 TREE_TYPE (stmt) = void_type_node;
2896 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
2898 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
2899 OMP_CLAUSE_SCHEDULE);
2900 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
2901 = OMP_CLAUSE_SCHEDULE_STATIC;
2902 if (ompws_flags & OMPWS_NOWAIT)
2903 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
2904 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
2906 /* Initialize the loopvar. */
2907 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
2909 OMP_FOR_INIT (stmt) = init;
2910 /* The exit condition. */
2911 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
2913 loop->loopvar[n], loop->to[n]);
2914 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
2915 OMP_FOR_COND (stmt) = cond;
2916 /* Increment the loopvar. */
2917 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2918 loop->loopvar[n], gfc_index_one_node);
2919 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
2920 void_type_node, loop->loopvar[n], tmp);
2921 OMP_FOR_INCR (stmt) = incr;
2923 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
2924 gfc_add_expr_to_block (&loop->code[n], stmt);
/* Non-workshare path: a reversed loop runs from to[n] down to from[n]
   (only when no temporary is involved).  */
2928 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
2929 && (loop->temp_ss == NULL);
2931 loopbody = gfc_finish_block (pbody);
2935 tmp = loop->from[n];
2936 loop->from[n] = loop->to[n];
2940 /* Initialize the loopvar. */
2941 if (loop->loopvar[n] != loop->from[n])
2942 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
2944 exit_label = gfc_build_label_decl (NULL_TREE);
2946 /* Generate the loop body. */
2947 gfc_init_block (&block);
2949 /* The exit condition. */
2950 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
2951 boolean_type_node, loop->loopvar[n], loop->to[n]);
2952 tmp = build1_v (GOTO_EXPR, exit_label);
2953 TREE_USED (exit_label) = 1;
2954 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
2955 gfc_add_expr_to_block (&block, tmp);
2957 /* The main body. */
2958 gfc_add_expr_to_block (&block, loopbody);
2960 /* Increment the loopvar. */
2961 tmp = fold_build2_loc (input_location,
2962 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
2963 gfc_array_index_type, loop->loopvar[n],
2964 gfc_index_one_node);
2966 gfc_add_modify (&block, loop->loopvar[n], tmp);
2968 /* Build the loop. */
2969 tmp = gfc_finish_block (&block);
2970 tmp = build1_v (LOOP_EXPR, tmp);
2971 gfc_add_expr_to_block (&loop->code[n], tmp);
2973 /* Add the exit label. */
2974 tmp = build1_v (LABEL_EXPR, exit_label);
2975 gfc_add_expr_to_block (&loop->code[n], tmp);
2981 /* Finishes and generates the loops for a scalarized expression. */
/* Closes every loop dimension innermost-first (per loop->order), nesting
   each finished loop into its enclosing dimension's block, then appends
   the whole nest to loop->pre and clears the per-SS use flags.  */
2984 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
2989 stmtblock_t *pblock;
2993 /* Generate the loops. */
2994 for (dim = 0; dim < loop->dimen + loop->codimen; dim++)
2996 n = loop->order[dim];
2997 gfc_trans_scalarized_loop_end (loop, n, pblock);
2998 loop->loopvar[n] = NULL_TREE;
2999 pblock = &loop->code[n];
3002 tmp = gfc_finish_block (pblock);
3003 gfc_add_expr_to_block (&loop->pre, tmp);
3005 /* Clear all the used flags. */
3006 for (ss = loop->ss; ss; ss = ss->loop_chain)
3011 /* Finish the main body of a scalarized expression, and start the secondary
/* Closes the loops covered by the temporary (keeping the outermost one
   open), restores each SS's offset from saved_offset, then reopens the
   inner loops with fresh index variables ("Q") for the copy-back pass.
   BODY is restarted for the secondary copying code.  */
3015 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3019 stmtblock_t *pblock;
3023 /* We finish as many loops as are used by the temporary. */
3024 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3026 n = loop->order[dim];
3027 gfc_trans_scalarized_loop_end (loop, n, pblock);
3028 loop->loopvar[n] = NULL_TREE;
3029 pblock = &loop->code[n];
3032 /* We don't want to finish the outermost loop entirely. */
3033 n = loop->order[loop->temp_dim - 1];
3034 gfc_trans_scalarized_loop_end (loop, n, pblock);
3036 /* Restore the initial offsets. */
3037 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Use flag 2 marks SS entries that participate in the second loop.  */
3039 if ((ss->useflags & 2) == 0)
3042 if (ss->type != GFC_SS_SECTION
3043 && ss->type != GFC_SS_FUNCTION && ss->type != GFC_SS_CONSTRUCTOR
3044 && ss->type != GFC_SS_COMPONENT)
3047 ss->data.info.offset = ss->data.info.saved_offset;
3050 /* Restart all the inner loops we just finished. */
3051 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3053 n = loop->order[dim];
3055 gfc_start_block (&loop->code[n]);
3057 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3059 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3062 /* Start a block for the secondary copying code. */
3063 gfc_start_block (body);
3067 /* Calculate the lower bound of an array section. */
/* Fills info->start/end/stride for dimension DIM of a GFC_SS_SECTION.
   Vector subscripts get a zero-based start with unit stride; range
   subscripts evaluate the section triplet, defaulting missing pieces to
   the array's declared bounds and a stride of one.  All evaluation is
   hoisted into loop->pre.  COARRAY/COARRAY_LAST tweak the handling for
   codimensions — presumably the last codimension is treated specially;
   the affected lines are not fully visible here.  */
3070 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim,
3071 bool coarray, bool coarray_last)
3075 gfc_expr *stride = NULL;
3080 gcc_assert (ss->type == GFC_SS_SECTION);
3082 info = &ss->data.info;
3084 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3086 /* We use a zero-based index to access the vector. */
3087 info->start[dim] = gfc_index_zero_node;
3088 info->end[dim] = NULL;
3090 info->stride[dim] = gfc_index_one_node;
3094 gcc_assert (info->ref->u.ar.dimen_type[dim] == DIMEN_RANGE);
3095 desc = info->descriptor;
3096 start = info->ref->u.ar.start[dim];
3097 end = info->ref->u.ar.end[dim];
3099 stride = info->ref->u.ar.stride[dim];
3101 /* Calculate the start of the range. For vector subscripts this will
3102 be the range of the vector. */
3105 /* Specified section start. */
3106 gfc_init_se (&se, NULL);
3107 gfc_conv_expr_type (&se, start, gfc_array_index_type);
3108 gfc_add_block_to_block (&loop->pre, &se.pre);
3109 info->start[dim] = se.expr;
3113 /* No lower bound specified so use the bound of the array. */
3114 info->start[dim] = gfc_conv_array_lbound (desc, dim);
3116 info->start[dim] = gfc_evaluate_now (info->start[dim], &loop->pre);
3118 /* Similarly calculate the end. Although this is not used in the
3119 scalarizer, it is needed when checking bounds and where the end
3120 is an expression with side-effects. */
3125 /* Specified section start. */
3126 gfc_init_se (&se, NULL);
3127 gfc_conv_expr_type (&se, end, gfc_array_index_type);
3128 gfc_add_block_to_block (&loop->pre, &se.pre);
3129 info->end[dim] = se.expr;
3133 /* No upper bound specified so use the bound of the array. */
3134 info->end[dim] = gfc_conv_array_ubound (desc, dim);
3136 info->end[dim] = gfc_evaluate_now (info->end[dim], &loop->pre);
3139 /* Calculate the stride. */
3140 if (!coarray && stride == NULL)
3141 info->stride[dim] = gfc_index_one_node;
3144 gfc_init_se (&se, NULL);
3145 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3146 gfc_add_block_to_block (&loop->pre, &se.pre);
3147 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3152 /* Calculates the range start and stride for a SS chain. Also gets the
3153 descriptor and data pointer. The range of vector subscripts is the size
3154 of the vector. Array bounds are also checked. */
/* Three phases: (1) derive loop->dimen/codimen from the first rank-bearing
   SS; (2) per-SS, translate descriptors and section triplets; (3) when
   -fcheck=bounds is active, emit runtime checks for zero strides,
   out-of-range section starts/ends, and mismatched section extents.  */
3157 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3165 /* Determine the rank of the loop. */
3167 ss != gfc_ss_terminator && loop->dimen == 0; ss = ss->loop_chain)
3171 case GFC_SS_SECTION:
3172 case GFC_SS_CONSTRUCTOR:
3173 case GFC_SS_FUNCTION:
3174 case GFC_SS_COMPONENT:
3175 loop->dimen = ss->data.info.dimen;
3176 loop->codimen = ss->data.info.codimen;
3179 /* As usual, lbound and ubound are exceptions!. */
3180 case GFC_SS_INTRINSIC:
3181 switch (ss->expr->value.function.isym->id)
3183 case GFC_ISYM_LBOUND:
3184 case GFC_ISYM_UBOUND:
3185 loop->dimen = ss->data.info.dimen;
3189 case GFC_ISYM_LCOBOUND:
3190 case GFC_ISYM_UCOBOUND:
3191 case GFC_ISYM_THIS_IMAGE:
3192 loop->dimen = ss->data.info.dimen;
3193 loop->codimen = ss->data.info.codimen;
3205 /* We should have determined the rank of the expression by now. If
3206 not, that's bad news. */
3207 gcc_assert (loop->dimen + loop->codimen != 0);
3209 /* Loop over all the SS in the chain. */
3210 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3212 if (ss->expr && ss->expr->shape && !ss->shape)
3213 ss->shape = ss->expr->shape;
3217 case GFC_SS_SECTION:
3218 /* Get the descriptor for the array. */
3219 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3221 for (n = 0; n < ss->data.info.dimen; n++)
3222 gfc_conv_section_startstride (loop, ss, ss->data.info.dim[n],
/* Codimensions are translated separately, flagging the last one.  */
3224 for (n = ss->data.info.dimen;
3225 n < ss->data.info.dimen + ss->data.info.codimen; n++)
3226 gfc_conv_section_startstride (loop, ss, ss->data.info.dim[n], true,
3227 n == ss->data.info.dimen
3228 + ss->data.info.codimen -1);
3232 case GFC_SS_INTRINSIC:
3233 switch (ss->expr->value.function.isym->id)
3235 /* Fall through to supply start and stride. */
3236 case GFC_ISYM_LBOUND:
3237 case GFC_ISYM_UBOUND:
3238 case GFC_ISYM_LCOBOUND:
3239 case GFC_ISYM_UCOBOUND:
3240 case GFC_ISYM_THIS_IMAGE:
3247 case GFC_SS_CONSTRUCTOR:
3248 case GFC_SS_FUNCTION:
/* Constructors/function results iterate a zero-based unit-stride range.  */
3249 for (n = 0; n < ss->data.info.dimen; n++)
3251 ss->data.info.start[n] = gfc_index_zero_node;
3252 ss->data.info.end[n] = gfc_index_zero_node;
3253 ss->data.info.stride[n] = gfc_index_one_node;
3262 /* The rest is just runtime bound checking. */
3263 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3266 tree lbound, ubound;
3268 tree size[GFC_MAX_DIMENSIONS];
3269 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3274 gfc_start_block (&block);
3276 for (n = 0; n < loop->dimen; n++)
3277 size[n] = NULL_TREE;
3279 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3283 if (ss->type != GFC_SS_SECTION)
3286 /* Catch allocatable lhs in f2003. */
3287 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3290 gfc_start_block (&inner);
3292 /* TODO: range checking for mapped dimensions. */
3293 info = &ss->data.info;
3295 /* This code only checks ranges. Elemental and vector
3296 dimensions are checked later. */
3297 for (n = 0; n < loop->dimen; n++)
3302 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
/* Assumed-size arrays have no upper bound in the last dimension.  */
3305 if (dim == info->ref->u.ar.dimen - 1
3306 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3307 check_upper = false;
3311 /* Zero stride is not allowed. */
3312 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3313 info->stride[dim], gfc_index_zero_node);
3314 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3315 "of array '%s'", dim + 1, ss->expr->symtree->name);
3316 gfc_trans_runtime_check (true, false, tmp, &inner,
3317 &ss->expr->where, msg);
3320 desc = ss->data.info.descriptor;
3322 /* This is the run-time equivalent of resolve.c's
3323 check_dimension(). The logical is more readable there
3324 than it is here, with all the trees. */
3325 lbound = gfc_conv_array_lbound (desc, dim);
3326 end = info->end[dim];
3328 ubound = gfc_conv_array_ubound (desc, dim);
3332 /* non_zerosized is true when the selected range is not
3334 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3335 boolean_type_node, info->stride[dim],
3336 gfc_index_zero_node);
3337 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3338 info->start[dim], end);
3339 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3340 boolean_type_node, stride_pos, tmp);
3342 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3344 info->stride[dim], gfc_index_zero_node);
3345 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3346 info->start[dim], end);
3347 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3350 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3352 stride_pos, stride_neg);
3354 /* Check the start of the range against the lower and upper
3355 bounds of the array, if the range is not empty.
3356 If upper bound is present, include both bounds in the
3360 tmp = fold_build2_loc (input_location, LT_EXPR,
3362 info->start[dim], lbound);
3363 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3365 non_zerosized, tmp);
3366 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3368 info->start[dim], ubound);
3369 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3371 non_zerosized, tmp2);
3372 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3373 "outside of expected range (%%ld:%%ld)",
3374 dim + 1, ss->expr->symtree->name);
3375 gfc_trans_runtime_check (true, false, tmp, &inner,
3376 &ss->expr->where, msg,
3377 fold_convert (long_integer_type_node, info->start[dim]),
3378 fold_convert (long_integer_type_node, lbound),
3379 fold_convert (long_integer_type_node, ubound));
3380 gfc_trans_runtime_check (true, false, tmp2, &inner,
3381 &ss->expr->where, msg,
3382 fold_convert (long_integer_type_node, info->start[dim]),
3383 fold_convert (long_integer_type_node, lbound),
3384 fold_convert (long_integer_type_node, ubound));
3389 tmp = fold_build2_loc (input_location, LT_EXPR,
3391 info->start[dim], lbound);
3392 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3393 boolean_type_node, non_zerosized, tmp);
3394 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3395 "below lower bound of %%ld",
3396 dim + 1, ss->expr->symtree->name);
3397 gfc_trans_runtime_check (true, false, tmp, &inner,
3398 &ss->expr->where, msg,
3399 fold_convert (long_integer_type_node, info->start[dim]),
3400 fold_convert (long_integer_type_node, lbound));
3404 /* Compute the last element of the range, which is not
3405 necessarily "end" (think 0:5:3, which doesn't contain 5)
3406 and check it against both lower and upper bounds. */
3408 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3409 gfc_array_index_type, end,
3411 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3412 gfc_array_index_type, tmp,
3414 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3415 gfc_array_index_type, end, tmp);
3416 tmp2 = fold_build2_loc (input_location, LT_EXPR,
3417 boolean_type_node, tmp, lbound);
3418 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3419 boolean_type_node, non_zerosized, tmp2);
3422 tmp3 = fold_build2_loc (input_location, GT_EXPR,
3423 boolean_type_node, tmp, ubound);
3424 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3425 boolean_type_node, non_zerosized, tmp3);
3426 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3427 "outside of expected range (%%ld:%%ld)",
3428 dim + 1, ss->expr->symtree->name);
/* NOTE(review): the extra arguments here are passed (ubound, lbound),
   while the equivalent checks above pass (lbound, ubound) for the same
   "(%ld:%ld)" format — this looks swapped; confirm which order the
   message intends before changing.  */
3429 gfc_trans_runtime_check (true, false, tmp2, &inner,
3430 &ss->expr->where, msg,
3431 fold_convert (long_integer_type_node, tmp),
3432 fold_convert (long_integer_type_node, ubound),
3433 fold_convert (long_integer_type_node, lbound));
3434 gfc_trans_runtime_check (true, false, tmp3, &inner,
3435 &ss->expr->where, msg,
3436 fold_convert (long_integer_type_node, tmp),
3437 fold_convert (long_integer_type_node, ubound),
3438 fold_convert (long_integer_type_node, lbound));
3443 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3444 "below lower bound of %%ld",
3445 dim + 1, ss->expr->symtree->name);
3446 gfc_trans_runtime_check (true, false, tmp2, &inner,
3447 &ss->expr->where, msg,
3448 fold_convert (long_integer_type_node, tmp),
3449 fold_convert (long_integer_type_node, lbound));
3453 /* Check the section sizes match. */
3454 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3455 gfc_array_index_type, end,
3457 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3458 gfc_array_index_type, tmp,
3460 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3461 gfc_array_index_type,
3462 gfc_index_one_node, tmp);
/* Section extent is clamped at zero for empty ranges.  */
3463 tmp = fold_build2_loc (input_location, MAX_EXPR,
3464 gfc_array_index_type, tmp,
3465 build_int_cst (gfc_array_index_type, 0));
3466 /* We remember the size of the first section, and check all the
3467 others against this. */
3470 tmp3 = fold_build2_loc (input_location, NE_EXPR,
3471 boolean_type_node, tmp, size[n]);
3472 asprintf (&msg, "Array bound mismatch for dimension %d "
3473 "of array '%s' (%%ld/%%ld)",
3474 dim + 1, ss->expr->symtree->name);
3476 gfc_trans_runtime_check (true, false, tmp3, &inner,
3477 &ss->expr->where, msg,
3478 fold_convert (long_integer_type_node, tmp),
3479 fold_convert (long_integer_type_node, size[n]));
3484 size[n] = gfc_evaluate_now (tmp, &inner);
3487 tmp = gfc_finish_block (&inner);
3489 /* For optional arguments, only check bounds if the argument is
3491 if (ss->expr->symtree->n.sym->attr.optional
3492 || ss->expr->symtree->n.sym->attr.not_always_present)
3493 tmp = build3_v (COND_EXPR,
3494 gfc_conv_expr_present (ss->expr->symtree->n.sym),
3495 tmp, build_empty_stmt (input_location));
3497 gfc_add_expr_to_block (&block, tmp);
3501 tmp = gfc_finish_block (&block);
3502 gfc_add_expr_to_block (&loop->pre, tmp);
3506 /* Return true if both symbols could refer to the same data object. Does
3507 not take account of aliasing due to equivalence statements. */
/* The *_pointer/*_target flags are passed in (rather than read from the
   symbols) so callers can accumulate POINTER/TARGET attributes picked up
   along a component reference chain.  */
3510 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
3511 bool lsym_target, bool rsym_pointer, bool rsym_target)
3513 /* Aliasing isn't possible if the symbols have different base types. */
3514 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
3517 /* Pointers can point to other pointers and target objects. */
3519 if ((lsym_pointer && (rsym_pointer || rsym_target))
3520 || (rsym_pointer && (lsym_pointer || lsym_target)))
3523 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
3524 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
/* Non-contiguous (e.g. assumed-shape) TARGET dummies may be argument-
   associated with the same actual argument.  */
3526 if (lsym_target && rsym_target
3527 && ((lsym->attr.dummy && !lsym->attr.contiguous
3528 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
3529 || (rsym->attr.dummy && !rsym->attr.contiguous
3530 && (!rsym->attr.dimension
3531 || rsym->as->type == AS_ASSUMED_SHAPE))))
3538 /* Return true if the two SS could be aliased, i.e. both point to the same data
3540 /* TODO: resolve aliases based on frontend expressions. */
/* Checks the base symbols first, then — for derived types — walks both
   component reference chains, accumulating POINTER/TARGET attributes and
   re-testing every symbol/component pairing.  */
3543 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
3549 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
3551 lsym = lss->expr->symtree->n.sym;
3552 rsym = rss->expr->symtree->n.sym;
3554 lsym_pointer = lsym->attr.pointer;
3555 lsym_target = lsym->attr.target;
3556 rsym_pointer = rsym->attr.pointer;
3557 rsym_target = rsym->attr.target;
3559 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
3560 rsym_pointer, rsym_target))
3563 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
3564 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
3567 /* For derived types we must check all the component types. We can ignore
3568 array references as these will have the same base type as the previous
3570 for (lref = lss->expr->ref; lref != lss->data.info.ref; lref = lref->next)
3572 if (lref->type != REF_COMPONENT)
3575 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
3576 lsym_target = lsym_target || lref->u.c.sym->attr.target;
3578 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
3579 rsym_pointer, rsym_target))
3582 if ((lsym_pointer && (rsym_pointer || rsym_target))
3583 || (rsym_pointer && (lsym_pointer || lsym_target)))
3585 if (gfc_compare_types (&lref->u.c.component->ts,
3590 for (rref = rss->expr->ref; rref != rss->data.info.ref;
3593 if (rref->type != REF_COMPONENT)
3596 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
/* NOTE(review): 'lsym_target' on the right-hand side here looks like a
   copy-paste slip — presumably should be 'rsym_target'; confirm.  */
3597 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3599 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
3600 lsym_pointer, lsym_target,
3601 rsym_pointer, rsym_target))
3604 if ((lsym_pointer && (rsym_pointer || rsym_target))
3605 || (rsym_pointer && (lsym_pointer || lsym_target)))
3607 if (gfc_compare_types (&lref->u.c.component->ts,
3608 &rref->u.c.sym->ts))
3610 if (gfc_compare_types (&lref->u.c.sym->ts,
3611 &rref->u.c.component->ts))
3613 if (gfc_compare_types (&lref->u.c.component->ts,
3614 &rref->u.c.component->ts))
/* NOTE(review): the second pair below duplicates the lsym reset above —
   presumably it was meant to reset rsym_pointer/rsym_target before the
   final walk; confirm before changing.  */
3620 lsym_pointer = lsym->attr.pointer;
3621 lsym_target = lsym->attr.target;
3622 lsym_pointer = lsym->attr.pointer;
3623 lsym_target = lsym->attr.target;
3625 for (rref = rss->expr->ref; rref != rss->data.info.ref; rref = rref->next)
3627 if (rref->type != REF_COMPONENT)
3630 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
/* NOTE(review): same suspected 'lsym_target' → 'rsym_target' slip as
   above; confirm.  */
3631 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3633 if (symbols_could_alias (rref->u.c.sym, lsym,
3634 lsym_pointer, lsym_target,
3635 rsym_pointer, rsym_target))
3638 if ((lsym_pointer && (rsym_pointer || rsym_target))
3639 || (rsym_pointer && (lsym_pointer || lsym_target)))
3641 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
3650 /* Resolve array data dependencies. Creates a temporary if required. */
3651 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
/* Scans RSS for sections that may overlap DEST.  Same-symbol overlaps are
   analyzed with gfc_dep_resolver (which may also request loop reversal or
   reordering); aliasing or equivalence with a different symbol forces a
   GFC_SS_TEMP whose rank matches the loop.  */
3655 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
3664 loop->temp_ss = NULL;
3666 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
3668 if (ss->type != GFC_SS_SECTION)
3671 if (dest->expr->symtree->n.sym != ss->expr->symtree->n.sym)
3673 if (gfc_could_be_alias (dest, ss)
3674 || gfc_are_equivalenced_arrays (dest->expr, ss->expr))
3682 lref = dest->expr->ref;
3683 rref = ss->expr->ref;
3685 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
/* A dependency also exists if the two sides traverse a shared dimension
   in different loop positions.  */
3690 for (i = 0; i < dest->data.info.dimen; i++)
3691 for (j = 0; j < ss->data.info.dimen; j++)
3693 && dest->data.info.dim[i] == ss->data.info.dim[j]
3695 /* If we don't access array elements in the same order,
3696 there is a dependency. */
3701 /* TODO : loop shifting. */
3704 /* Mark the dimensions for LOOP SHIFTING */
3705 for (n = 0; n < loop->dimen; n++)
3707 int dim = dest->data.info.dim[n];
3709 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3711 else if (! gfc_is_same_range (&lref->u.ar,
3712 &rref->u.ar, dim, 0))
3716 /* Put all the dimensions with dependencies in the
3719 for (n = 0; n < loop->dimen; n++)
3721 gcc_assert (loop->order[n] == n);
3723 loop->order[dim++] = n;
3725 for (n = 0; n < loop->dimen; n++)
3728 loop->order[dim++] = n;
3731 gcc_assert (dim == loop->dimen);
/* A temporary is required: build a GFC_SS_TEMP of the destination's
   element type and splice it into the loop's SS chain.  */
3742 tree base_type = gfc_typenode_for_spec (&dest->expr->ts);
3743 if (GFC_ARRAY_TYPE_P (base_type)
3744 || GFC_DESCRIPTOR_TYPE_P (base_type))
3745 base_type = gfc_get_element_type (base_type);
3746 loop->temp_ss = gfc_get_ss ();
3747 loop->temp_ss->type = GFC_SS_TEMP;
3748 loop->temp_ss->data.temp.type = base_type;
3749 loop->temp_ss->string_length = dest->string_length;
3750 loop->temp_ss->data.temp.dimen = loop->dimen;
3751 loop->temp_ss->data.temp.codimen = loop->codimen;
3752 loop->temp_ss->next = gfc_ss_terminator;
3753 gfc_add_ss_to_loop (loop, loop->temp_ss);
3756 loop->temp_ss = NULL;
3760 /* Initialize the scalarization loop. Creates the loop variables. Determines
3761 the range of the loop variables. Creates a temporary if required.
3762 Calculates how to transform from loop variables to array indices for each
3763 expression. Also generates code for scalar expressions which have been
3764 moved outside the loop. */
3767 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
/* NOTE(review): this extract omits intermediate source lines; comments
   below describe only the statements that are visible here.  */
3769 int n, dim, spec_dim;
3771 gfc_ss_info *specinfo;
3774 gfc_ss *loopspec[GFC_MAX_DIMENSIONS];
3775 bool dynamic[GFC_MAX_DIMENSIONS];
/* Phase 1: for each loop dimension (including codimensions), pick one
   SS term (the "loop specifier") whose bounds will drive the loop.  */
3780 for (n = 0; n < loop->dimen + loop->codimen; n++)
3784 /* We use one SS term, and use that to determine the bounds of the
3785 loop for this dimension. We try to pick the simplest term. */
3786 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Scalars and references carry no loop bounds; skip them.  */
3788 if (ss->type == GFC_SS_SCALAR || ss->type == GFC_SS_REFERENCE)
3791 info = &ss->data.info;
3794 if (loopspec[n] != NULL)
3796 specinfo = &loopspec[n]->data.info;
3797 spec_dim = specinfo->dim[n];
3801 /* Silence unitialized warnings. */
3808 gcc_assert (ss->shape[dim]);
3809 /* The frontend has worked out the size for us. */
3811 || !loopspec[n]->shape
3812 || !integer_zerop (specinfo->start[spec_dim]))
3813 /* Prefer zero-based descriptors if possible. */
3818 if (ss->type == GFC_SS_CONSTRUCTOR)
3820 gfc_constructor_base base;
3821 /* An unknown size constructor will always be rank one.
3822 Higher rank constructors will either have known shape,
3823 or still be wrapped in a call to reshape. */
3824 gcc_assert (loop->dimen == 1);
3826 /* Always prefer to use the constructor bounds if the size
3827 can be determined at compile time. Prefer not to otherwise,
3828 since the general case involves realloc, and it's better to
3829 avoid that overhead if possible. */
3830 base = ss->expr->value.constructor;
3831 dynamic[n] = gfc_get_array_constructor_size (&i, base);
3832 if (!dynamic[n] || !loopspec[n])
3837 /* TODO: Pick the best bound if we have a choice between a
3838 function and something else. */
3839 if (ss->type == GFC_SS_FUNCTION)
3845 /* Avoid using an allocatable lhs in an assignment, since
3846 there might be a reallocation coming. */
3847 if (loopspec[n] && ss->is_alloc_lhs)
3850 if (ss->type != GFC_SS_SECTION)
3855 /* Criteria for choosing a loop specifier (most important first):
3856 doesn't need realloc
3862 else if ((loopspec[n]->type == GFC_SS_CONSTRUCTOR && dynamic[n])
3863 || n >= loop->dimen)
3865 else if (integer_onep (info->stride[dim])
3866 && !integer_onep (specinfo->stride[spec_dim]))
3868 else if (INTEGER_CST_P (info->stride[dim])
3869 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
3871 else if (INTEGER_CST_P (info->start[dim])
3872 && !INTEGER_CST_P (specinfo->start[spec_dim]))
3874 /* We don't work out the upper bound.
3875 else if (INTEGER_CST_P (info->finish[n])
3876 && ! INTEGER_CST_P (specinfo->finish[n]))
3877 loopspec[n] = ss; */
3880 /* We should have found the scalarization loop specifier. If not,
3882 gcc_assert (loopspec[n]);
3884 info = &loopspec[n]->data.info;
3887 /* Set the extents of this range. */
3888 cshape = loopspec[n]->shape;
/* Compile-time-known shape with constant start/stride: compute the
   upper bound directly from the shape.  */
3889 if (n < loop->dimen && cshape && INTEGER_CST_P (info->start[dim])
3890 && INTEGER_CST_P (info->stride[dim]))
3892 loop->from[n] = info->start[dim];
3893 mpz_set (i, cshape[get_array_ref_dim (info, n)]);
3894 mpz_sub_ui (i, i, 1);
3895 /* To = from + (size - 1) * stride. */
3896 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
3897 if (!integer_onep (info->stride[dim]))
3898 tmp = fold_build2_loc (input_location, MULT_EXPR,
3899 gfc_array_index_type, tmp,
3901 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
3902 gfc_array_index_type,
3903 loop->from[n], tmp);
/* Otherwise the bound depends on the kind of loop specifier.  */
3907 loop->from[n] = info->start[dim];
3908 switch (loopspec[n]->type)
3910 case GFC_SS_CONSTRUCTOR:
3911 /* The upper bound is calculated when we expand the
3913 gcc_assert (loop->to[n] == NULL_TREE);
3916 case GFC_SS_SECTION:
3917 /* Use the end expression if it exists and is not constant,
3918 so that it is only evaluated once. */
3919 loop->to[n] = info->end[dim];
3922 case GFC_SS_FUNCTION:
3923 /* The loop bound will be set when we generate the call. */
3924 gcc_assert (loop->to[n] == NULL_TREE);
3932 /* Transform everything so we have a simple incrementing variable. */
3933 if (n < loop->dimen && integer_onep (info->stride[dim]))
3934 info->delta[dim] = gfc_index_zero_node;
3935 else if (n < loop->dimen)
3937 /* Set the delta for this section. */
3938 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
3939 /* Number of iterations is (end - start + step) / step.
3940 with start = 0, this simplifies to
3942 for (i = 0; i<=last; i++){...}; */
3943 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3944 gfc_array_index_type, loop->to[n],
3946 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3947 gfc_array_index_type, tmp, info->stride[dim]);
/* Clamp to -1 so that an empty range still produces a well-formed
   0-trip loop (from 0 to -1).  */
3948 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
3949 tmp, build_int_cst (gfc_array_index_type, -1));
3950 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
3951 /* Make the loop variable start at 0. */
3952 loop->from[n] = gfc_index_zero_node;
3956 /* Add all the scalar code that can be taken out of the loops.
3957 This may include calculating the loop bounds, so do it before
3958 allocating the temporary. */
3959 gfc_add_loop_ss_code (loop, loop->ss, false, where);
3961 /* If we want a temporary then create it. */
3962 if (loop->temp_ss != NULL)
3964 gcc_assert (loop->temp_ss->type == GFC_SS_TEMP);
3966 /* Make absolutely sure that this is a complete type. */
3967 if (loop->temp_ss->string_length)
3968 loop->temp_ss->data.temp.type
3969 = gfc_get_character_type_len_for_eltype
3970 (TREE_TYPE (loop->temp_ss->data.temp.type),
3971 loop->temp_ss->string_length);
/* Convert the GFC_SS_TEMP node into a GFC_SS_SECTION over the freshly
   created temporary array; dim[] becomes the identity map.  */
3973 tmp = loop->temp_ss->data.temp.type;
3974 n = loop->temp_ss->data.temp.dimen;
3975 memset (&loop->temp_ss->data.info, 0, sizeof (gfc_ss_info));
3976 loop->temp_ss->type = GFC_SS_SECTION;
3977 loop->temp_ss->data.info.dimen = n;
3979 gcc_assert (loop->temp_ss->data.info.dimen != 0);
3980 for (n = 0; n < loop->temp_ss->data.info.dimen; n++)
3981 loop->temp_ss->data.info.dim[n] = n;
3983 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
3984 &loop->temp_ss->data.info, tmp, NULL_TREE,
3985 false, true, false, where);
3988 for (n = 0; n < loop->temp_dim; n++)
3989 loopspec[loop->order[n]] = NULL;
3993 /* For array parameters we don't have loop variables, so don't calculate the
3995 if (loop->array_parameter)
3998 /* Calculate the translation from loop variables to array indices. */
3999 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4001 if (ss->type != GFC_SS_SECTION && ss->type != GFC_SS_COMPONENT
4002 && ss->type != GFC_SS_CONSTRUCTOR)
4006 info = &ss->data.info;
4008 for (n = 0; n < info->dimen; n++)
4010 /* If we are specifying the range the delta is already set. */
4011 if (loopspec[n] != ss)
4013 dim = ss->data.info.dim[n];
4015 /* Calculate the offset relative to the loop variable.
4016 First multiply by the stride. */
4017 tmp = loop->from[n];
4018 if (!integer_onep (info->stride[dim]))
4019 tmp = fold_build2_loc (input_location, MULT_EXPR,
4020 gfc_array_index_type,
4021 tmp, info->stride[dim]);
4023 /* Then subtract this from our starting value. */
4024 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4025 gfc_array_index_type,
4026 info->start[dim], tmp);
4028 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4035 /* Calculate the size of a given array dimension from the bounds. This
4036 is simply (ubound - lbound + 1) if this expression is positive
4037 or 0 if it is negative (pick either one if it is zero). Optionally
4038 (if or_expr is present) OR the (expression != 0) condition to it. */
4041 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4046 /* Calculate (ubound - lbound + 1). */
4047 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4049 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4050 gfc_index_one_node);
4052 /* Check whether the size for this dimension is negative. */
4053 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4054 gfc_index_zero_node);
/* Clamp: cond is (res <= 0), so the COND_EXPR yields 0 for empty or
   negative extents and res otherwise.  */
4055 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4056 gfc_index_zero_node, res);
4058 /* Build OR expression. */
/* NOTE(review): a NULL check on or_expr presumably guards this store on
   a line missing from this extract (callers pass NULL, see the header
   comment) -- confirm against the full source.  */
4060 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4061 boolean_type_node, *or_expr, cond);
4067 /* For an array descriptor, get the total number of elements. This is just
4068 the product of the extents along from_dim to to_dim. */
4071 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
/* Accumulate the product of per-dimension extents, starting at 1.  */
4076 res = gfc_index_one_node;
4078 for (dim = from_dim; dim < to_dim; ++dim)
4084 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4085 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
/* NULL or_expr: no emptiness condition is accumulated here.  */
4087 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4088 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4096 /* Full size of an array. */
/* Convenience wrapper: product of extents over all RANK dimensions.  */
4099 gfc_conv_descriptor_size (tree desc, int rank)
4101 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4105 /* Size of a coarray for all dimensions but the last. */
/* Codimensions occupy descriptor slots [rank, rank+corank); the last
   codimension is excluded from the product.  */
4108 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4110 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4114 /* Helper function for marking a boolean expression tree as unlikely. */
4117 gfc_unlikely (tree cond)
/* Wrap COND in __builtin_expect (cond, 0): the builtin takes long
   arguments, hence the conversions to and from long_integer_type_node.  */
4121 cond = fold_convert (long_integer_type_node, cond);
4122 tmp = build_zero_cst (long_integer_type_node);
4123 cond = build_call_expr_loc (input_location,
4124 built_in_decls[BUILT_IN_EXPECT], 2, cond, tmp);
4125 cond = fold_convert (boolean_type_node, cond);
4129 /* Fills in an array descriptor, and returns the size of the array.
4130 The size will be a simple_val, ie a variable or a constant. Also
4131 calculates the offset of the base. The pointer argument overflow,
4132 which should be of integer type, will increase in value if overflow
4133 occurs during the size calculation. Returns the size of the array.
4137 for (n = 0; n < rank; n++)
4139 a.lbound[n] = specified_lower_bound;
4140 offset = offset + a.lbond[n] * stride;
4142 a.ubound[n] = specified_upper_bound;
4143 a.stride[n] = stride;
4144 size = siz >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4145 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4146 stride = stride * size;
4148 element_size = sizeof (array element);
4149 stride = (size_t) stride;
4150 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4151 stride = stride * element_size;
4157 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4158 gfc_expr ** lower, gfc_expr ** upper,
4159 stmtblock_t * pblock, tree * overflow)
/* NOTE(review): this extract omits intermediate source lines; comments
   below describe only the statements that are visible here.  */
4172 stmtblock_t thenblock;
4173 stmtblock_t elseblock;
4178 type = TREE_TYPE (descriptor);
4180 stride = gfc_index_one_node;
4181 offset = gfc_index_zero_node;
4183 /* Set the dtype. */
4184 tmp = gfc_conv_descriptor_dtype (descriptor);
4185 gfc_add_modify (pblock, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
/* or_expr accumulates "some dimension is empty" across all dimensions.  */
4187 or_expr = boolean_false_node;
4189 for (n = 0; n < rank; n++)
4194 /* We have 3 possibilities for determining the size of the array:
4195 lower == NULL => lbound = 1, ubound = upper[n]
4196 upper[n] = NULL => lbound = 1, ubound = lower[n]
4197 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4200 /* Set lower bound. */
4201 gfc_init_se (&se, NULL);
4203 se.expr = gfc_index_one_node;
4206 gcc_assert (lower[n]);
4209 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4210 gfc_add_block_to_block (pblock, &se.pre);
4214 se.expr = gfc_index_one_node;
4218 gfc_conv_descriptor_lbound_set (pblock, descriptor, gfc_rank_cst[n],
4220 conv_lbound = se.expr;
4222 /* Work out the offset for this component. */
4223 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4225 offset = fold_build2_loc (input_location, MINUS_EXPR,
4226 gfc_array_index_type, offset, tmp);
4228 /* Set upper bound. */
4229 gfc_init_se (&se, NULL);
4230 gcc_assert (ubound);
4231 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4232 gfc_add_block_to_block (pblock, &se.pre);
4234 gfc_conv_descriptor_ubound_set (pblock, descriptor,
4235 gfc_rank_cst[n], se.expr);
4236 conv_ubound = se.expr;
4238 /* Store the stride. */
4239 gfc_conv_descriptor_stride_set (pblock, descriptor,
4240 gfc_rank_cst[n], stride);
4242 /* Calculate size and check whether extent is negative. */
4243 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4244 size = gfc_evaluate_now (size, pblock);
4246 /* Check whether multiplying the stride by the number of
4247 elements in this dimension would overflow. We must also check
4248 whether the current dimension has zero size in order to avoid
/* Overflow test: MAX / size < stride (division avoids overflowing the
   check itself); a zero-size dimension is exempted below.  */
4251 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4252 gfc_array_index_type,
4253 fold_convert (gfc_array_index_type,
4254 TYPE_MAX_VALUE (gfc_array_index_type)),
4256 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4257 boolean_type_node, tmp, stride));
4258 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4259 integer_one_node, integer_zero_node);
4260 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4261 boolean_type_node, size,
4262 gfc_index_zero_node));
4263 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4264 integer_zero_node, tmp);
4265 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4267 *overflow = gfc_evaluate_now (tmp, pblock);
4269 /* Multiply the stride by the number of elements in this dimension. */
4270 stride = fold_build2_loc (input_location, MULT_EXPR,
4271 gfc_array_index_type, stride, size);
4272 stride = gfc_evaluate_now (stride, pblock);
/* Codimensions: only bounds are stored; they do not contribute to the
   allocated size.  */
4275 for (n = rank; n < rank + corank; n++)
4279 /* Set lower bound. */
4280 gfc_init_se (&se, NULL);
4281 if (lower == NULL || lower[n] == NULL)
4283 gcc_assert (n == rank + corank - 1);
4284 se.expr = gfc_index_one_node;
4288 if (ubound || n == rank + corank - 1)
4290 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4291 gfc_add_block_to_block (pblock, &se.pre);
4295 se.expr = gfc_index_one_node;
4299 gfc_conv_descriptor_lbound_set (pblock, descriptor, gfc_rank_cst[n],
/* The last codimension gets no upper bound (it is unbounded).  */
4302 if (n < rank + corank - 1)
4304 gfc_init_se (&se, NULL);
4305 gcc_assert (ubound);
4306 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4307 gfc_add_block_to_block (pblock, &se.pre);
4308 gfc_conv_descriptor_ubound_set (pblock, descriptor,
4309 gfc_rank_cst[n], se.expr);
4313 /* The stride is the number of elements in the array, so multiply by the
4314 size of an element to get the total size. */
4315 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4316 /* Convert to size_t. */
4317 element_size = fold_convert (size_type_node, tmp);
4318 stride = fold_convert (size_type_node, stride);
4320 /* First check for overflow. Since an array of type character can
4321 have zero element_size, we must check for that before
4323 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4325 TYPE_MAX_VALUE (size_type_node), element_size);
4326 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4327 boolean_type_node, tmp, stride));
4328 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4329 integer_one_node, integer_zero_node);
4330 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4331 boolean_type_node, element_size,
4332 build_int_cst (size_type_node, 0)));
4333 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4334 integer_zero_node, tmp);
4335 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4337 *overflow = gfc_evaluate_now (tmp, pblock);
/* Total byte size = element count * element size.  */
4339 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
4340 stride, element_size);
4342 if (poffset != NULL)
4344 offset = gfc_evaluate_now (offset, pblock);
/* Constant emptiness condition: fold to 0 bytes or plain SIZE without
   emitting a runtime test.  */
4348 if (integer_zerop (or_expr))
4350 if (integer_onep (or_expr))
4351 return build_int_cst (size_type_node, 0);
/* Otherwise emit: var = or_expr ? 0 : size;  */
4353 var = gfc_create_var (TREE_TYPE (size), "size");
4354 gfc_start_block (&thenblock);
4355 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
4356 thencase = gfc_finish_block (&thenblock);
4358 gfc_start_block (&elseblock);
4359 gfc_add_modify (&elseblock, var, size);
4360 elsecase = gfc_finish_block (&elseblock);
4362 tmp = gfc_evaluate_now (or_expr, pblock);
4363 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
4364 gfc_add_expr_to_block (pblock, tmp);
4370 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
4371 the work for an ALLOCATE statement. */
4375 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree pstat)
/* NOTE(review): this extract omits intermediate source lines; comments
   below describe only the statements that are visible here.  */
4383 tree overflow; /* Boolean storing whether size calculation overflows. */
4386 stmtblock_t elseblock;
4389 gfc_ref *ref, *prev_ref = NULL;
4390 bool allocatable_array, coarray;
4394 /* Find the last reference in the chain. */
4395 while (ref && ref->next != NULL)
4397 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
4398 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
4403 if (ref == NULL || ref->type != REF_ARRAY)
/* No component reference: attributes come from the symbol itself;
   otherwise from the last component reference.  */
4408 allocatable_array = expr->symtree->n.sym->attr.allocatable;
4409 coarray = expr->symtree->n.sym->attr.codimension;
4413 allocatable_array = prev_ref->u.c.component->attr.allocatable;
4414 coarray = prev_ref->u.c.component->attr.codimension;
4417 /* Return if this is a scalar coarray. */
4418 if ((!prev_ref && !expr->symtree->n.sym->attr.dimension)
4419 || (prev_ref && !prev_ref->u.c.component->attr.dimension))
4421 gcc_assert (coarray);
4425 /* Figure out the size of the array. */
/* The bounds come either from the ALLOCATE spec (start/end) or, for an
   explicit shape, from the array spec itself.  */
4426 switch (ref->u.ar.type)
4432 upper = ref->u.ar.start;
4438 lower = ref->u.ar.start;
4439 upper = ref->u.ar.end;
4443 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
4445 lower = ref->u.ar.as->lower;
4446 upper = ref->u.ar.as->upper;
4454 overflow = integer_zero_node;
4455 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
4456 ref->u.ar.as->corank, &offset, lower, upper,
4457 &se->pre, &overflow);
4459 var_overflow = gfc_create_var (integer_type_node, "overflow");
4460 gfc_add_modify (&se->pre, var_overflow, overflow);
4462 /* Generate the block of code handling overflow. */
4463 msg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const
4464 ("Integer overflow when calculating the amount of "
4465 "memory to allocate"));
4466 error = build_call_expr_loc (input_location,
4467 gfor_fndecl_runtime_error, 1, msg);
4469 if (pstat != NULL_TREE && !integer_zerop (pstat))
4471 /* Set the status variable if it's present. */
4472 stmtblock_t set_status_block;
4473 tree status_type = pstat ? TREE_TYPE (TREE_TYPE (pstat)) : NULL_TREE;
4475 gfc_start_block (&set_status_block);
4476 gfc_add_modify (&set_status_block,
4477 fold_build1_loc (input_location, INDIRECT_REF,
4478 status_type, pstat),
4479 build_int_cst (status_type, LIBERROR_ALLOCATION));
/* On overflow: abort with the runtime error if no STAT variable was
   given, otherwise store LIBERROR_ALLOCATION into *pstat.  */
4481 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
4482 pstat, build_int_cst (TREE_TYPE (pstat), 0));
4483 error = fold_build3_loc (input_location, COND_EXPR, void_type_node, tmp,
4484 error, gfc_finish_block (&set_status_block));
4487 gfc_start_block (&elseblock);
4489 /* Allocate memory to store the data. */
4490 pointer = gfc_conv_descriptor_data_get (se->expr);
4491 STRIP_NOPS (pointer);
4493 /* The allocate_array variants take the old pointer as first argument. */
4494 if (allocatable_array)
4495 tmp = gfc_allocate_array_with_status (&elseblock, pointer, size, pstat, expr);
4497 tmp = gfc_allocate_with_status (&elseblock, size, pstat);
4498 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node, pointer,
4501 gfc_add_expr_to_block (&elseblock, tmp);
/* Only allocate when the overflow flag stayed zero.  */
4503 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
4504 var_overflow, integer_zero_node));
4505 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
4506 error, gfc_finish_block (&elseblock));
4508 gfc_add_expr_to_block (&se->pre, tmp);
4510 gfc_conv_descriptor_offset_set (&se->pre, se->expr, offset);
/* Derived types with allocatable components need their component
   pointers nullified after the raw allocation.  */
4512 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
4513 && expr->ts.u.derived->attr.alloc_comp)
4515 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
4516 ref->u.ar.as->rank);
4517 gfc_add_expr_to_block (&se->pre, tmp);
4524 /* Deallocate an array variable. Also used when an allocated variable goes
/* Returns a statement block that frees the descriptor's data and then
   nullifies the data pointer so a later deallocation is detectable.  */
4529 gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
4535 gfc_start_block (&block);
4536 /* Get a pointer to the data. */
4537 var = gfc_conv_descriptor_data_get (descriptor);
4540 /* Parameter is the address of the data component. */
4541 tmp = gfc_deallocate_with_status (var, pstat, false, expr);
4542 gfc_add_expr_to_block (&block, tmp);
4544 /* Zero the data pointer. */
4545 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4546 var, build_int_cst (TREE_TYPE (var), 0));
4547 gfc_add_expr_to_block (&block, tmp);
4549 return gfc_finish_block (&block);
4553 /* Create an array constructor from an initialization expression.
4554 We assume the frontend already did any expansions and conversions. */
4557 gfc_conv_array_initializer (tree type, gfc_expr * expr)
/* NOTE(review): this extract omits intermediate source lines; comments
   below describe only the statements that are visible here.  */
4563 unsigned HOST_WIDE_INT lo;
4565 VEC(constructor_elt,gc) *v = NULL;
4567 switch (expr->expr_type)
4570 case EXPR_STRUCTURE:
4571 /* A single scalar or derived type value. Create an array with all
4572 elements equal to that value. */
4573 gfc_init_se (&se, NULL);
4575 if (expr->expr_type == EXPR_CONSTANT)
4576 gfc_conv_constant (&se, expr);
4578 gfc_conv_structure (&se, expr, 1);
/* hi/lo hold the (two-word) element count taken from the array type's
   index domain maximum.  */
4580 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4581 gcc_assert (tmp && INTEGER_CST_P (tmp));
4582 hi = TREE_INT_CST_HIGH (tmp);
4583 lo = TREE_INT_CST_LOW (tmp);
4587 /* This will probably eat buckets of memory for large arrays. */
4588 while (hi != 0 || lo != 0)
4590 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
4598 /* Create a vector of all the elements. */
4599 for (c = gfc_constructor_first (expr->value.constructor);
4600 c; c = gfc_constructor_next (c))
4604 /* Problems occur when we get something like
4605 integer :: a(lots) = (/(i, i=1, lots)/) */
4606 gfc_fatal_error ("The number of elements in the array constructor "
4607 "at %L requires an increase of the allowed %d "
4608 "upper limit. See -fmax-array-constructor "
4609 "option", &expr->where,
4610 gfc_option.flag_max_array_constructor);
/* A zero offset uses the default (NULL) constructor index.  */
4613 if (mpz_cmp_si (c->offset, 0) != 0)
4614 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4618 gfc_init_se (&se, NULL);
4619 switch (c->expr->expr_type)
4622 gfc_conv_constant (&se, c->expr);
4623 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4626 case EXPR_STRUCTURE:
4627 gfc_conv_structure (&se, c->expr, 1);
4628 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4633 /* Catch those occasional beasts that do not simplify
4634 for one reason or another, assuming that if they are
4635 standard defying the frontend will catch them. */
4636 gfc_conv_expr (&se, c->expr);
4637 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4644 return gfc_build_null_descriptor (type);
4650 /* Create a constructor from the list of elements. */
4651 tmp = build_constructor (type, v);
4652 TREE_CONSTANT (tmp) = 1;
4657 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
4658 returns the size (in elements) of the array. */
4661 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
4662 stmtblock_t * pblock)
/* NOTE(review): this extract omits intermediate source lines; comments
   below describe only the statements that are visible here.  */
4677 size = gfc_index_one_node;
4678 offset = gfc_index_zero_node;
4679 for (dim = 0; dim < as->rank; dim++)
4681 /* Evaluate non-constant array bound expressions. */
4682 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4683 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4685 gfc_init_se (&se, NULL);
4686 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4687 gfc_add_block_to_block (pblock, &se.pre);
4688 gfc_add_modify (pblock, lbound, se.expr);
4690 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4691 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4693 gfc_init_se (&se, NULL);
4694 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4695 gfc_add_block_to_block (pblock, &se.pre);
4696 gfc_add_modify (pblock, ubound, se.expr);
4698 /* The offset of this dimension. offset = offset - lbound * stride. */
4699 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4701 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4704 /* The size of this dimension, and the stride of the next. */
4705 if (dim + 1 < as->rank)
4706 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
4708 stride = GFC_TYPE_ARRAY_SIZE (type);
/* Only compute the stride at runtime if it is not already a constant
   recorded in the type.  */
4710 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
4712 /* Calculate stride = size * (ubound + 1 - lbound). */
4713 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4714 gfc_array_index_type,
4715 gfc_index_one_node, lbound);
4716 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4717 gfc_array_index_type, ubound, tmp);
4718 tmp = fold_build2_loc (input_location, MULT_EXPR,
4719 gfc_array_index_type, size, tmp);
4721 gfc_add_modify (pblock, stride, tmp);
4723 stride = gfc_evaluate_now (tmp, pblock);
4725 /* Make sure that negative size arrays are translated
4726 to being zero size. */
4727 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4728 stride, gfc_index_zero_node);
4729 tmp = fold_build3_loc (input_location, COND_EXPR,
4730 gfc_array_index_type, tmp,
4731 stride, gfc_index_zero_node);
4732 gfc_add_modify (pblock, stride, tmp);
/* Codimension bounds are evaluated too, but contribute neither to the
   offset nor to the size.  */
4737 for (dim = as->rank; dim < as->rank + as->corank; dim++)
4739 /* Evaluate non-constant array bound expressions. */
4740 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4741 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4743 gfc_init_se (&se, NULL);
4744 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4745 gfc_add_block_to_block (pblock, &se.pre);
4746 gfc_add_modify (pblock, lbound, se.expr);
4748 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4749 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4751 gfc_init_se (&se, NULL);
4752 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4753 gfc_add_block_to_block (pblock, &se.pre);
4754 gfc_add_modify (pblock, ubound, se.expr);
4757 gfc_trans_vla_type_sizes (sym, pblock);
4764 /* Generate code to initialize/allocate an array variable. */
4767 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
4768 gfc_wrapped_block * block)
/* NOTE(review): this extract omits intermediate source lines; comments
   below describe only the statements that are visible here.  */
4772 tree tmp = NULL_TREE;
4779 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
4781 /* Do nothing for USEd variables. */
4782 if (sym->attr.use_assoc)
4785 type = TREE_TYPE (decl);
4786 gcc_assert (GFC_ARRAY_TYPE_P (type));
/* Non-pointer decl means the array lives on the stack.  */
4787 onstack = TREE_CODE (type) != POINTER_TYPE;
4789 gfc_start_block (&init);
4791 /* Evaluate character string length. */
4792 if (sym->ts.type == BT_CHARACTER
4793 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl)
4795 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4797 gfc_trans_vla_type_sizes (sym, &init);
4799 /* Emit a DECL_EXPR for this variable, which will cause the
4800 gimplifier to allocate storage, and all that good stuff. */
4801 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
4802 gfc_add_expr_to_block (&init, tmp);
4807 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* Heap/pointer case: strip one level of indirection to get the array
   type itself.  */
4811 type = TREE_TYPE (type);
4813 gcc_assert (!sym->attr.use_assoc);
4814 gcc_assert (!TREE_STATIC (decl));
4815 gcc_assert (!sym->module);
4817 if (sym->ts.type == BT_CHARACTER
4818 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
4819 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4821 size = gfc_trans_array_bounds (type, sym, &offset, &init);
4823 /* Don't actually allocate space for Cray Pointees. */
4824 if (sym->attr.cray_pointee)
4826 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4827 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4829 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* With -fstack-arrays, build a VLA-typed local ("A") instead of
   calling malloc.  */
4833 if (gfc_option.flag_stack_arrays)
4835 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
4836 space = build_decl (sym->declared_at.lb->location,
4837 VAR_DECL, create_tmp_var_name ("A"),
4838 TREE_TYPE (TREE_TYPE (decl)));
4839 gfc_trans_vla_type_sizes (sym, &init);
4843 /* The size is the number of elements in the array, so multiply by the
4844 size of an element to get the total size. */
4845 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4846 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4847 size, fold_convert (gfc_array_index_type, tmp));
4849 /* Allocate memory to hold the data. */
4850 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
4851 gfc_add_modify (&init, decl, tmp);
4853 /* Free the temporary. */
4854 tmp = gfc_call_free (convert (pvoid_type_node, decl));
4858 /* Set offset of the array. */
4859 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4860 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4862 /* Automatic arrays should not have initializers. */
4863 gcc_assert (!sym->value);
4865 inittree = gfc_finish_block (&init);
4872 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
4873 where also space is located. */
4874 gfc_init_block (&init);
4875 tmp = fold_build1_loc (input_location, DECL_EXPR,
4876 TREE_TYPE (space), space);
4877 gfc_add_expr_to_block (&init, tmp);
/* Point the pointer decl at the stack-allocated space.  */
4878 addr = fold_build1_loc (sym->declared_at.lb->location,
4879 ADDR_EXPR, TREE_TYPE (decl), space);
4880 gfc_add_modify (&init, decl, addr);
4881 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
4884 gfc_add_init_cleanup (block, inittree, tmp);
4888 /* Generate entry and exit code for g77 calling convention arrays. */
4891 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
/* Temporarily switch the backend locus to the symbol's declaration so
   the generated code is attributed to it; restored below.  */
4901 gfc_save_backend_locus (&loc);
4902 gfc_set_backend_locus (&sym->declared_at);
4904 /* Descriptor type. */
4905 parm = sym->backend_decl;
4906 type = TREE_TYPE (parm);
4907 gcc_assert (GFC_ARRAY_TYPE_P (type));
4909 gfc_start_block (&init);
4911 if (sym->ts.type == BT_CHARACTER
4912 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
4913 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
4915 /* Evaluate the bounds of the array. */
4916 gfc_trans_array_bounds (type, sym, &offset, &init);
4918 /* Set the offset. */
4919 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
4920 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
4922 /* Set the pointer itself if we aren't using the parameter directly. */
4923 if (TREE_CODE (parm) != PARM_DECL)
4925 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
4926 gfc_add_modify (&init, parm, tmp);
4928 stmt = gfc_finish_block (&init);
4930 gfc_restore_backend_locus (&loc);
4932 /* Add the initialization code to the start of the function. */
/* Optional dummies: only run the setup when the argument is present.  */
4934 if (sym->attr.optional || sym->attr.not_always_present)
4936 tmp = gfc_conv_expr_present (sym);
4937 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
4940 gfc_add_init_cleanup (block, stmt, NULL_TREE);
4944 /* Modify the descriptor of an array parameter so that it has the
4945 correct lower bound. Also move the upper bound accordingly.
4946 If the array is not packed, it will be copied into a temporary.
4947 For each dimension we set the new lower and upper bounds. Then we copy the
4948 stride and calculate the offset for this dimension. We also work out
4949 what the stride of a packed array would be, and see it the two match.
4950 If the array need repacking, we set the stride to the values we just
4951 calculated, recalculate the offset and copy the array data.
4952 Code is also added to copy the data back at the end of the function.
4956 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
4957 gfc_wrapped_block * block)
4964 tree stmtInit, stmtCleanup;
4971 tree stride, stride2;
4981 /* Do nothing for pointer and allocatable arrays. */
4982 if (sym->attr.pointer || sym->attr.allocatable)
/* Descriptorless dummies take the simpler g77 entry-code path.  */
4985 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
4987 gfc_trans_g77_array (sym, block);
4991 gfc_save_backend_locus (&loc);
4992 gfc_set_backend_locus (&sym->declared_at);
4994 /* Descriptor type. */
/* TMPDESC is the local (possibly repacked) array variable; its saved
   descriptor (dumdesc, dereferenced below) is the caller's actual
   argument descriptor.  */
4995 type = TREE_TYPE (tmpdesc);
4996 gcc_assert (GFC_ARRAY_TYPE_P (type));
4997 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
4998 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
4999 gfc_start_block (&init);
5001 if (sym->ts.type == BT_CHARACTER
5002 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5003 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
/* Explicit-shape dummies get run-time extent checks when -fcheck=bounds
   is in effect.  */
5005 checkparm = (sym->as->type == AS_EXPLICIT
5006 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5008 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5009 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5011 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5013 /* For non-constant shape arrays we only check if the first dimension
5014 is contiguous. Repacking higher dimensions wouldn't gain us
5015 anything as we still don't know the array stride. */
5016 partial = gfc_create_var (boolean_type_node, "partial");
5017 TREE_USED (partial) = 1;
5018 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5019 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5020 gfc_index_one_node);
5021 gfc_add_modify (&init, partial, tmp);
5024 partial = NULL_TREE;
5026 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5027 here, however I think it does the right thing. */
5030 /* Set the first stride. */
/* A zero incoming stride is normalized to one, so later arithmetic
   never multiplies by zero.  */
5031 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5032 stride = gfc_evaluate_now (stride, &init);
5034 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5035 stride, gfc_index_zero_node);
5036 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5037 tmp, gfc_index_one_node, stride);
5038 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5039 gfc_add_modify (&init, stride, tmp);
5041 /* Allow the user to disable array repacking. */
5042 stmt_unpacked = NULL_TREE;
5046 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5047 /* A library call to repack the array if necessary. */
5048 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5049 stmt_unpacked = build_call_expr_loc (input_location,
5050 gfor_fndecl_in_pack, 1, tmp);
5052 stride = gfc_index_one_node;
5054 if (gfc_option.warn_array_temp)
5055 gfc_warning ("Creating array temporary at %L", &loc);
5058 /* This is for the case where the array data is used directly without
5059 calling the repack function. */
5060 if (no_repack || partial != NULL_TREE)
5061 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5063 stmt_packed = NULL_TREE;
5065 /* Assign the data pointer. */
5066 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5068 /* Don't repack unknown shape arrays when the first stride is 1. */
5069 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5070 partial, stmt_packed, stmt_unpacked);
5073 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5074 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5076 offset = gfc_index_zero_node;
5077 size = gfc_index_one_node;
5079 /* Evaluate the bounds of the array. */
5080 for (n = 0; n < sym->as->rank; n++)
5082 if (checkparm || !sym->as->upper[n])
5084 /* Get the bounds of the actual parameter. */
5085 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5086 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5090 dubound = NULL_TREE;
5091 dlbound = NULL_TREE;
5094 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5095 if (!INTEGER_CST_P (lbound))
5097 gfc_init_se (&se, NULL);
5098 gfc_conv_expr_type (&se, sym->as->lower[n],
5099 gfc_array_index_type);
5100 gfc_add_block_to_block (&init, &se.pre);
5101 gfc_add_modify (&init, lbound, se.expr);
5104 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5105 /* Set the desired upper bound. */
5106 if (sym->as->upper[n])
5108 /* We know what we want the upper bound to be. */
5109 if (!INTEGER_CST_P (ubound))
5111 gfc_init_se (&se, NULL);
5112 gfc_conv_expr_type (&se, sym->as->upper[n],
5113 gfc_array_index_type);
5114 gfc_add_block_to_block (&init, &se.pre);
5115 gfc_add_modify (&init, ubound, se.expr);
5118 /* Check the sizes match. */
5121 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
/* temp = extent of the declared dummy; stride2 = extent of the actual
   argument.  A runtime check reports a mismatch via the library.  */
5125 temp = fold_build2_loc (input_location, MINUS_EXPR,
5126 gfc_array_index_type, ubound, lbound);
5127 temp = fold_build2_loc (input_location, PLUS_EXPR,
5128 gfc_array_index_type,
5129 gfc_index_one_node, temp);
5130 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5131 gfc_array_index_type, dubound,
5133 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5134 gfc_array_index_type,
5135 gfc_index_one_node, stride2);
5136 tmp = fold_build2_loc (input_location, NE_EXPR,
5137 gfc_array_index_type, temp, stride2);
5138 asprintf (&msg, "Dimension %d of array '%s' has extent "
5139 "%%ld instead of %%ld", n+1, sym->name);
5141 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5142 fold_convert (long_integer_type_node, temp),
5143 fold_convert (long_integer_type_node, stride2));
5150 /* For assumed shape arrays move the upper bound by the same amount
5151 as the lower bound. */
5152 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5153 gfc_array_index_type, dubound, dlbound);
5154 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5155 gfc_array_index_type, tmp, lbound);
5156 gfc_add_modify (&init, ubound, tmp);
5158 /* The offset of this dimension. offset = offset - lbound * stride. */
5159 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5161 offset = fold_build2_loc (input_location, MINUS_EXPR,
5162 gfc_array_index_type, offset, tmp);
5164 /* The size of this dimension, and the stride of the next. */
5165 if (n + 1 < sym->as->rank)
5167 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5169 if (no_repack || partial != NULL_TREE)
5171 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5173 /* Figure out the stride if not a known constant. */
5174 if (!INTEGER_CST_P (stride))
5177 stmt_packed = NULL_TREE;
5180 /* Calculate stride = size * (ubound + 1 - lbound). */
5181 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5182 gfc_array_index_type,
5183 gfc_index_one_node, lbound);
5184 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5185 gfc_array_index_type, ubound, tmp);
5186 size = fold_build2_loc (input_location, MULT_EXPR,
5187 gfc_array_index_type, size, tmp);
5191 /* Assign the stride. */
5192 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5193 tmp = fold_build3_loc (input_location, COND_EXPR,
5194 gfc_array_index_type, partial,
5195 stmt_unpacked, stmt_packed);
5197 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5198 gfc_add_modify (&init, stride, tmp);
/* Last dimension: update the cached total size instead of a stride.  */
5203 stride = GFC_TYPE_ARRAY_SIZE (type);
5205 if (stride && !INTEGER_CST_P (stride))
5207 /* Calculate size = stride * (ubound + 1 - lbound). */
5208 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5209 gfc_array_index_type,
5210 gfc_index_one_node, lbound);
5211 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5212 gfc_array_index_type,
5214 tmp = fold_build2_loc (input_location, MULT_EXPR,
5215 gfc_array_index_type,
5216 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5217 gfc_add_modify (&init, stride, tmp);
5222 /* Set the offset. */
5223 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5224 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5226 gfc_trans_vla_type_sizes (sym, &init);
5228 stmtInit = gfc_finish_block (&init);
5230 /* Only do the entry/initialization code if the arg is present. */
5231 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5232 optional_arg = (sym->attr.optional
5233 || (sym->ns->proc_name->attr.entry_master
5234 && sym->attr.dummy));
5237 tmp = gfc_conv_expr_present (sym);
5238 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
5239 build_empty_stmt (input_location));
/* Build cleanup code: copy data back (skipped for INTENT(IN)) and free
   the packing temporary; guarded below so it only runs when repacking
   actually produced a different data pointer.  */
5244 stmtCleanup = NULL_TREE;
5247 stmtblock_t cleanup;
5248 gfc_start_block (&cleanup);
5250 if (sym->attr.intent != INTENT_IN)
5252 /* Copy the data back. */
5253 tmp = build_call_expr_loc (input_location,
5254 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
5255 gfc_add_expr_to_block (&cleanup, tmp);
5258 /* Free the temporary. */
5259 tmp = gfc_call_free (tmpdesc);
5260 gfc_add_expr_to_block (&cleanup, tmp);
5262 stmtCleanup = gfc_finish_block (&cleanup);
5264 /* Only do the cleanup if the array was repacked. */
5265 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
5266 tmp = gfc_conv_descriptor_data_get (tmp);
5267 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
5269 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5270 build_empty_stmt (input_location));
/* For optional arguments, additionally guard the cleanup on presence.  */
5274 tmp = gfc_conv_expr_present (sym);
5275 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5276 build_empty_stmt (input_location));
5280 /* We don't need to free any memory allocated by internal_pack as it will
5281 be freed at the end of the function by pop_context. */
5282 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
5284 gfc_restore_backend_locus (&loc);
5288 /* Calculate the overall offset, including subreferences. */
/* Sets PARM's data pointer to the address of the element of DESC
   selected by OFFSET and, when SUBREF, by the trailing component /
   substring / element references of EXPR.  */
5290 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
5291 bool subref, gfc_expr *expr)
5301 /* If offset is NULL and this is not a subreferenced array, there is
5303 if (offset == NULL_TREE)
5306 offset = gfc_index_zero_node;
/* Index the raw data with OFFSET to obtain the base element.  */
5311 tmp = gfc_conv_array_data (desc);
5312 tmp = build_fold_indirect_ref_loc (input_location,
5314 tmp = gfc_build_array_ref (tmp, offset, NULL);
5316 /* Offset the data pointer for pointer assignments from arrays with
5317 subreferences; e.g. my_integer => my_type(:)%integer_component. */
5320 /* Go past the array reference. */
5321 for (ref = expr->ref; ref; ref = ref->next)
5322 if (ref->type == REF_ARRAY &&
5323 ref->u.ar.type != AR_ELEMENT)
5329 /* Calculate the offset for each subsequent subreference. */
5330 for (; ref; ref = ref->next)
/* Component reference: step into the struct field.  */
5335 field = ref->u.c.component->backend_decl;
5336 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
5337 tmp = fold_build3_loc (input_location, COMPONENT_REF,
5339 tmp, field, NULL_TREE);
/* Substring reference: index the character array at its start.  */
5343 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
5344 gfc_init_se (&start, NULL);
5345 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
5346 gfc_add_block_to_block (block, &start.pre);
5347 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
/* Array-element reference: build a flattened index from the
   per-dimension subscripts and declared bounds.  */
5351 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
5352 && ref->u.ar.type == AR_ELEMENT);
5354 /* TODO - Add bounds checking. */
5355 stride = gfc_index_one_node;
5356 index = gfc_index_zero_node;
5357 for (n = 0; n < ref->u.ar.dimen; n++)
5362 /* Update the index. */
/* index += (subscript - lbound) * stride.  */
5363 gfc_init_se (&start, NULL);
5364 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
5365 itmp = gfc_evaluate_now (start.expr, block);
5366 gfc_init_se (&start, NULL);
5367 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
5368 jtmp = gfc_evaluate_now (start.expr, block);
5369 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5370 gfc_array_index_type, itmp, jtmp);
5371 itmp = fold_build2_loc (input_location, MULT_EXPR,
5372 gfc_array_index_type, itmp, stride);
5373 index = fold_build2_loc (input_location, PLUS_EXPR,
5374 gfc_array_index_type, itmp, index);
5375 index = gfc_evaluate_now (index, block);
5377 /* Update the stride. */
/* stride *= extent of this dimension (ubound + 1 - lbound).  */
5378 gfc_init_se (&start, NULL);
5379 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
5380 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5381 gfc_array_index_type, start.expr,
5383 itmp = fold_build2_loc (input_location, PLUS_EXPR,
5384 gfc_array_index_type,
5385 gfc_index_one_node, itmp);
5386 stride = fold_build2_loc (input_location, MULT_EXPR,
5387 gfc_array_index_type, stride, itmp);
5388 stride = gfc_evaluate_now (stride, block);
5391 /* Apply the index to obtain the array element. */
5392 tmp = gfc_build_array_ref (tmp, index, NULL);
5402 /* Set the target data pointer. */
5403 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
5404 gfc_conv_descriptor_data_set (block, parm, offset);
5408 /* gfc_conv_expr_descriptor needs the string length an expression
5409 so that the size of the temporary can be obtained. This is done
5410 by adding up the string lengths of all the elements in the
5411 expression. Function with non-constant expressions have their
5412 string lengths mapped onto the actual arguments using the
5413 interface mapping machinery in trans-expr.c. */
/* On exit, expr->ts.u.cl->backend_decl holds a usable length tree;
   any code needed to compute it is appended to se->pre.  */
5415 get_array_charlen (gfc_expr *expr, gfc_se *se)
5417 gfc_interface_mapping mapping;
5418 gfc_formal_arglist *formal;
5419 gfc_actual_arglist *arg;
/* Constant lengths are handled directly; no recursion needed.  */
5422 if (expr->ts.u.cl->length
5423 && gfc_is_constant_expr (expr->ts.u.cl->length))
5425 if (!expr->ts.u.cl->backend_decl)
5426 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5430 switch (expr->expr_type)
/* Operator expression: recurse on the operand(s) first.  */
5433 get_array_charlen (expr->value.op.op1, se);
5435 /* For parentheses the expression ts.u.cl is identical. */
5436 if (expr->value.op.op == INTRINSIC_PARENTHESES)
/* Fresh variable "sln" receives the computed string length.  */
5439 expr->ts.u.cl->backend_decl =
5440 gfc_create_var (gfc_charlen_type_node, "sln");
5442 if (expr->value.op.op2)
5444 get_array_charlen (expr->value.op.op2, se);
/* The only binary character operator here is concatenation.  */
5446 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
5448 /* Add the string lengths and assign them to the expression
5449 string length backend declaration. */
5450 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5451 fold_build2_loc (input_location, PLUS_EXPR,
5452 gfc_charlen_type_node,
5453 expr->value.op.op1->ts.u.cl->backend_decl,
5454 expr->value.op.op2->ts.u.cl->backend_decl));
/* Unary case: length is just the operand's length.  */
5457 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5458 expr->value.op.op1->ts.u.cl->backend_decl);
/* Function case: intrinsics and constant lengths are simple ...  */
5462 if (expr->value.function.esym == NULL
5463 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
5465 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5469 /* Map expressions involving the dummy arguments onto the actual
5470 argument expressions. */
5471 gfc_init_interface_mapping (&mapping);
5472 formal = expr->symtree->n.sym->formal;
5473 arg = expr->value.function.actual;
5475 /* Set se = NULL in the calls to the interface mapping, to suppress any
5477 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
5482 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
5485 gfc_init_se (&tse, NULL);
5487 /* Build the expression for the character length and convert it. */
5488 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
5490 gfc_add_block_to_block (&se->pre, &tse.pre);
5491 gfc_add_block_to_block (&se->post, &tse.post);
5492 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
/* Clamp negative lengths to zero, per the Fortran character rules.  */
5493 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
5494 gfc_charlen_type_node, tse.expr,
5495 build_int_cst (gfc_charlen_type_node, 0));
5496 expr->ts.u.cl->backend_decl = tse.expr;
5497 gfc_free_interface_mapping (&mapping);
/* Default: fall back to the generic string-length conversion.  */
5501 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5506 /* Helper function to check dimensions. */
5508 dim_ok (gfc_ss_info *info)
5511 for (n = 0; n < info->dimen; n++)
5512 if (info->dim[n] != n)
5517 /* Convert an array for passing as an actual argument. Expressions and
5518 vector subscripts are evaluated and stored in a temporary, which is then
5519 passed. For whole arrays the descriptor is passed. For array sections
5520 a modified copy of the descriptor is passed, but using the original data.
5522 This function is also used for array pointer assignments, and there
5525 - se->want_pointer && !se->direct_byref
5526 EXPR is an actual argument. On exit, se->expr contains a
5527 pointer to the array descriptor.
5529 - !se->want_pointer && !se->direct_byref
5530 EXPR is an actual argument to an intrinsic function or the
5531 left-hand side of a pointer assignment. On exit, se->expr
5532 contains the descriptor for EXPR.
5534 - !se->want_pointer && se->direct_byref
5535 EXPR is the right-hand side of a pointer assignment and
5536 se->expr is the descriptor for the previously-evaluated
5537 left-hand side. The function creates an assignment from
5541 The se->force_tmp flag disables the non-copying descriptor optimization
5542 that is used for transpose. It may be used in cases where there is an
5543 alias between the transpose argument and another argument in the same
5547 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
5559 bool subref_array_target = false;
5562 gcc_assert (ss != NULL);
5563 gcc_assert (ss != gfc_ss_terminator);
5565 /* Special case things we know we can pass easily. */
5566 switch (expr->expr_type)
5569 /* If we have a linear array section, we can pass it directly.
5570 Otherwise we need to copy it into a temporary. */
5572 gcc_assert (ss->type == GFC_SS_SECTION);
5573 gcc_assert (ss->expr == expr);
5574 info = &ss->data.info;
5576 /* Get the descriptor for the array. */
5577 gfc_conv_ss_descriptor (&se->pre, ss, 0);
5578 desc = info->descriptor;
/* A temporary is needed when the reference itself requires one
   (e.g. vector subscripts), unless this is a subreference target
   of a pointer assignment, which is handled without copying.  */
5580 subref_array_target = se->direct_byref && is_subref_array (expr);
5581 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
5582 && !subref_array_target;
5589 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5591 /* Create a new descriptor if the array doesn't have one. */
5594 else if (info->ref->u.ar.type == AR_FULL)
5596 else if (se->direct_byref)
5599 full = gfc_full_array_ref_p (info->ref, NULL);
5601 if (full && dim_ok (info))
5603 if (se->direct_byref && !se->byref_noassign)
5605 /* Copy the descriptor for pointer assignments. */
5606 gfc_add_modify (&se->pre, se->expr, desc);
5608 /* Add any offsets from subreferences. */
5609 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
5610 subref_array_target, expr);
5612 else if (se->want_pointer)
5614 /* We pass full arrays directly. This means that pointers and
5615 allocatable arrays should also work. */
5616 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5623 if (expr->ts.type == BT_CHARACTER)
5624 se->string_length = gfc_get_expr_charlen (expr);
5632 /* We don't need to copy data in some cases. */
5633 arg = gfc_get_noncopying_intrinsic_argument (expr);
5636 /* This is a call to transpose... */
5637 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
5638 /* ... which has already been handled by the scalarizer, so
5639 that we just need to get its argument's descriptor. */
5640 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
5644 /* A transformational function return value will be a temporary
5645 array descriptor. We still need to go through the scalarizer
5646 to create the descriptor. Elemental functions ar handled as
5647 arbitrary expressions, i.e. copy to a temporary. */
5649 if (se->direct_byref)
5651 gcc_assert (ss->type == GFC_SS_FUNCTION && ss->expr == expr);
5653 /* For pointer assignments pass the descriptor directly. */
5657 gcc_assert (se->ss == ss);
5658 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
5659 gfc_conv_expr (se, expr);
5663 if (ss->expr != expr || ss->type != GFC_SS_FUNCTION)
5665 if (ss->expr != expr)
5666 /* Elemental function. */
5667 gcc_assert ((expr->value.function.esym != NULL
5668 && expr->value.function.esym->attr.elemental)
5669 || (expr->value.function.isym != NULL
5670 && expr->value.function.isym->elemental));
5672 gcc_assert (ss->type == GFC_SS_INTRINSIC);
5675 if (expr->ts.type == BT_CHARACTER
5676 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
5677 get_array_charlen (expr, se);
5683 /* Transformational function. */
5684 info = &ss->data.info;
5690 /* Constant array constructors don't need a temporary. */
5691 if (ss->type == GFC_SS_CONSTRUCTOR
5692 && expr->ts.type != BT_CHARACTER
5693 && gfc_constant_array_constructor_p (expr->value.constructor))
5696 info = &ss->data.info;
5706 /* Something complicated. Copy it into a temporary. */
5712 /* If we are creating a temporary, we don't need to bother about aliases
5717 gfc_init_loopinfo (&loop);
5719 /* Associate the SS with the loop. */
5720 gfc_add_ss_to_loop (&loop, ss);
5722 /* Tell the scalarizer not to bother creating loop variables, etc. */
5724 loop.array_parameter = 1;
5726 /* The right-hand side of a pointer assignment mustn't use a temporary. */
5727 gcc_assert (!se->direct_byref);
5729 /* Setup the scalarizing loops and bounds. */
5730 gfc_conv_ss_startstride (&loop);
5734 /* Tell the scalarizer to make a temporary. */
5735 loop.temp_ss = gfc_get_ss ();
5736 loop.temp_ss->type = GFC_SS_TEMP;
5737 loop.temp_ss->next = gfc_ss_terminator;
/* Character temporaries need a string length before the element
   type can be built.  */
5739 if (expr->ts.type == BT_CHARACTER
5740 && !expr->ts.u.cl->backend_decl)
5741 get_array_charlen (expr, se);
5743 loop.temp_ss->data.temp.type = gfc_typenode_for_spec (&expr->ts);
5745 if (expr->ts.type == BT_CHARACTER)
5746 loop.temp_ss->string_length = expr->ts.u.cl->backend_decl;
5748 loop.temp_ss->string_length = NULL;
5750 se->string_length = loop.temp_ss->string_length;
5751 loop.temp_ss->data.temp.dimen = loop.dimen;
5752 loop.temp_ss->data.temp.codimen = loop.codimen;
5753 gfc_add_ss_to_loop (&loop, loop.temp_ss);
5756 gfc_conv_loop_setup (&loop, & expr->where);
5760 /* Copy into a temporary and pass that. We don't need to copy the data
5761 back because expressions and vector subscripts must be INTENT_IN. */
5762 /* TODO: Optimize passing function return values. */
5766 /* Start the copying loops. */
5767 gfc_mark_ss_chain_used (loop.temp_ss, 1);
5768 gfc_mark_ss_chain_used (ss, 1);
5769 gfc_start_scalarized_body (&loop, &block);
5771 /* Copy each data element. */
5772 gfc_init_se (&lse, NULL);
5773 gfc_copy_loopinfo_to_se (&lse, &loop);
5774 gfc_init_se (&rse, NULL);
5775 gfc_copy_loopinfo_to_se (&rse, &loop);
5777 lse.ss = loop.temp_ss;
5780 gfc_conv_scalarized_array_ref (&lse, NULL);
/* Character elements may come back as pointers; dereference so the
   scalar assignment sees the value.  */
5781 if (expr->ts.type == BT_CHARACTER)
5783 gfc_conv_expr (&rse, expr);
5784 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
5785 rse.expr = build_fold_indirect_ref_loc (input_location,
5789 gfc_conv_expr_val (&rse, expr);
5791 gfc_add_block_to_block (&block, &rse.pre);
5792 gfc_add_block_to_block (&block, &lse.pre);
5794 lse.string_length = rse.string_length;
5795 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
5796 expr->expr_type == EXPR_VARIABLE, true);
5797 gfc_add_expr_to_block (&block, tmp);
5799 /* Finish the copying loops. */
5800 gfc_trans_scalarizing_loops (&loop, &block);
5802 desc = loop.temp_ss->data.info.descriptor;
5804 else if (expr->expr_type == EXPR_FUNCTION && dim_ok (info))
5806 desc = info->descriptor;
5807 se->string_length = ss->string_length;
5811 /* We pass sections without copying to a temporary. Make a new
5812 descriptor and point it at the section we want. The loop variable
5813 limits will be the limits of the section.
5814 A function may decide to repack the array to speed up access, but
5815 we're not bothered about that here. */
5816 int dim, ndim, codim;
5824 /* Set the string_length for a character array. */
5825 if (expr->ts.type == BT_CHARACTER)
5826 se->string_length = gfc_get_expr_charlen (expr);
5828 desc = info->descriptor;
5829 if (se->direct_byref && !se->byref_noassign)
5831 /* For pointer assignments we fill in the destination. */
5833 parmtype = TREE_TYPE (parm)
5837 /* Otherwise make a new one. */
5838 parmtype = gfc_get_element_type (TREE_TYPE (desc));
5839 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
5840 loop.codimen, loop.from,
5842 GFC_ARRAY_UNKNOWN, false);
5843 parm = gfc_create_var (parmtype, "parm");
5846 offset = gfc_index_zero_node;
5848 /* The following can be somewhat confusing. We have two
5849 descriptors, a new one and the original array.
5850 {parm, parmtype, dim} refer to the new one.
5851 {desc, type, n, loop} refer to the original, which maybe
5852 a descriptorless array.
5853 The bounds of the scalarization are the bounds of the section.
5854 We don't have to worry about numeric overflows when calculating
5855 the offsets because all elements are within the array data. */
5857 /* Set the dtype. */
5858 tmp = gfc_conv_descriptor_dtype (parm);
5859 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
5861 /* Set offset for assignments to pointer only to zero if it is not
5863 if (se->direct_byref
5864 && info->ref && info->ref->u.ar.type != AR_FULL)
5865 base = gfc_index_zero_node;
5866 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5867 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
5871 ndim = info->ref ? info->ref->u.ar.dimen : info->dimen;
5872 codim = info->codimen;
5873 for (n = 0; n < ndim; n++)
5875 stride = gfc_conv_array_stride (desc, n);
5877 /* Work out the offset. */
5879 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
5881 gcc_assert (info->subscript[n]
5882 && info->subscript[n]->type == GFC_SS_SCALAR);
5883 start = info->subscript[n]->data.scalar.expr;
5887 /* Evaluate and remember the start of the section. */
5888 start = info->start[n];
5889 stride = gfc_evaluate_now (stride, &loop.pre);
/* offset += (start - lbound) * stride for this dimension.  */
5892 tmp = gfc_conv_array_lbound (desc, n);
5893 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
5895 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
5897 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
5901 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
5903 /* For elemental dimensions, we only need the offset. */
5907 /* Vector subscripts need copying and are handled elsewhere. */
5909 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
5911 /* look for the corresponding scalarizer dimension: dim. */
5912 for (dim = 0; dim < ndim; dim++)
5913 if (info->dim[dim] == n)
5916 /* loop exited early: the DIM being looked for has been found. */
5917 gcc_assert (dim < ndim);
5919 /* Set the new lower bound. */
5920 from = loop.from[dim];
5923 /* If we have an array section or are assigning make sure that
5924 the lower bound is 1. References to the full
5925 array should otherwise keep the original bounds. */
5927 || info->ref->u.ar.type != AR_FULL)
5928 && !integer_onep (from))
/* Shift the bounds so the section is 1-based: to += 1 - from.  */
5930 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5931 gfc_array_index_type, gfc_index_one_node,
5933 to = fold_build2_loc (input_location, PLUS_EXPR,
5934 gfc_array_index_type, to, tmp);
5935 from = gfc_index_one_node;
5937 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
5938 gfc_rank_cst[dim], from);
5940 /* Set the new upper bound. */
5941 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
5942 gfc_rank_cst[dim], to);
5944 /* Multiply the stride by the section stride to get the
5946 stride = fold_build2_loc (input_location, MULT_EXPR,
5947 gfc_array_index_type,
5948 stride, info->stride[n]);
5950 if (se->direct_byref
5952 && info->ref->u.ar.type != AR_FULL)
5954 base = fold_build2_loc (input_location, MINUS_EXPR,
5955 TREE_TYPE (base), base, stride);
5957 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5959 tmp = gfc_conv_array_lbound (desc, n);
5960 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5961 TREE_TYPE (base), tmp, loop.from[dim]);
5962 tmp = fold_build2_loc (input_location, MULT_EXPR,
5963 TREE_TYPE (base), tmp,
5964 gfc_conv_array_stride (desc, n));
5965 base = fold_build2_loc (input_location, PLUS_EXPR,
5966 TREE_TYPE (base), tmp, base);
5969 /* Store the new stride. */
5970 gfc_conv_descriptor_stride_set (&loop.pre, parm,
5971 gfc_rank_cst[dim], stride);
/* Codimensions: only lower bounds (and all but the last upper
   bound) are stored in the new descriptor.  */
5974 for (n = ndim; n < ndim + codim; n++)
5976 /* look for the corresponding scalarizer dimension: dim. */
5977 for (dim = 0; dim < ndim + codim; dim++)
5978 if (info->dim[dim] == n)
5981 /* loop exited early: the DIM being looked for has been found. */
5982 gcc_assert (dim < ndim + codim);
5984 from = loop.from[dim];
5986 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
5987 gfc_rank_cst[dim], from);
5988 if (n < ndim + codim - 1)
5989 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
5990 gfc_rank_cst[dim], to);
5994 if (se->data_not_needed)
5995 gfc_conv_descriptor_data_set (&loop.pre, parm,
5996 gfc_index_zero_node);
5998 /* Point the data pointer at the 1st element in the section. */
5999 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6000 subref_array_target, expr);
6002 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6003 && !se->data_not_needed)
6005 /* Set the offset. */
6006 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6010 /* Only the callee knows what the correct offset it, so just set
6012 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6017 if (!se->direct_byref || se->byref_noassign)
6019 /* Get a pointer to the new descriptor. */
6020 if (se->want_pointer)
6021 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6026 gfc_add_block_to_block (&se->pre, &loop.pre);
6027 gfc_add_block_to_block (&se->post, &loop.post);
6029 /* Cleanup the scalarizer. */
6030 gfc_cleanup_loop (&loop);
6033 /* Helper function for gfc_conv_array_parameter if array size needs to be
/* Stores in *SIZE a tree for the size in bytes of the array behind
   DESC: either the cached type size, a library size0 call for
   multi-dimensional descriptors, or ubound/lbound arithmetic for the
   rank-1 descriptor case, clamped at zero and scaled by the element
   size.  */
6037 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6040 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6041 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6042 else if (expr->rank > 1)
6043 *size = build_call_expr_loc (input_location,
6044 gfor_fndecl_size0, 1,
6045 gfc_build_addr_expr (NULL, desc));
/* Rank-1 descriptor: size = max (ubound - lbound + 1, 0).  */
6048 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6049 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6051 *size = fold_build2_loc (input_location, MINUS_EXPR,
6052 gfc_array_index_type, ubound, lbound);
6053 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6054 *size, gfc_index_one_node);
6055 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6056 *size, gfc_index_zero_node);
/* Scale the element count by the element size in bytes.  */
6058 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6059 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6060 *size, fold_convert (gfc_array_index_type, elem));
6063 /* Convert an array for passing as an actual parameter. */
6064 /* TODO: Optimize passing g77 arrays. */
/* NOTE(review): line-sampled extract — declarations, braces and some
   statements are elided between the numbered lines; do not treat gaps in
   control flow as missing logic in the original.  */
6067 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6068 const gfc_symbol *fsym, const char *proc_name,
6073 tree tmp = NULL_TREE;
6075 tree parent = DECL_CONTEXT (current_function_decl);
6076 bool full_array_var;
6077 bool this_array_result;
6080 bool array_constructor;
6081 bool good_allocatable;
6082 bool ultimate_ptr_comp;
6083 bool ultimate_alloc_comp;
6088 ultimate_ptr_comp = false;
6089 ultimate_alloc_comp = false;
/* Inspect the last reference in the chain: is the ultimate component a
   pointer or an allocatable component?  */
6091 for (ref = expr->ref; ref; ref = ref->next)
6093 if (ref->next == NULL)
6096 if (ref->type == REF_COMPONENT)
6098 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6099 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6103 full_array_var = false;
6106 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6107 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6109 sym = full_array_var ? expr->symtree->n.sym : NULL;
6111 /* The symbol should have an array specification. */
6112 gcc_assert (!sym || sym->as || ref->u.ar.as);
/* Character array constructors need their string length computed up
   front so it can accompany the argument.  */
6114 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6116 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6117 expr->ts.u.cl->backend_decl = tmp;
6118 se->string_length = tmp;
6121 /* Is this the result of the enclosing procedure? */
6122 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6123 if (this_array_result
6124 && (sym->backend_decl != current_function_decl)
6125 && (sym->backend_decl != parent))
6126 this_array_result = false;
6128 /* Passing address of the array if it is not pointer or assumed-shape. */
6129 if (full_array_var && g77 && !this_array_result)
6131 tmp = gfc_get_symbol_decl (sym);
6133 if (sym->ts.type == BT_CHARACTER)
6134 se->string_length = sym->ts.u.cl->backend_decl;
6136 if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6138 gfc_conv_expr_descriptor (se, expr, ss);
6139 se->expr = gfc_conv_array_data (se->expr);
6143 if (!sym->attr.pointer
6145 && sym->as->type != AS_ASSUMED_SHAPE
6146 && !sym->attr.allocatable)
6148 /* Some variables are declared directly, others are declared as
6149 pointers and allocated on the heap. */
6150 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6153 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6155 array_parameter_size (tmp, expr, size);
6159 if (sym->attr.allocatable)
6161 if (sym->attr.dummy || sym->attr.result)
6163 gfc_conv_expr_descriptor (se, expr, ss);
6167 array_parameter_size (tmp, expr, size);
6168 se->expr = gfc_conv_array_data (tmp);
6173 /* A convenient reduction in scope. */
6174 contiguous = g77 && !this_array_result && contiguous;
6176 /* There is no need to pack and unpack the array, if it is contiguous
6177 and not a deferred- or assumed-shape array, or if it is simply
6179 no_pack = ((sym && sym->as
6180 && !sym->attr.pointer
6181 && sym->as->type != AS_DEFERRED
6182 && sym->as->type != AS_ASSUMED_SHAPE)
6184 (ref && ref->u.ar.as
6185 && ref->u.ar.as->type != AS_DEFERRED
6186 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6188 gfc_is_simply_contiguous (expr, false));
6190 no_pack = contiguous && no_pack;
6192 /* Array constructors are always contiguous and do not need packing. */
6193 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6195 /* Same is true of contiguous sections from allocatable variables. */
6196 good_allocatable = contiguous
6198 && expr->symtree->n.sym->attr.allocatable;
6200 /* Or ultimate allocatable components. */
6201 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
/* Any of the above lets us pass the raw data pointer directly.  */
6203 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6205 gfc_conv_expr_descriptor (se, expr, ss);
6206 if (expr->ts.type == BT_CHARACTER)
6207 se->string_length = expr->ts.u.cl->backend_decl;
6209 array_parameter_size (se->expr, expr, size);
6210 se->expr = gfc_conv_array_data (se->expr);
6214 if (this_array_result)
6216 /* Result of the enclosing function. */
6217 gfc_conv_expr_descriptor (se, expr, ss);
6219 array_parameter_size (se->expr, expr, size);
6220 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6222 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
6223 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
6224 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
6231 /* Every other type of array. */
6232 se->want_pointer = 1;
6233 gfc_conv_expr_descriptor (se, expr, ss);
6235 array_parameter_size (build_fold_indirect_ref_loc (input_location,
6240 /* Deallocate the allocatable components of structures that are
6242 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
6243 && expr->ts.u.derived->attr.alloc_comp
6244 && expr->expr_type != EXPR_VARIABLE)
6246 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
6247 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
6249 /* The components shall be deallocated before their containing entity. */
6250 gfc_prepend_expr_to_block (&se->post, tmp);
/* Pack/unpack path: needed for g77-style calls and for CONTIGUOUS dummies
   bound to actuals that are not simply contiguous.  */
6253 if (g77 || (fsym && fsym->attr.contiguous
6254 && !gfc_is_simply_contiguous (expr, false)))
6256 tree origptr = NULL_TREE;
6260 /* For contiguous arrays, save the original value of the descriptor. */
6263 origptr = gfc_create_var (pvoid_type_node, "origptr");
6264 tmp = build_fold_indirect_ref_loc (input_location, desc);
6265 tmp = gfc_conv_array_data (tmp);
6266 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6267 TREE_TYPE (origptr), origptr,
6268 fold_convert (TREE_TYPE (origptr), tmp));
6269 gfc_add_expr_to_block (&se->pre, tmp);
6272 /* Repack the array. */
6273 if (gfc_option.warn_array_temp)
6276 gfc_warning ("Creating array temporary at %L for argument '%s'",
6277 &expr->where, fsym->name);
6279 gfc_warning ("Creating array temporary at %L", &expr->where);
6282 ptr = build_call_expr_loc (input_location,
6283 gfor_fndecl_in_pack, 1, desc);
/* For optional arguments, only pack when the actual is present;
   otherwise pass a NULL pointer.  */
6285 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6287 tmp = gfc_conv_expr_present (sym);
6288 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
6289 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
6290 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
6293 ptr = gfc_evaluate_now (ptr, &se->pre);
6295 /* Use the packed data for the actual argument, except for contiguous arrays,
6296 where the descriptor's data component is set. */
6301 tmp = build_fold_indirect_ref_loc (input_location, desc);
6302 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
/* Optional run-time diagnostic: report when a temporary was made.  */
6305 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
6309 if (fsym && proc_name)
6310 asprintf (&msg, "An array temporary was created for argument "
6311 "'%s' of procedure '%s'", fsym->name, proc_name);
6313 asprintf (&msg, "An array temporary was created");
6315 tmp = build_fold_indirect_ref_loc (input_location,
6317 tmp = gfc_conv_array_data (tmp);
6318 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6319 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6321 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6322 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6324 gfc_conv_expr_present (sym), tmp);
6326 gfc_trans_runtime_check (false, true, tmp, &se->pre,
6331 gfc_start_block (&block);
6333 /* Copy the data back. */
6334 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
6336 tmp = build_call_expr_loc (input_location,
6337 gfor_fndecl_in_unpack, 2, desc, ptr);
6338 gfc_add_expr_to_block (&block, tmp);
6341 /* Free the temporary. */
6342 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
6343 gfc_add_expr_to_block (&block, tmp);
6345 stmt = gfc_finish_block (&block);
6347 gfc_init_block (&block);
6348 /* Only if it was repacked. This code needs to be executed before the
6349 loop cleanup code. */
6350 tmp = build_fold_indirect_ref_loc (input_location,
6352 tmp = gfc_conv_array_data (tmp);
6353 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6354 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6356 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6357 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6359 gfc_conv_expr_present (sym), tmp);
6361 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6363 gfc_add_expr_to_block (&block, tmp);
6364 gfc_add_block_to_block (&block, &se->post);
6366 gfc_init_block (&se->post);
6368 /* Reset the descriptor pointer. */
6371 tmp = build_fold_indirect_ref_loc (input_location, desc);
6372 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
6375 gfc_add_block_to_block (&se->post, &block);
6380 /* Generate code to deallocate an array, if it is allocated. */
/* Returns a statement tree; callers append it to cleanup code.
   NOTE(review): line-sampled extract — return type/braces elided.  */
6383 gfc_trans_dealloc_allocated (tree descriptor)
6389 gfc_start_block (&block);
/* The data pointer doubles as the "is allocated" flag.  */
6391 var = gfc_conv_descriptor_data_get (descriptor);
6394 /* Call array_deallocate with an int * present in the second argument.
6395 Although it is ignored here, it's presence ensures that arrays that
6396 are already deallocated are ignored. */
6397 tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
6398 gfc_add_expr_to_block (&block, tmp);
6400 /* Zero the data pointer. */
6401 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6402 var, build_int_cst (TREE_TYPE (var), 0));
6403 gfc_add_expr_to_block (&block, tmp);
6405 return gfc_finish_block (&block);
6409 /* This helper function calculates the size in words of a full array. */
/* Element count of a full array from its descriptor: the extent of the
   last dimension times that dimension's stride (the stride already folds
   in the extents of all lower dimensions).  Both factors are forced into
   temporaries with gfc_evaluate_now to avoid re-evaluation.  */
6412 get_full_array_size (stmtblock_t *block, tree decl, int rank)
6417 idx = gfc_rank_cst[rank - 1];
6418 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
6419 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
6420 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
6422 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6423 tmp, gfc_index_one_node);
6424 tmp = gfc_evaluate_now (tmp, block);
6426 nelems = gfc_conv_descriptor_stride_get (decl, idx);
6427 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6429 return gfc_evaluate_now (tmp, block);
6433 /* Allocate dest to the same size as src, and copy src -> dest.
6434 If no_malloc is set, only the copy is done. */
/* NOTE(review): line-sampled extract — the branch selecting between the
   scalar (rank == 0, presumably) and descriptor paths is partly elided.  */
6437 duplicate_allocatable (tree dest, tree src, tree type, int rank,
6447 /* If the source is null, set the destination to null. Then,
6448 allocate memory to the destination. */
6449 gfc_init_block (&block);
/* Scalar path: null DEST, then (unless no_malloc) malloc and memcpy.  */
6453 tmp = null_pointer_node;
6454 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
6455 gfc_add_expr_to_block (&block, tmp);
6456 null_data = gfc_finish_block (&block);
6458 gfc_init_block (&block);
6459 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
6462 tmp = gfc_call_malloc (&block, type, size);
6463 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6464 dest, fold_convert (type, tmp));
6465 gfc_add_expr_to_block (&block, tmp);
6468 tmp = built_in_decls[BUILT_IN_MEMCPY];
6469 tmp = build_call_expr_loc (input_location, tmp, 3,
/* Array path: when SRC is null the descriptor's data pointer is nulled.  */
6474 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
6475 null_data = gfc_finish_block (&block);
6477 gfc_init_block (&block);
/* Size in bytes = element count * element size.  */
6478 nelems = get_full_array_size (&block, src, rank);
6479 tmp = fold_convert (gfc_array_index_type,
6480 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
6481 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6485 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
6486 tmp = gfc_call_malloc (&block, tmp, size);
6487 gfc_conv_descriptor_data_set (&block, dest, tmp);
6490 /* We know the temporary and the value will be the same length,
6491 so can use memcpy. */
6492 tmp = built_in_decls[BUILT_IN_MEMCPY];
6493 tmp = build_call_expr_loc (input_location,
6494 tmp, 3, gfc_conv_descriptor_data_get (dest),
6495 gfc_conv_descriptor_data_get (src), size);
6498 gfc_add_expr_to_block (&block, tmp);
6499 tmp = gfc_finish_block (&block);
6501 /* Null the destination if the source is null; otherwise do
6502 the allocate and copy. */
6506 null_cond = gfc_conv_descriptor_data_get (src);
6508 null_cond = convert (pvoid_type_node, null_cond);
6509 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6510 null_cond, null_pointer_node);
6511 return build3_v (COND_EXPR, null_cond, tmp, null_data);
6515 /* Allocate dest to the same size as src, and copy data src -> dest. */
/* Thin public wrapper: no_malloc == false, so allocation is performed.  */
6518 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
6520 return duplicate_allocatable (dest, src, type, rank, false);
6524 /* Copy data src -> dest. */
/* Thin public wrapper: no_malloc == true, so only the copy is emitted;
   DEST is assumed to be allocated already.  */
6527 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
6529 return duplicate_allocatable (dest, src, type, rank, true);
6533 /* Recursively traverse an object of derived type, generating code to
6534 deallocate, nullify or copy allocatable components. This is the work horse
6535 function for the functions named in this enum. */
/* Purpose codes for structure_alloc_comps: deallocate components, nullify
   them, copy object + components, or copy the components only.  Starts at
   1 so 0 is never a valid purpose.  */
6537 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
6538 COPY_ONLY_ALLOC_COMP};
/* Workhorse for the gfc_*_alloc_comp entry points.  Walks DECL (an object
   of derived type DER_TYPE, possibly an array of RANK dimensions) and,
   per PURPOSE, deallocates, nullifies or copies its allocatable
   components, recursing into components that themselves have allocatable
   components.  DEST is the copy target for the COPY_* purposes.
   NOTE(review): line-sampled extract — braces, some declarations and a few
   statements are elided between the numbered lines.  */
6541 structure_alloc_comps (gfc_symbol * der_type, tree decl,
6542 tree dest, int rank, int purpose)
6546 stmtblock_t fnblock;
6547 stmtblock_t loopbody;
6558 tree null_cond = NULL_TREE;
6560 gfc_init_block (&fnblock);
6562 decl_type = TREE_TYPE (decl);
/* Dereference pointers/references so we operate on the object itself.  */
6564 if ((POINTER_TYPE_P (decl_type) && rank != 0)
6565 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
6567 decl = build_fold_indirect_ref_loc (input_location,
6570 /* Just in case in gets dereferenced. */
6571 decl_type = TREE_TYPE (decl);
6573 /* If this an array of derived types with allocatable components
6574 build a loop and recursively call this function. */
6575 if (TREE_CODE (decl_type) == ARRAY_TYPE
6576 || GFC_DESCRIPTOR_TYPE_P (decl_type))
6578 tmp = gfc_conv_array_data (decl);
6579 var = build_fold_indirect_ref_loc (input_location,
6582 /* Get the number of elements - 1 and set the counter. */
6583 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
6585 /* Use the descriptor for an allocatable array. Since this
6586 is a full array reference, we only need the descriptor
6587 information from dimension = rank. */
6588 tmp = get_full_array_size (&fnblock, decl, rank);
6589 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6590 gfc_array_index_type, tmp,
6591 gfc_index_one_node);
/* Guard the whole loop on the array actually being allocated.  */
6593 null_cond = gfc_conv_descriptor_data_get (decl);
6594 null_cond = fold_build2_loc (input_location, NE_EXPR,
6595 boolean_type_node, null_cond,
6596 build_int_cst (TREE_TYPE (null_cond), 0));
6600 /* Otherwise use the TYPE_DOMAIN information. */
6601 tmp = array_type_nelts (decl_type);
6602 tmp = fold_convert (gfc_array_index_type, tmp);
6605 /* Remember that this is, in fact, the no. of elements - 1. */
6606 nelems = gfc_evaluate_now (tmp, &fnblock);
6607 index = gfc_create_var (gfc_array_index_type, "S");
6609 /* Build the body of the loop. */
6610 gfc_init_block (&loopbody);
6612 vref = gfc_build_array_ref (var, index, NULL);
6614 if (purpose == COPY_ALLOC_COMP)
6616 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
/* First allocate the destination array itself, then per-element
   recurse to copy the allocatable components.  */
6618 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
6619 gfc_add_expr_to_block (&fnblock, tmp);
6621 tmp = build_fold_indirect_ref_loc (input_location,
6622 gfc_conv_array_data (dest));
6623 dref = gfc_build_array_ref (tmp, index, NULL);
6624 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
6626 else if (purpose == COPY_ONLY_ALLOC_COMP)
6628 tmp = build_fold_indirect_ref_loc (input_location,
6629 gfc_conv_array_data (dest));
6630 dref = gfc_build_array_ref (tmp, index, NULL);
6631 tmp = structure_alloc_comps (der_type, vref, dref, rank,
6635 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
6637 gfc_add_expr_to_block (&loopbody, tmp);
6639 /* Build the loop and return. */
6640 gfc_init_loopinfo (&loop);
6642 loop.from[0] = gfc_index_zero_node;
6643 loop.loopvar[0] = index;
6644 loop.to[0] = nelems;
6645 gfc_trans_scalarizing_loops (&loop, &loopbody);
6646 gfc_add_block_to_block (&fnblock, &loop.pre);
6648 tmp = gfc_finish_block (&fnblock);
6649 if (null_cond != NULL_TREE)
6650 tmp = build3_v (COND_EXPR, null_cond, tmp,
6651 build_empty_stmt (input_location));
6656 /* Otherwise, act on the components or recursively call self to
6657 act on a chain of components. */
6658 for (c = der_type->components; c; c = c->next)
6660 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
6661 || c->ts.type == BT_CLASS)
6662 && c->ts.u.derived->attr.alloc_comp;
6663 cdecl = c->backend_decl;
6664 ctype = TREE_TYPE (cdecl);
6668 case DEALLOCATE_ALLOC_COMP:
6669 if (c->attr.allocatable && c->attr.dimension)
6671 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6672 decl, cdecl, NULL_TREE);
6673 if (cmp_has_alloc_comps && !c->attr.pointer)
6675 /* Do not deallocate the components of ultimate pointer
6677 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6678 c->as->rank, purpose);
6679 gfc_add_expr_to_block (&fnblock, tmp);
6681 tmp = gfc_trans_dealloc_allocated (comp);
6682 gfc_add_expr_to_block (&fnblock, tmp);
6684 else if (c->attr.allocatable)
6686 /* Allocatable scalar components. */
6687 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6688 decl, cdecl, NULL_TREE);
6690 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6692 gfc_add_expr_to_block (&fnblock, tmp);
/* Null the pointer after freeing to prevent double free.  */
6694 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6695 void_type_node, comp,
6696 build_int_cst (TREE_TYPE (comp), 0));
6697 gfc_add_expr_to_block (&fnblock, tmp);
6699 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6701 /* Allocatable scalar CLASS components. */
6702 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6703 decl, cdecl, NULL_TREE);
6705 /* Add reference to '_data' component. */
6706 tmp = CLASS_DATA (c)->backend_decl;
6707 comp = fold_build3_loc (input_location, COMPONENT_REF,
6708 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6710 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6711 CLASS_DATA (c)->ts);
6712 gfc_add_expr_to_block (&fnblock, tmp);
6714 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6715 void_type_node, comp,
6716 build_int_cst (TREE_TYPE (comp), 0));
6717 gfc_add_expr_to_block (&fnblock, tmp);
6721 case NULLIFY_ALLOC_COMP:
/* Pointer components are left alone: nullifying them would discard
   a possibly valid target.  */
6722 if (c->attr.pointer)
6724 else if (c->attr.allocatable && c->attr.dimension)
6726 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6727 decl, cdecl, NULL_TREE);
6728 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
6730 else if (c->attr.allocatable)
6732 /* Allocatable scalar components. */
6733 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6734 decl, cdecl, NULL_TREE);
6735 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6736 void_type_node, comp,
6737 build_int_cst (TREE_TYPE (comp), 0));
6738 gfc_add_expr_to_block (&fnblock, tmp);
6740 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6742 /* Allocatable scalar CLASS components. */
6743 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6744 decl, cdecl, NULL_TREE);
6745 /* Add reference to '_data' component. */
6746 tmp = CLASS_DATA (c)->backend_decl;
6747 comp = fold_build3_loc (input_location, COMPONENT_REF,
6748 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
6749 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6750 void_type_node, comp,
6751 build_int_cst (TREE_TYPE (comp), 0));
6752 gfc_add_expr_to_block (&fnblock, tmp);
6754 else if (cmp_has_alloc_comps)
6756 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6757 decl, cdecl, NULL_TREE);
6758 rank = c->as ? c->as->rank : 0;
6759 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6761 gfc_add_expr_to_block (&fnblock, tmp);
6765 case COPY_ALLOC_COMP:
6766 if (c->attr.pointer)
6769 /* We need source and destination components. */
6770 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
6772 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
6774 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
6776 if (c->attr.allocatable && !cmp_has_alloc_comps)
6778 rank = c->as ? c->as->rank : 0;
6779 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
6780 gfc_add_expr_to_block (&fnblock, tmp);
6783 if (cmp_has_alloc_comps)
/* Shallow-copy the component first, then recurse to deep-copy its
   own allocatable components.  */
6785 rank = c->as ? c->as->rank : 0;
6786 tmp = fold_convert (TREE_TYPE (dcmp), comp);
6787 gfc_add_modify (&fnblock, dcmp, tmp);
6788 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
6790 gfc_add_expr_to_block (&fnblock, tmp);
6800 return gfc_finish_block (&fnblock);
6803 /* Recursively traverse an object of derived type, generating code to
6804 nullify allocatable components. */
/* Thin wrapper over structure_alloc_comps with purpose NULLIFY_ALLOC_COMP.  */
6807 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6809 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6810 NULLIFY_ALLOC_COMP);
6814 /* Recursively traverse an object of derived type, generating code to
6815 deallocate allocatable components. */
/* Thin wrapper over structure_alloc_comps with purpose DEALLOCATE_ALLOC_COMP.  */
6818 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
6820 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
6821 DEALLOCATE_ALLOC_COMP);
6825 /* Recursively traverse an object of derived type, generating code to
6826 copy it and its allocatable components. */
/* Thin wrapper over structure_alloc_comps with purpose COPY_ALLOC_COMP.  */
6829 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
6831 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
6835 /* Recursively traverse an object of derived type, generating code to
6836 copy only its allocatable components. */
/* Thin wrapper over structure_alloc_comps with purpose COPY_ONLY_ALLOC_COMP.  */
6839 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
6841 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
6845 /* Returns the value of LBOUND for an expression. This could be broken out
6846 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
6847 called by gfc_alloc_allocatable_for_assignment. */
/* NOTE(review): line-sampled extract — return type, braces and parts of
   the condition construction are elided.  */
6849 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
6854 tree cond, cond1, cond3, cond4;
/* Descriptor case: build a runtime COND_EXPR selecting between the
   stored lbound and 1 (the default lower bound).  */
6858 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
6860 tmp = gfc_rank_cst[dim];
6861 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
6862 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
6863 stride = gfc_conv_descriptor_stride_get (desc, tmp);
6864 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
6866 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
6867 stride, gfc_index_zero_node);
6868 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6869 boolean_type_node, cond3, cond1);
6870 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
6871 stride, gfc_index_zero_node);
6873 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
6874 tmp, build_int_cst (gfc_array_index_type,
6877 cond = boolean_false_node;
6879 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
6880 boolean_type_node, cond3, cond4);
6881 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
6882 boolean_type_node, cond, cond1);
6884 return fold_build3_loc (input_location, COND_EXPR,
6885 gfc_array_index_type, cond,
6886 lbound, gfc_index_one_node);
/* Variable case: take the lbound from the backend type, following a
   full-array component reference if present.  */
6888 else if (expr->expr_type == EXPR_VARIABLE)
6890 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
6891 for (ref = expr->ref; ref; ref = ref->next)
6893 if (ref->type == REF_COMPONENT
6894 && ref->u.c.component->as
6896 && ref->next->u.ar.type == AR_FULL)
6897 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
6899 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
6901 else if (expr->expr_type == EXPR_FUNCTION)
6903 /* A conversion function, so use the argument. */
6904 expr = expr->value.function.actual->expr;
6905 if (expr->expr_type != EXPR_VARIABLE)
6906 return gfc_index_one_node;
6907 desc = TREE_TYPE (expr->symtree->n.sym->backend_decl);
6908 return get_std_lbound (expr, desc, dim, assumed_size);
/* Fallback: the Fortran default lower bound.  */
6911 return gfc_index_one_node;
6915 /* Returns true if an expression represents an lhs that can be reallocated
/* Predicate used by the (re)allocate-on-assignment machinery: true for a
   full reference to an allocatable variable, or a full array reference to
   an allocatable component.  NOTE(review): line-sampled extract — some
   return statements and braces are elided.  */
6919 gfc_is_reallocatable_lhs (gfc_expr *expr)
6926 /* An allocatable variable. */
6927 if (expr->symtree->n.sym->attr.allocatable
6929 && expr->ref->type == REF_ARRAY
6930 && expr->ref->u.ar.type == AR_FULL)
6933 /* All that can be left are allocatable components. */
6934 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
6935 && expr->symtree->n.sym->ts.type != BT_CLASS)
6936 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
6939 /* Find a component ref followed by an array reference. */
6940 for (ref = expr->ref; ref; ref = ref->next)
6942 && ref->type == REF_COMPONENT
6943 && ref->next->type == REF_ARRAY
6944 && !ref->next->next)
6950 /* Return true if valid reallocatable lhs. */
6951 if (ref->u.c.component->attr.allocatable
6952 && ref->next->u.ar.type == AR_FULL)
6959 /* Allocate the lhs of an assignment to an allocatable array, otherwise
/* F2003 automatic (re)allocation on intrinsic assignment.  Emits code
   that (a) allocates the LHS if unallocated, (b) reallocates it when its
   shape differs from the RHS, and (c) updates the scalarizer state so the
   following assignment loop sees the new descriptor.
   NOTE(review): line-sampled extract — declarations, braces and some
   statements are elided between the numbered lines.  */
6963 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
6967 stmtblock_t realloc_block;
6968 stmtblock_t alloc_block;
6991 gfc_array_spec * as;
6993 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
6994 Find the lhs expression in the loop chain and set expr1 and
6995 expr2 accordingly. */
6996 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
6999 /* Find the ss for the lhs. */
7001 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7002 if (lss->expr && lss->expr->expr_type == EXPR_VARIABLE)
7004 if (lss == gfc_ss_terminator)
7009 /* Bail out if this is not a valid allocate on assignment. */
7010 if (!gfc_is_reallocatable_lhs (expr1)
7011 || (expr2 && !expr2->rank))
7014 /* Find the ss for the lhs. */
7016 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7017 if (lss->expr == expr1)
7020 if (lss == gfc_ss_terminator)
7023 /* Find an ss for the rhs. For operator expressions, we see the
7024 ss's for the operands. Any one of these will do. */
7026 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7027 if (rss->expr != expr1 && rss != loop->temp_ss)
7030 if (expr2 && rss == gfc_ss_terminator)
7033 gfc_start_block (&fblock);
7035 /* Since the lhs is allocatable, this must be a descriptor type.
7036 Get the data and array size. */
7037 desc = lss->data.info.descriptor;
7038 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
7039 array1 = gfc_conv_descriptor_data_get (desc);
7041 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
7042 deallocated if expr is an array of different shape or any of the
7043 corresponding length type parameter values of variable and expr
7044 differ." This assures F95 compatibility. */
7045 jump_label1 = gfc_build_label_decl (NULL_TREE);
7046 jump_label2 = gfc_build_label_decl (NULL_TREE);
7048 /* Allocate if data is NULL. */
7049 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7050 array1, build_int_cst (TREE_TYPE (array1), 0));
7051 tmp = build3_v (COND_EXPR, cond,
7052 build1_v (GOTO_EXPR, jump_label1),
7053 build_empty_stmt (input_location));
7054 gfc_add_expr_to_block (&fblock, tmp);
7056 /* Get arrayspec if expr is a full array. */
7057 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7058 && expr2->value.function.isym
7059 && expr2->value.function.isym->conversion)
7061 /* For conversion functions, take the arg. */
7062 gfc_expr *arg = expr2->value.function.actual->expr;
7063 as = gfc_get_full_arrayspec_from_expr (arg);
7066 as = gfc_get_full_arrayspec_from_expr (expr2);
7070 /* If the lhs shape is not the same as the rhs jump to setting the
7071 bounds and doing the reallocation....... */
7072 for (n = 0; n < expr1->rank; n++)
7074 /* Check the shape. */
7075 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7076 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7077 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7078 gfc_array_index_type,
7079 loop->to[n], loop->from[n]);
7080 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7081 gfc_array_index_type,
7083 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7084 gfc_array_index_type,
7086 cond = fold_build2_loc (input_location, NE_EXPR,
7088 tmp, gfc_index_zero_node);
7089 tmp = build3_v (COND_EXPR, cond,
7090 build1_v (GOTO_EXPR, jump_label1),
7091 build_empty_stmt (input_location));
7092 gfc_add_expr_to_block (&fblock, tmp);
7095 /* ....else jump past the (re)alloc code. */
7096 tmp = build1_v (GOTO_EXPR, jump_label2);
7097 gfc_add_expr_to_block (&fblock, tmp);
7099 /* Add the label to start automatic (re)allocation. */
7100 tmp = build1_v (LABEL_EXPR, jump_label1);
7101 gfc_add_expr_to_block (&fblock, tmp);
7103 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7105 /* Get the rhs size. Fix both sizes. */
7107 desc2 = rss->data.info.descriptor;
7110 size2 = gfc_index_one_node;
7111 for (n = 0; n < expr2->rank; n++)
7113 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7114 gfc_array_index_type,
7115 loop->to[n], loop->from[n]);
7116 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7117 gfc_array_index_type,
7118 tmp, gfc_index_one_node);
7119 size2 = fold_build2_loc (input_location, MULT_EXPR,
7120 gfc_array_index_type,
7124 size1 = gfc_evaluate_now (size1, &fblock);
7125 size2 = gfc_evaluate_now (size2, &fblock);
/* Element counts differ => realloc needed (decided at runtime).  */
7127 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7129 neq_size = gfc_evaluate_now (cond, &fblock);
7132 /* Now modify the lhs descriptor and the associated scalarizer
7133 variables. F2003 7.4.1.3: "If variable is or becomes an
7134 unallocated allocatable variable, then it is allocated with each
7135 deferred type parameter equal to the corresponding type parameters
7136 of expr , with the shape of expr , and with each lower bound equal
7137 to the corresponding element of LBOUND(expr)."
7138 Reuse size1 to keep a dimension-by-dimension track of the
7139 stride of the new array. */
7140 size1 = gfc_index_one_node;
7141 offset = gfc_index_zero_node;
7143 for (n = 0; n < expr2->rank; n++)
7145 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7146 gfc_array_index_type,
7147 loop->to[n], loop->from[n]);
7148 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7149 gfc_array_index_type,
7150 tmp, gfc_index_one_node);
7152 lbound = gfc_index_one_node;
7157 lbd = get_std_lbound (expr2, desc2, n,
7158 as->type == AS_ASSUMED_SIZE);
7159 ubound = fold_build2_loc (input_location,
7161 gfc_array_index_type,
7163 ubound = fold_build2_loc (input_location,
7165 gfc_array_index_type,
7170 gfc_conv_descriptor_lbound_set (&fblock, desc,
7173 gfc_conv_descriptor_ubound_set (&fblock, desc,
7176 gfc_conv_descriptor_stride_set (&fblock, desc,
7179 lbound = gfc_conv_descriptor_lbound_get (desc,
7181 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
7182 gfc_array_index_type,
7184 offset = fold_build2_loc (input_location, MINUS_EXPR,
7185 gfc_array_index_type,
7187 size1 = fold_build2_loc (input_location, MULT_EXPR,
7188 gfc_array_index_type,
7192 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
7193 the array offset is saved and the info.offset is used for a
7194 running offset. Use the saved_offset instead. */
7195 tmp = gfc_conv_descriptor_offset (desc);
7196 gfc_add_modify (&fblock, tmp, offset);
7197 if (lss->data.info.saved_offset
7198 && TREE_CODE (lss->data.info.saved_offset) == VAR_DECL)
7199 gfc_add_modify (&fblock, lss->data.info.saved_offset, tmp);
7201 /* Now set the deltas for the lhs. */
7202 for (n = 0; n < expr1->rank; n++)
7204 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7205 dim = lss->data.info.dim[n];
7206 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7207 gfc_array_index_type, tmp,
7209 if (lss->data.info.delta[dim]
7210 && TREE_CODE (lss->data.info.delta[dim]) == VAR_DECL)
7211 gfc_add_modify (&fblock, lss->data.info.delta[dim], tmp);
7214 /* Get the new lhs size in bytes. */
7215 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
7217 tmp = expr2->ts.u.cl->backend_decl;
7218 gcc_assert (expr1->ts.u.cl->backend_decl);
7219 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
7220 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
7222 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
7224 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
7225 tmp = fold_build2_loc (input_location, MULT_EXPR,
7226 gfc_array_index_type, tmp,
7227 expr1->ts.u.cl->backend_decl);
7230 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
7231 tmp = fold_convert (gfc_array_index_type, tmp);
7232 size2 = fold_build2_loc (input_location, MULT_EXPR,
7233 gfc_array_index_type,
7235 size2 = fold_convert (size_type_node, size2);
7236 size2 = gfc_evaluate_now (size2, &fblock);
7238 /* Realloc expression. Note that the scalarizer uses desc.data
7239 in the array reference - (*desc.data)[<element>]. */
7240 gfc_init_block (&realloc_block);
7241 tmp = build_call_expr_loc (input_location,
7242 built_in_decls[BUILT_IN_REALLOC], 2,
7243 fold_convert (pvoid_type_node, array1),
7245 gfc_conv_descriptor_data_set (&realloc_block,
7247 realloc_expr = gfc_finish_block (&realloc_block);
7249 /* Only reallocate if sizes are different. */
7250 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
7251 build_empty_stmt (input_location));
7255 /* Malloc expression. */
7256 gfc_init_block (&alloc_block);
7257 tmp = build_call_expr_loc (input_location,
7258 built_in_decls[BUILT_IN_MALLOC], 1,
7260 gfc_conv_descriptor_data_set (&alloc_block,
7262 tmp = gfc_conv_descriptor_dtype (desc);
7263 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
7264 alloc_expr = gfc_finish_block (&alloc_block);
7266 /* Malloc if not allocated; realloc otherwise. */
7267 tmp = build_int_cst (TREE_TYPE (array1), 0);
7268 cond = fold_build2_loc (input_location, EQ_EXPR,
7271 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
7272 gfc_add_expr_to_block (&fblock, tmp);
7274 /* Make sure that the scalarizer data pointer is updated. */
7275 if (lss->data.info.data
7276 && TREE_CODE (lss->data.info.data) == VAR_DECL)
7278 tmp = gfc_conv_descriptor_data_get (desc);
7279 gfc_add_modify (&fblock, lss->data.info.data, tmp);
7282 /* Add the exit label. */
7283 tmp = build1_v (LABEL_EXPR, jump_label2);
7284 gfc_add_expr_to_block (&fblock, tmp);
7286 return gfc_finish_block (&fblock);
7290 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
7291 Do likewise, recursively if necessary, with the allocatable components of
derived types.  */
7295 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
7301 stmtblock_t cleanup;
7304 bool sym_has_alloc_comp;
7306 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
7307 || sym->ts.type == BT_CLASS)
7308 && sym->ts.u.derived->attr.alloc_comp;
7310 /* Make sure the frontend gets these right. */
7311 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
7312 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
7313 "allocatable attribute or derived type without allocatable "
7316 gfc_save_backend_locus (&loc);
7317 gfc_set_backend_locus (&sym->declared_at);
7318 gfc_init_block (&init);
7320 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
7321 || TREE_CODE (sym->backend_decl) == PARM_DECL);
7323 if (sym->ts.type == BT_CHARACTER
7324 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
7326 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
7327 gfc_trans_vla_type_sizes (sym, &init);
7330 /* Dummy, use associated and result variables don't need anything special. */
7331 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
7333 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7334 gfc_restore_backend_locus (&loc);
7338 descriptor = sym->backend_decl;
7340 /* Although static, derived types with default initializers and
7341 allocatable components must not be nulled wholesale; instead they
7342 are treated component by component. */
7343 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
7345 /* SAVEd variables are not freed on exit. */
7346 gfc_trans_static_array_pointer (sym);
7348 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7349 gfc_restore_backend_locus (&loc);
7353 /* Get the descriptor type. */
7354 type = TREE_TYPE (sym->backend_decl);
7356 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
7359 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
7361 if (sym->value == NULL
7362 || !gfc_has_default_initializer (sym->ts.u.derived))
7364 rank = sym->as ? sym->as->rank : 0;
7365 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
7367 gfc_add_expr_to_block (&init, tmp);
7370 gfc_init_default_dt (sym, &init, false);
7373 else if (!GFC_DESCRIPTOR_TYPE_P (type))
7375 /* If the backend_decl is not a descriptor, we must have a pointer
7377 descriptor = build_fold_indirect_ref_loc (input_location,
7379 type = TREE_TYPE (descriptor);
7382 /* NULLIFY the data pointer. */
7383 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
7384 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
7386 gfc_restore_backend_locus (&loc);
7387 gfc_init_block (&cleanup);
7389 /* Allocatable arrays need to be freed when they go out of scope.
7390 The allocatable components of pointers must not be touched. */
7391 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
7392 && !sym->attr.pointer && !sym->attr.save)
7395 rank = sym->as ? sym->as->rank : 0;
7396 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
7397 gfc_add_expr_to_block (&cleanup, tmp);
7400 if (sym->attr.allocatable && sym->attr.dimension
7401 && !sym->attr.save && !sym->attr.result)
7403 tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
7404 gfc_add_expr_to_block (&cleanup, tmp);
7407 gfc_add_init_cleanup (block, gfc_finish_block (&init),
7408 gfc_finish_block (&cleanup));
7411 /************ Expression Walking Functions ******************/
7413 /* Walk a variable reference.
7415 Possible extension - multiple component subscripts.
7416 x(:,:) = foo%a(:)%b(:)
7418 forall (i=..., j=...)
7419 x(i,j) = foo%a(j)%b(i)
7421 This adds a fair amount of complexity because you need to deal with more
7422 than one ref. Maybe handle in a similar manner to vector subscripts.
7423 Maybe not worth the effort. */
7427 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
7434 for (ref = expr->ref; ref; ref = ref->next)
7435 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
7438 for (; ref; ref = ref->next)
7440 if (ref->type == REF_SUBSTRING)
7442 newss = gfc_get_ss ();
7443 newss->type = GFC_SS_SCALAR;
7444 newss->expr = ref->u.ss.start;
7448 newss = gfc_get_ss ();
7449 newss->type = GFC_SS_SCALAR;
7450 newss->expr = ref->u.ss.end;
7455 /* We're only interested in array sections from now on. */
7456 if (ref->type != REF_ARRAY)
7461 if (ar->as->rank == 0)
7463 /* Scalar coarray. */
7470 for (n = 0; n < ar->dimen + ar->codimen; n++)
7472 newss = gfc_get_ss ();
7473 newss->type = GFC_SS_SCALAR;
7474 newss->expr = ar->start[n];
7481 newss = gfc_get_ss ();
7482 newss->type = GFC_SS_SECTION;
7485 newss->data.info.dimen = ar->as->rank;
7486 newss->data.info.codimen = 0;
7487 newss->data.info.ref = ref;
7489 /* Make sure array is the same as array(:,:), this way
7490 we don't need to special case all the time. */
7491 ar->dimen = ar->as->rank;
7493 for (n = 0; n < ar->dimen; n++)
7495 newss->data.info.dim[n] = n;
7496 ar->dimen_type[n] = DIMEN_RANGE;
7498 gcc_assert (ar->start[n] == NULL);
7499 gcc_assert (ar->end[n] == NULL);
7500 gcc_assert (ar->stride[n] == NULL);
7502 for (n = ar->dimen; n < ar->dimen + ar->as->corank; n++)
7504 newss->data.info.dim[n] = n;
7505 ar->dimen_type[n] = DIMEN_RANGE;
7507 gcc_assert (ar->start[n] == NULL);
7508 gcc_assert (ar->end[n] == NULL);
7514 newss = gfc_get_ss ();
7515 newss->type = GFC_SS_SECTION;
7518 newss->data.info.dimen = 0;
7519 newss->data.info.codimen = 0;
7520 newss->data.info.ref = ref;
7522 /* We add SS chains for all the subscripts in the section. */
7523 for (n = 0; n < ar->dimen + ar->codimen; n++)
7527 switch (ar->dimen_type[n])
7529 case DIMEN_THIS_IMAGE:
7532 /* Add SS for elemental (scalar) subscripts. */
7533 gcc_assert (ar->start[n]);
7534 indexss = gfc_get_ss ();
7535 indexss->type = GFC_SS_SCALAR;
7536 indexss->expr = ar->start[n];
7537 indexss->next = gfc_ss_terminator;
7538 indexss->loop_chain = gfc_ss_terminator;
7539 newss->data.info.subscript[n] = indexss;
7543 /* We don't add anything for sections, just remember this
7544 dimension for later. */
7545 newss->data.info.dim[newss->data.info.dimen
7546 + newss->data.info.codimen] = n;
7548 newss->data.info.dimen++;
7552 /* Create a GFC_SS_VECTOR index in which we can store
7553 the vector's descriptor. */
7554 indexss = gfc_get_ss ();
7555 indexss->type = GFC_SS_VECTOR;
7556 indexss->expr = ar->start[n];
7557 indexss->next = gfc_ss_terminator;
7558 indexss->loop_chain = gfc_ss_terminator;
7559 newss->data.info.subscript[n] = indexss;
7560 newss->data.info.dim[newss->data.info.dimen
7561 + newss->data.info.codimen] = n;
7563 newss->data.info.dimen++;
7567 /* We should know what sort of section it is by now. */
7571 /* We should have at least one non-elemental dimension. */
7572 gcc_assert (newss->data.info.dimen > 0);
7577 /* We should know what sort of section it is by now. */
7586 /* Walk an expression operator. If only one operand of a binary expression is
7587 scalar, we must also add the scalar term to the SS chain. */
7590 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
7596 head = gfc_walk_subexpr (ss, expr->value.op.op1);
7597 if (expr->value.op.op2 == NULL)
7600 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
7602 /* All operands are scalar. Pass back and let the caller deal with it. */
7606 /* All operands require scalarization. */
7607 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
7610 /* One of the operands needs scalarization, the other is scalar.
7611 Create a gfc_ss for the scalar expression. */
7612 newss = gfc_get_ss ();
7613 newss->type = GFC_SS_SCALAR;
7616 /* First operand is scalar. We build the chain in reverse order, so
7617 add the scalar SS after the second operand. */
7619 while (head && head->next != ss)
7621 /* Check we haven't somehow broken the chain. */
7625 newss->expr = expr->value.op.op1;
7627 else /* head2 == head */
7629 gcc_assert (head2 == head);
7630 /* Second operand is scalar. */
7631 newss->next = head2;
7633 newss->expr = expr->value.op.op2;
7640 /* Reverse a SS chain. */
7643 gfc_reverse_ss (gfc_ss * ss)
7648 gcc_assert (ss != NULL);
7650 head = gfc_ss_terminator;
7651 while (ss != gfc_ss_terminator)
7654 /* Check we didn't somehow break the chain. */
7655 gcc_assert (next != NULL);
7665 /* Walk the arguments of an elemental function. */
7668 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
7676 head = gfc_ss_terminator;
7679 for (; arg; arg = arg->next)
7684 newss = gfc_walk_subexpr (head, arg->expr);
7687 /* Scalar argument. */
7688 newss = gfc_get_ss ();
7690 newss->expr = arg->expr;
7700 while (tail->next != gfc_ss_terminator)
7707 /* If all the arguments are scalar we don't need the argument SS. */
7708 gfc_free_ss_chain (head);
7713 /* Add it onto the existing chain. */
7719 /* Walk a function call. Scalar functions are passed back, and taken out of
7720 scalarization loops. For elemental functions we walk their arguments.
7721 The result of functions returning arrays is stored in a temporary outside
7722 the loop, so that the function is only called once. Hence we do not need
7723 to walk their arguments. */
7726 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
7729 gfc_intrinsic_sym *isym;
7731 gfc_component *comp = NULL;
7734 isym = expr->value.function.isym;
7736 /* Handle intrinsic functions separately. */
7738 return gfc_walk_intrinsic_function (ss, expr, isym);
7740 sym = expr->value.function.esym;
7742 sym = expr->symtree->n.sym;
7744 /* A function that returns arrays. */
7745 gfc_is_proc_ptr_comp (expr, &comp);
7746 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
7747 || (comp && comp->attr.dimension))
7749 newss = gfc_get_ss ();
7750 newss->type = GFC_SS_FUNCTION;
7753 newss->data.info.dimen = expr->rank;
7754 for (n = 0; n < newss->data.info.dimen; n++)
7755 newss->data.info.dim[n] = n;
7759 /* Walk the parameters of an elemental function. For now we always pass
7761 if (sym->attr.elemental)
7762 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
7765 /* Scalar functions are OK as these are evaluated outside the scalarization
7766 loop. Pass back and let the caller deal with it. */
7771 /* An array temporary is constructed for array constructors. */
7774 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
7779 newss = gfc_get_ss ();
7780 newss->type = GFC_SS_CONSTRUCTOR;
7783 newss->data.info.dimen = expr->rank;
7784 for (n = 0; n < expr->rank; n++)
7785 newss->data.info.dim[n] = n;
7791 /* Walk an expression. Add walked expressions to the head of the SS chain.
7792 A wholly scalar expression will not be added. */
7795 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
7799 switch (expr->expr_type)
7802 head = gfc_walk_variable_expr (ss, expr);
7806 head = gfc_walk_op_expr (ss, expr);
7810 head = gfc_walk_function_expr (ss, expr);
7815 case EXPR_STRUCTURE:
7816 /* Pass back and let the caller deal with it. */
7820 head = gfc_walk_array_constructor (ss, expr);
7823 case EXPR_SUBSTRING:
7824 /* Pass back and let the caller deal with it. */
7828 internal_error ("bad expression type during walk (%d)",
7835 /* Entry point for expression walking.
7836 A return value equal to the passed chain means this is
7837 a scalar expression. It is up to the caller to take whatever action is
7838 necessary to translate these. */
7841 gfc_walk_expr (gfc_expr * expr)
7845 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
7846 return gfc_reverse_ss (res);