1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 /* trans-array.c-- Various array related code, including scalarization,
25 allocation, initialization and other support routines. */
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
39 If the expression is an assignment, we must then resolve any dependencies.
40 In fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this gfc_make_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
67 For assignment expressions requiring a temporary two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
82 #include "coretypes.h"
85 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
88 #include "constructor.h"
90 #include "trans-stmt.h"
91 #include "trans-types.h"
92 #include "trans-array.h"
93 #include "trans-const.h"
94 #include "dependency.h"
96 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
98 /* The contents of this structure aren't actually used, just the address. */
99 static gfc_ss gfc_ss_terminator_var;
100 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
104 gfc_array_dataptr_type (tree desc)
106 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
110 /* Build expressions to access the members of an array descriptor.
111 It's surprisingly easy to mess up here, so never access
112 an array descriptor by "brute force", always use these
113 functions. This also avoids problems if we change the format
114 of an array descriptor.
116 To understand these magic numbers, look at the comments
117 before gfc_build_array_type() in trans-types.c.
119 The code within these defines should be the only code which knows the format
120 of an array descriptor.
122 Any code just needing to obtain the bounds of an array should use
123 gfc_conv_array_* rather than the following functions as these will return
124 known constant values, and work with arrays which do not have descriptors.
126 Don't forget to #undef these! */
129 #define OFFSET_FIELD 1
130 #define DTYPE_FIELD 2
131 #define DIMENSION_FIELD 3
132 #define CAF_TOKEN_FIELD 4
134 #define STRIDE_SUBFIELD 0
135 #define LBOUND_SUBFIELD 1
136 #define UBOUND_SUBFIELD 2
138 /* This provides READ-ONLY access to the data field. The field itself
139 doesn't have the proper type. */
142 gfc_conv_descriptor_data_get (tree desc)
146 type = TREE_TYPE (desc);
147 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
149 field = TYPE_FIELDS (type);
150 gcc_assert (DATA_FIELD == 0);
152 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
154 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
159 /* This provides WRITE access to the data field.
161 TUPLES_P is true if we are generating tuples.
163 This function gets called through the following macros:
164 gfc_conv_descriptor_data_set. */
168 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
172 type = TREE_TYPE (desc);
173 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
175 field = TYPE_FIELDS (type);
176 gcc_assert (DATA_FIELD == 0);
178 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
180 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
184 /* This provides address access to the data field. This should only be
185 used by array allocation, passing this on to the runtime. */
188 gfc_conv_descriptor_data_addr (tree desc)
192 type = TREE_TYPE (desc);
193 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
195 field = TYPE_FIELDS (type);
196 gcc_assert (DATA_FIELD == 0);
198 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
200 return gfc_build_addr_expr (NULL_TREE, t);
204 gfc_conv_descriptor_offset (tree desc)
209 type = TREE_TYPE (desc);
210 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
212 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
213 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
215 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
216 desc, field, NULL_TREE);
220 gfc_conv_descriptor_offset_get (tree desc)
222 return gfc_conv_descriptor_offset (desc);
226 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
229 tree t = gfc_conv_descriptor_offset (desc);
230 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
235 gfc_conv_descriptor_dtype (tree desc)
240 type = TREE_TYPE (desc);
241 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
243 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
244 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
246 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
247 desc, field, NULL_TREE);
251 gfc_conv_descriptor_dimension (tree desc, tree dim)
257 type = TREE_TYPE (desc);
258 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
260 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
261 gcc_assert (field != NULL_TREE
262 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
263 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
265 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
266 desc, field, NULL_TREE);
267 tmp = gfc_build_array_ref (tmp, dim, NULL);
273 gfc_conv_descriptor_token (tree desc)
278 type = TREE_TYPE (desc);
279 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
280 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
281 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
282 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
283 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == prvoid_type_node);
285 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
286 desc, field, NULL_TREE);
291 gfc_conv_descriptor_stride (tree desc, tree dim)
296 tmp = gfc_conv_descriptor_dimension (desc, dim);
297 field = TYPE_FIELDS (TREE_TYPE (tmp));
298 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
299 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
301 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
302 tmp, field, NULL_TREE);
307 gfc_conv_descriptor_stride_get (tree desc, tree dim)
309 tree type = TREE_TYPE (desc);
310 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
311 if (integer_zerop (dim)
312 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
313 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
314 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
315 return gfc_index_one_node;
317 return gfc_conv_descriptor_stride (desc, dim);
321 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
322 tree dim, tree value)
324 tree t = gfc_conv_descriptor_stride (desc, dim);
325 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
329 gfc_conv_descriptor_lbound (tree desc, tree dim)
334 tmp = gfc_conv_descriptor_dimension (desc, dim);
335 field = TYPE_FIELDS (TREE_TYPE (tmp));
336 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
337 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
339 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
340 tmp, field, NULL_TREE);
345 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
347 return gfc_conv_descriptor_lbound (desc, dim);
351 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
352 tree dim, tree value)
354 tree t = gfc_conv_descriptor_lbound (desc, dim);
355 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
359 gfc_conv_descriptor_ubound (tree desc, tree dim)
364 tmp = gfc_conv_descriptor_dimension (desc, dim);
365 field = TYPE_FIELDS (TREE_TYPE (tmp));
366 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
367 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
369 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
370 tmp, field, NULL_TREE);
375 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
377 return gfc_conv_descriptor_ubound (desc, dim);
381 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
382 tree dim, tree value)
384 tree t = gfc_conv_descriptor_ubound (desc, dim);
385 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
388 /* Build a null array descriptor constructor. */
391 gfc_build_null_descriptor (tree type)
396 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
397 gcc_assert (DATA_FIELD == 0);
398 field = TYPE_FIELDS (type);
400 /* Set a NULL data pointer. */
401 tmp = build_constructor_single (type, field, null_pointer_node);
402 TREE_CONSTANT (tmp) = 1;
403 /* All other fields are ignored. */
409 /* Modify a descriptor such that the lbound of a given dimension is the value
410 specified. This also updates ubound and offset accordingly. */
413 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
414 int dim, tree new_lbound)
416 tree offs, ubound, lbound, stride;
417 tree diff, offs_diff;
419 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
421 offs = gfc_conv_descriptor_offset_get (desc);
422 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
423 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
424 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
426 /* Get difference (new - old) by which to shift stuff. */
427 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
430 /* Shift ubound and offset accordingly. This has to be done before
431 updating the lbound, as they depend on the lbound expression! */
432 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
434 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
435 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
437 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
439 gfc_conv_descriptor_offset_set (block, desc, offs);
441 /* Finally set lbound to value we want. */
442 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
446 /* Cleanup those #defines. */
451 #undef DIMENSION_FIELD
452 #undef CAF_TOKEN_FIELD
453 #undef STRIDE_SUBFIELD
454 #undef LBOUND_SUBFIELD
455 #undef UBOUND_SUBFIELD
458 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
459 flags & 1 = Main loop body.
460 flags & 2 = temp copy loop. */
463 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
465 for (; ss != gfc_ss_terminator; ss = ss->next)
466 ss->info->useflags = flags;
469 static void gfc_free_ss (gfc_ss *);
472 /* Free a gfc_ss chain. */
475 gfc_free_ss_chain (gfc_ss * ss)
479 while (ss != gfc_ss_terminator)
481 gcc_assert (ss != NULL);
490 free_ss_info (gfc_ss_info *ss_info)
499 gfc_free_ss (gfc_ss * ss)
501 gfc_ss_info *ss_info;
506 switch (ss_info->type)
509 for (n = 0; n < ss->dimen; n++)
511 if (ss_info->data.array.subscript[ss->dim[n]])
512 gfc_free_ss_chain (ss_info->data.array.subscript[ss->dim[n]]);
520 free_ss_info (ss_info);
525 /* Creates and initializes an array type gfc_ss struct. */
528 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
531 gfc_ss_info *ss_info;
534 ss_info = gfc_get_ss_info ();
535 ss_info->type = type;
536 ss_info->expr = expr;
542 for (i = 0; i < ss->dimen; i++)
549 /* Creates and initializes a temporary type gfc_ss struct. */
552 gfc_get_temp_ss (tree type, tree string_length, int dimen)
555 gfc_ss_info *ss_info;
558 ss_info = gfc_get_ss_info ();
559 ss_info->type = GFC_SS_TEMP;
560 ss_info->string_length = string_length;
561 ss_info->data.temp.type = type;
565 ss->next = gfc_ss_terminator;
567 for (i = 0; i < ss->dimen; i++)
574 /* Creates and initializes a scalar type gfc_ss struct. */
577 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
580 gfc_ss_info *ss_info;
582 ss_info = gfc_get_ss_info ();
583 ss_info->type = GFC_SS_SCALAR;
584 ss_info->expr = expr;
594 /* Free all the SS associated with a loop. */
597 gfc_cleanup_loop (gfc_loopinfo * loop)
603 while (ss != gfc_ss_terminator)
605 gcc_assert (ss != NULL);
606 next = ss->loop_chain;
613 /* Associate a SS chain with a loop. */
616 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
620 if (head == gfc_ss_terminator)
624 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
626 if (ss->next == gfc_ss_terminator)
627 ss->loop_chain = loop->ss;
629 ss->loop_chain = ss->next;
631 gcc_assert (ss == gfc_ss_terminator);
636 /* Generate an initializer for a static pointer or allocatable array. */
639 gfc_trans_static_array_pointer (gfc_symbol * sym)
643 gcc_assert (TREE_STATIC (sym->backend_decl));
644 /* Just zero the data member. */
645 type = TREE_TYPE (sym->backend_decl);
646 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
650 /* If the bounds of SE's loop have not yet been set, see if they can be
651 determined from array spec AS, which is the array spec of a called
652 function. MAPPING maps the callee's dummy arguments to the values
653 that the caller is passing. Add any initialization and finalization code to SE. */
657 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
658 gfc_se * se, gfc_array_spec * as)
666 if (as && as->type == AS_EXPLICIT)
667 for (n = 0; n < se->loop->dimen; n++)
669 dim = se->ss->dim[n];
670 gcc_assert (dim < as->rank);
671 gcc_assert (se->loop->dimen == as->rank);
672 if (se->loop->to[n] == NULL_TREE)
674 /* Evaluate the lower bound. */
675 gfc_init_se (&tmpse, NULL);
676 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
677 gfc_add_block_to_block (&se->pre, &tmpse.pre);
678 gfc_add_block_to_block (&se->post, &tmpse.post);
679 lower = fold_convert (gfc_array_index_type, tmpse.expr);
681 /* ...and the upper bound. */
682 gfc_init_se (&tmpse, NULL);
683 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
684 gfc_add_block_to_block (&se->pre, &tmpse.pre);
685 gfc_add_block_to_block (&se->post, &tmpse.post);
686 upper = fold_convert (gfc_array_index_type, tmpse.expr);
688 /* Set the upper bound of the loop to UPPER - LOWER. */
689 tmp = fold_build2_loc (input_location, MINUS_EXPR,
690 gfc_array_index_type, upper, lower);
691 tmp = gfc_evaluate_now (tmp, &se->pre);
692 se->loop->to[n] = tmp;
698 /* Generate code to allocate an array temporary, or create a variable to
699 hold the data. If size is NULL, zero the descriptor so that the
700 callee will allocate the array. If DEALLOC is true, also generate code to
701 free the array afterwards.
703 If INITIAL is not NULL, it is packed using internal_pack and the result used
704 as data instead of allocating a fresh, unitialized area of memory.
706 Initialization code is added to PRE and finalization code to POST.
707 DYNAMIC is true if the caller may want to extend the array later
708 using realloc. This prevents us from putting the array on the stack. */
711 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
712 gfc_array_info * info, tree size, tree nelem,
713 tree initial, bool dynamic, bool dealloc)
719 desc = info->descriptor;
720 info->offset = gfc_index_zero_node;
721 if (size == NULL_TREE || integer_zerop (size))
723 /* A callee allocated array. */
724 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
729 /* Allocate the temporary. */
730 onstack = !dynamic && initial == NULL_TREE
731 && (gfc_option.flag_stack_arrays
732 || gfc_can_put_var_on_stack (size));
736 /* Make a temporary variable to hold the data. */
737 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
738 nelem, gfc_index_one_node);
739 tmp = gfc_evaluate_now (tmp, pre);
740 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
742 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
744 tmp = gfc_create_var (tmp, "A");
745 /* If we're here only because of -fstack-arrays we have to
746 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
747 if (!gfc_can_put_var_on_stack (size))
748 gfc_add_expr_to_block (pre,
749 fold_build1_loc (input_location,
750 DECL_EXPR, TREE_TYPE (tmp),
752 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
753 gfc_conv_descriptor_data_set (pre, desc, tmp);
757 /* Allocate memory to hold the data or call internal_pack. */
758 if (initial == NULL_TREE)
760 tmp = gfc_call_malloc (pre, NULL, size);
761 tmp = gfc_evaluate_now (tmp, pre);
768 stmtblock_t do_copying;
770 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
771 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
772 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
773 tmp = gfc_get_element_type (tmp);
774 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
775 packed = gfc_create_var (build_pointer_type (tmp), "data");
777 tmp = build_call_expr_loc (input_location,
778 gfor_fndecl_in_pack, 1, initial);
779 tmp = fold_convert (TREE_TYPE (packed), tmp);
780 gfc_add_modify (pre, packed, tmp);
782 tmp = build_fold_indirect_ref_loc (input_location,
784 source_data = gfc_conv_descriptor_data_get (tmp);
786 /* internal_pack may return source->data without any allocation
787 or copying if it is already packed. If that's the case, we
788 need to allocate and copy manually. */
790 gfc_start_block (&do_copying);
791 tmp = gfc_call_malloc (&do_copying, NULL, size);
792 tmp = fold_convert (TREE_TYPE (packed), tmp);
793 gfc_add_modify (&do_copying, packed, tmp);
794 tmp = gfc_build_memcpy_call (packed, source_data, size);
795 gfc_add_expr_to_block (&do_copying, tmp);
797 was_packed = fold_build2_loc (input_location, EQ_EXPR,
798 boolean_type_node, packed,
800 tmp = gfc_finish_block (&do_copying);
801 tmp = build3_v (COND_EXPR, was_packed, tmp,
802 build_empty_stmt (input_location));
803 gfc_add_expr_to_block (pre, tmp);
805 tmp = fold_convert (pvoid_type_node, packed);
808 gfc_conv_descriptor_data_set (pre, desc, tmp);
811 info->data = gfc_conv_descriptor_data_get (desc);
813 /* The offset is zero because we create temporaries with a zero
815 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
817 if (dealloc && !onstack)
819 /* Free the temporary. */
820 tmp = gfc_conv_descriptor_data_get (desc);
821 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
822 gfc_add_expr_to_block (post, tmp);
827 /* Get the array reference dimension corresponding to the given loop dimension.
828 It is different from the true array dimension given by the dim array in
829 the case of a partial array reference
830 It is different from the loop dimension in the case of a transposed array.
834 get_array_ref_dim (gfc_ss *ss, int loop_dim)
836 int n, array_dim, array_ref_dim;
839 array_dim = ss->dim[loop_dim];
841 for (n = 0; n < ss->dimen; n++)
842 if (ss->dim[n] < array_dim)
845 return array_ref_dim;
849 /* Generate code to create and initialize the descriptor for a temporary
850 array. This is used for both temporaries needed by the scalarizer, and
851 functions returning arrays. Adjusts the loop variables to be
852 zero-based, and calculates the loop bounds for callee allocated arrays.
853 Allocate the array unless it's callee allocated (we have a callee
854 allocated array if 'callee_alloc' is true, or if loop->to[n] is
855 NULL_TREE for any n). Also fills in the descriptor, data and offset
856 fields of info if known. Returns the size of the array, or NULL for a
857 callee allocated array.
859 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
860 gfc_trans_allocate_array_storage.
/* NOTE(review): this listing has dropped lines (the embedded original line
   numbers jump); the code is kept verbatim and only comments are added.  */
864 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
865 gfc_loopinfo * loop, gfc_ss * ss,
866 tree eltype, tree initial, bool dynamic,
867 bool dealloc, bool callee_alloc, locus * where)
869 gfc_array_info *info;
870 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
880 memset (from, 0, sizeof (from));
881 memset (to, 0, sizeof (to));
883 info = &ss->info->data.array;
885 gcc_assert (ss->dimen > 0);
886 gcc_assert (loop->dimen == ss->dimen);
888 if (gfc_option.warn_array_temp && where)
889 gfc_warning ("Creating array temporary at %L", where);
891 /* Set the lower bound to zero. */
892 for (n = 0; n < loop->dimen; n++)
896 /* Callee allocated arrays may not have a known bound yet. */
/* Normalize each loop range to 0 .. (to - from).  */
898 loop->to[n] = gfc_evaluate_now (
899 fold_build2_loc (input_location, MINUS_EXPR,
900 gfc_array_index_type,
901 loop->to[n], loop->from[n]),
903 loop->from[n] = gfc_index_zero_node;
905 /* We have just changed the loop bounds, we must clear the
906 corresponding specloop, so that delta calculation is not skipped
907 later in set_delta. */
908 loop->specloop[n] = NULL;
910 /* We are constructing the temporary's descriptor based on the loop
911 dimensions. As the dimensions may be accessed in arbitrary order
912 (think of transpose) the size taken from the n'th loop may not map
913 to the n'th dimension of the array. We need to reconstruct loop infos
914 in the right order before using it to set the descriptor
916 tmp_dim = get_array_ref_dim (ss, n);
917 from[tmp_dim] = loop->from[n];
918 to[tmp_dim] = loop->to[n];
/* Temporaries are packed and zero-based: no shift, unit stride.  */
920 info->delta[dim] = gfc_index_zero_node;
921 info->start[dim] = gfc_index_zero_node;
922 info->end[dim] = gfc_index_zero_node;
923 info->stride[dim] = gfc_index_one_node;
926 /* Initialize the descriptor. */
928 gfc_get_array_type_bounds (eltype, ss->dimen, 0, from, to, 1,
929 GFC_ARRAY_UNKNOWN, true);
930 desc = gfc_create_var (type, "atmp");
931 GFC_DECL_PACKED_ARRAY (desc) = 1;
933 info->descriptor = desc;
934 size = gfc_index_one_node;
936 /* Fill in the array dtype. */
937 tmp = gfc_conv_descriptor_dtype (desc);
938 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
941 Fill in the bounds and stride. This is a packed array, so:
944 for (n = 0; n < rank; n++)
947 delta = ubound[n] + 1 - lbound[n];
950 size = size * sizeof(element);
955 /* If there is at least one null loop->to[n], it is a callee allocated
957 for (n = 0; n < loop->dimen; n++)
958 if (loop->to[n] == NULL_TREE)
/* SIZE is cleared for callee-allocated arrays (presumably in the lines
   dropped here) — confirm against the repository.  */
964 for (n = 0; n < loop->dimen; n++)
968 if (size == NULL_TREE)
970 /* For a callee allocated array express the loop bounds in terms
971 of the descriptor fields. */
972 tmp = fold_build2_loc (input_location,
973 MINUS_EXPR, gfc_array_index_type,
974 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
975 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
980 /* Store the stride and bound components in the descriptor. */
981 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
983 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
984 gfc_index_zero_node);
986 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n],
/* Extent of this dimension: to[n] + 1 (zero-based lower bound).  */
989 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
990 to[n], gfc_index_one_node);
992 /* Check whether the size for this dimension is negative. */
993 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, tmp,
994 gfc_index_zero_node);
995 cond = gfc_evaluate_now (cond, pre);
/* OR together the per-dimension "empty extent" conditions.  */
1000 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
1001 boolean_type_node, or_expr, cond);
1003 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1005 size = gfc_evaluate_now (size, pre);
1008 /* Get the size of the array. */
1010 if (size && !callee_alloc)
1012 /* If or_expr is true, then the extent in at least one
1013 dimension is zero and the size is set to zero. */
1014 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
1015 or_expr, gfc_index_zero_node, size);
/* Convert the element count into a byte size.  */
1018 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1020 fold_convert (gfc_array_index_type,
1021 TYPE_SIZE_UNIT (gfc_get_element_type (type))));
1029 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
/* Track the deepest temporary rank seen by this loop nest.  */
1032 if (ss->dimen > loop->temp_dim)
1033 loop->temp_dim = ss->dimen;
1039 /* Return the number of iterations in a loop that starts at START,
1040 ends at END, and has step STEP. */
1043 gfc_get_iteration_count (tree start, tree end, tree step)
1048 type = TREE_TYPE (step);
1049 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1050 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1051 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1052 build_int_cst (type, 1));
1053 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1054 build_int_cst (type, 0));
1055 return fold_convert (gfc_array_index_type, tmp);
1059 /* Extend the data in array DESC by EXTRA elements. */
1062 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1069 if (integer_zerop (extra))
1072 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1074 /* Add EXTRA to the upper bound. */
1075 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1077 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1079 /* Get the value of the current data pointer. */
1080 arg0 = gfc_conv_descriptor_data_get (desc);
1082 /* Calculate the new array size. */
1083 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1084 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1085 ubound, gfc_index_one_node);
1086 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1087 fold_convert (size_type_node, tmp),
1088 fold_convert (size_type_node, size));
1090 /* Call the realloc() function. */
1091 tmp = gfc_call_realloc (pblock, arg0, arg1);
1092 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1096 /* Return true if the bounds of iterator I can only be determined at run time. */
1100 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1102 return (i->start->expr_type != EXPR_CONSTANT
1103 || i->end->expr_type != EXPR_CONSTANT
1104 || i->step->expr_type != EXPR_CONSTANT);
1108 /* Split the size of constructor element EXPR into the sum of two terms,
1109 one of which can be determined at compile time and one of which must
1110 be calculated at run time. Set *SIZE to the former and return true
1111 if the latter might be nonzero. */
1114 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1116 if (expr->expr_type == EXPR_ARRAY)
1117 return gfc_get_array_constructor_size (size, expr->value.constructor);
1118 else if (expr->rank > 0)
1120 /* Calculate everything at run time. */
1121 mpz_set_ui (*size, 0);
1126 /* A single element. */
1127 mpz_set_ui (*size, 1);
1133 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1134 of array constructor C. */
1137 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1145 mpz_set_ui (*size, 0);
1150 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1153 if (i && gfc_iterator_has_dynamic_bounds (i))
1157 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1160 /* Multiply the static part of the element size by the
1161 number of iterations. */
1162 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1163 mpz_fdiv_q (val, val, i->step->value.integer);
1164 mpz_add_ui (val, val, 1);
1165 if (mpz_sgn (val) > 0)
1166 mpz_mul (len, len, val);
1168 mpz_set_ui (len, 0);
1170 mpz_add (*size, *size, len);
1179 /* Make sure offset is a variable. */
1182 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1185 /* We should have already created the offset variable. We cannot
1186 create it here because we may be in an inner scope. */
1187 gcc_assert (*offsetvar != NULL_TREE);
1188 gfc_add_modify (pblock, *offsetvar, *poffset);
1189 *poffset = *offsetvar;
1190 TREE_USED (*offsetvar) = 1;
1194 /* Variables needed for bounds-checking. */
/* NOTE(review): these appear to coordinate the CHARACTER-length equality
   check in gfc_trans_array_ctor_element below — confirm the exact
   protocol against the repository; lines are missing from this listing.  */
1195 static bool first_len;
/* Length recorded from the first character constructor element, compared
   against subsequent elements when bounds checking is enabled.  */
1196 static tree first_len_val;
/* True when the constructor has an explicit typespec, which suppresses
   the per-element length check (see use below).  */
1197 static bool typespec_chararray_ctor;
/* Store scalar element EXPR of an array constructor into the temporary
   DESC at index OFFSET.  NOTE(review): this listing has dropped lines
   (embedded line numbers jump); code kept verbatim, comments only added.  */
1200 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1201 tree offset, gfc_se * se, gfc_expr * expr)
1205 gfc_conv_expr (se, expr);
1207 /* Store the value. */
1208 tmp = build_fold_indirect_ref_loc (input_location,
1209 gfc_conv_descriptor_data_get (desc));
1210 tmp = gfc_build_array_ref (tmp, offset, NULL);
1212 if (expr->ts.type == BT_CHARACTER)
1214 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
/* Element size in characters = byte size / bytes per character.  */
1217 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1218 esize = fold_convert (gfc_charlen_type_node, esize);
1219 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1220 gfc_charlen_type_node, esize,
1221 build_int_cst (gfc_charlen_type_node,
1222 gfc_character_kinds[i].bit_size / 8));
1224 gfc_conv_string_parameter (se);
1225 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1227 /* The temporary is an array of pointers. */
1228 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1229 gfc_add_modify (&se->pre, tmp, se->expr);
1233 /* The temporary is an array of string values. */
1234 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1235 /* We know the temporary and the value will be the same length,
1236 so can use memcpy. */
1237 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1238 se->string_length, se->expr, expr->ts.kind);
/* Optional run-time check that every element has the same length.  */
1240 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
/* First element: remember its length in first_len_val.  */
1244 gfc_add_modify (&se->pre, first_len_val,
1250 /* Verify that all constructor elements are of the same
1252 tree cond = fold_build2_loc (input_location, NE_EXPR,
1253 boolean_type_node, first_len_val,
1255 gfc_trans_runtime_check
1256 (true, false, cond, &se->pre, &expr->where,
1257 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1258 fold_convert (long_integer_type_node, first_len_val),
1259 fold_convert (long_integer_type_node, se->string_length));
/* Non-character element: plain converted assignment.  */
1265 /* TODO: Should the frontend already have done this conversion? */
1266 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1267 gfc_add_modify (&se->pre, tmp, se->expr);
/* Flush the element's side-effect blocks into the caller's block.  */
1270 gfc_add_block_to_block (pblock, &se->pre);
1271 gfc_add_block_to_block (pblock, &se->post);
1275 /* Add the contents of an array to the constructor. DYNAMIC is as for
1276 gfc_trans_array_constructor_value.  Scalarizes EXPR into DESC starting
   at *POFFSET, growing the destination first and advancing the offset
   once per copied element.  */
1279 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1280 tree type ATTRIBUTE_UNUSED,
1281 tree desc, gfc_expr * expr,
1282 tree * poffset, tree * offsetvar,
1293 /* We need this to be a variable so we can increment it. */
1294 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1296 gfc_init_se (&se, NULL);
1298 /* Walk the array expression. */
1299 ss = gfc_walk_expr (expr);
1300 gcc_assert (ss != gfc_ss_terminator);
1302 /* Initialize the scalarizer. */
1303 gfc_init_loopinfo (&loop);
1304 gfc_add_ss_to_loop (&loop, ss);
1306 /* Initialize the loop. */
1307 gfc_conv_ss_startstride (&loop);
1308 gfc_conv_loop_setup (&loop, &expr->where);
1310 /* Make sure the constructed array has room for the new data. */
1313 /* Set SIZE to the total number of elements in the subarray. */
1314 size = gfc_index_one_node;
1315 for (n = 0; n < loop.dimen; n++)
1317 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1318 gfc_index_one_node);
1319 size = fold_build2_loc (input_location, MULT_EXPR,
1320 gfc_array_index_type, size, tmp);
1323 /* Grow the constructed array by SIZE elements. */
1324 gfc_grow_array (&loop.pre, desc, size);
1327 /* Make the loop body. */
1328 gfc_mark_ss_chain_used (ss, 1);
1329 gfc_start_scalarized_body (&loop, &body);
1330 gfc_copy_loopinfo_to_se (&se, &loop);
1333 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1334 gcc_assert (se.ss == gfc_ss_terminator);
1336 /* Increment the offset. */
1337 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1338 *poffset, gfc_index_one_node);
1339 gfc_add_modify (&body, *poffset, tmp);
1341 /* Finish the loop. */
1342 gfc_trans_scalarizing_loops (&loop, &body);
1343 gfc_add_block_to_block (&loop.pre, &loop.post);
1344 tmp = gfc_finish_block (&loop.pre);
1345 gfc_add_expr_to_block (pblock, tmp);
1347 gfc_cleanup_loop (&loop);
1351 /* Assign the values to the elements of an array constructor. DYNAMIC
1352 is true if descriptor DESC only contains enough data for the static
1353 size calculated by gfc_get_array_constructor_size. When true, memory
1354 for the dynamic parts must be allocated using realloc.
   Handles, per constructor element: nested constructors (recursion),
   array-valued elements (subarray copy), runs of scalar constants
   (collected into a static initializer and memcpy'd), other scalars,
   and iterator (implied-do) elements, for which an explicit loop with a
   shadow loop variable is built.  */
1357 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1358 tree desc, gfc_constructor_base base,
1359 tree * poffset, tree * offsetvar,
1368 tree shadow_loopvar = NULL_TREE;
1369 gfc_saved_var saved_loopvar;
1372 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1374 /* If this is an iterator or an array, the offset must be a variable. */
1375 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1376 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1378 /* Shadowing the iterator avoids changing its value and saves us from
1379 keeping track of it. Further, it makes sure that there's always a
1380 backend-decl for the symbol, even if there wasn't one before,
1381 e.g. in the case of an iterator that appears in a specification
1382 expression in an interface mapping. */
1385 gfc_symbol *sym = c->iterator->var->symtree->n.sym;
1386 tree type = gfc_typenode_for_spec (&sym->ts);
1388 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1389 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1392 gfc_start_block (&body);
1394 if (c->expr->expr_type == EXPR_ARRAY)
1396 /* Array constructors can be nested. */
1397 gfc_trans_array_constructor_value (&body, type, desc,
1398 c->expr->value.constructor,
1399 poffset, offsetvar, dynamic);
1401 else if (c->expr->rank > 0)
1403 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1404 poffset, offsetvar, dynamic);
1408 /* This code really upsets the gimplifier so don't bother for now. */
1415 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1417 p = gfc_constructor_next (p);
1422 /* Scalar values. */
1423 gfc_init_se (&se, NULL);
1424 gfc_trans_array_ctor_element (&body, desc, *poffset,
1427 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1428 gfc_array_index_type,
1429 *poffset, gfc_index_one_node);
1433 /* Collect multiple scalar constants into a constructor. */
1434 VEC(constructor_elt,gc) *v = NULL;
1438 HOST_WIDE_INT idx = 0;
1441 /* Count the number of consecutive scalar constants. */
1442 while (p && !(p->iterator
1443 || p->expr->expr_type != EXPR_CONSTANT))
1445 gfc_init_se (&se, NULL);
1446 gfc_conv_constant (&se, p->expr);
1448 if (c->expr->ts.type != BT_CHARACTER)
1449 se.expr = fold_convert (type, se.expr);
1450 /* For constant character array constructors we build
1451 an array of pointers. */
1452 else if (POINTER_TYPE_P (type))
1453 se.expr = gfc_build_addr_expr
1454 (gfc_get_pchar_type (p->expr->ts.kind),
1457 CONSTRUCTOR_APPEND_ELT (v,
1458 build_int_cst (gfc_array_index_type,
1462 p = gfc_constructor_next (p);
1465 bound = size_int (n - 1);
1466 /* Create an array type to hold them. */
1467 tmptype = build_range_type (gfc_array_index_type,
1468 gfc_index_zero_node, bound);
1469 tmptype = build_array_type (type, tmptype);
1471 init = build_constructor (tmptype, v);
1472 TREE_CONSTANT (init) = 1;
1473 TREE_STATIC (init) = 1;
1474 /* Create a static variable to hold the data. */
1475 tmp = gfc_create_var (tmptype, "data");
1476 TREE_STATIC (tmp) = 1;
1477 TREE_CONSTANT (tmp) = 1;
1478 TREE_READONLY (tmp) = 1;
1479 DECL_INITIAL (tmp) = init;
1482 /* Use BUILTIN_MEMCPY to assign the values. */
1483 tmp = gfc_conv_descriptor_data_get (desc);
1484 tmp = build_fold_indirect_ref_loc (input_location,
1486 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1487 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1488 init = gfc_build_addr_expr (NULL_TREE, init);
1490 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1491 bound = build_int_cst (size_type_node, n * size);
1492 tmp = build_call_expr_loc (input_location,
1493 builtin_decl_explicit (BUILT_IN_MEMCPY),
1494 3, tmp, init, bound);
1495 gfc_add_expr_to_block (&body, tmp);
/* Advance the offset past the N elements just copied.  */
1497 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1498 gfc_array_index_type, *poffset,
1499 build_int_cst (gfc_array_index_type, n));
1501 if (!INTEGER_CST_P (*poffset))
1503 gfc_add_modify (&body, *offsetvar, *poffset);
1504 *poffset = *offsetvar;
1508 /* The frontend should already have done any expansions
    (comment truncated in this view -- presumably "...possible").  */
1512 /* Pass the code as is. */
1513 tmp = gfc_finish_block (&body);
1514 gfc_add_expr_to_block (pblock, tmp);
1518 /* Build the implied do-loop. */
1519 stmtblock_t implied_do_block;
1527 loopbody = gfc_finish_block (&body);
1529 /* Create a new block that holds the implied-do loop. A temporary
1530 loop-variable is used. */
1531 gfc_start_block(&implied_do_block);
1533 /* Initialize the loop. */
1534 gfc_init_se (&se, NULL);
1535 gfc_conv_expr_val (&se, c->iterator->start);
1536 gfc_add_block_to_block (&implied_do_block, &se.pre);
1537 gfc_add_modify (&implied_do_block, shadow_loopvar, se.expr);
1539 gfc_init_se (&se, NULL);
1540 gfc_conv_expr_val (&se, c->iterator->end);
1541 gfc_add_block_to_block (&implied_do_block, &se.pre);
1542 end = gfc_evaluate_now (se.expr, &implied_do_block);
1544 gfc_init_se (&se, NULL);
1545 gfc_conv_expr_val (&se, c->iterator->step);
1546 gfc_add_block_to_block (&implied_do_block, &se.pre);
1547 step = gfc_evaluate_now (se.expr, &implied_do_block);
1549 /* If this array expands dynamically, and the number of iterations
1550 is not constant, we won't have allocated space for the static
1551 part of C->EXPR's size. Do that now. */
1552 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1554 /* Get the number of iterations. */
1555 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1557 /* Get the static part of C->EXPR's size. */
1558 gfc_get_array_constructor_element_size (&size, c->expr);
1559 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1561 /* Grow the array by TMP * TMP2 elements. */
1562 tmp = fold_build2_loc (input_location, MULT_EXPR,
1563 gfc_array_index_type, tmp, tmp2);
1564 gfc_grow_array (&implied_do_block, desc, tmp);
1567 /* Generate the loop body. */
1568 exit_label = gfc_build_label_decl (NULL_TREE);
1569 gfc_start_block (&body);
1571 /* Generate the exit condition. Depending on the sign of
1572 the step variable we have to generate the correct
    comparison (> for positive step, < for negative).  */
1574 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1575 step, build_int_cst (TREE_TYPE (step), 0));
1576 cond = fold_build3_loc (input_location, COND_EXPR,
1577 boolean_type_node, tmp,
1578 fold_build2_loc (input_location, GT_EXPR,
1579 boolean_type_node, shadow_loopvar, end),
1580 fold_build2_loc (input_location, LT_EXPR,
1581 boolean_type_node, shadow_loopvar, end));
1582 tmp = build1_v (GOTO_EXPR, exit_label);
1583 TREE_USED (exit_label) = 1;
1584 tmp = build3_v (COND_EXPR, cond, tmp,
1585 build_empty_stmt (input_location));
1586 gfc_add_expr_to_block (&body, tmp);
1588 /* The main loop body. */
1589 gfc_add_expr_to_block (&body, loopbody);
1591 /* Increase loop variable by step. */
1592 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1593 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1595 gfc_add_modify (&body, shadow_loopvar, tmp);
1597 /* Finish the loop. */
1598 tmp = gfc_finish_block (&body);
1599 tmp = build1_v (LOOP_EXPR, tmp);
1600 gfc_add_expr_to_block (&implied_do_block, tmp);
1602 /* Add the exit label. */
1603 tmp = build1_v (LABEL_EXPR, exit_label);
1604 gfc_add_expr_to_block (&implied_do_block, tmp);
1606 /* Finish the implied-do loop. */
1607 tmp = gfc_finish_block(&implied_do_block);
1608 gfc_add_expr_to_block(pblock, tmp);
1610 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1617 /* A catch-all to obtain the string length for anything that is not a
1618 substring of non-constant length, a constant, array or variable.
   Falls back to fully translating E (without calling functions, to
   avoid side effects) and evaluating its string length into *LEN.  */
1621 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1626 /* Don't bother if we already know the length is a constant. */
1627 if (*len && INTEGER_CST_P (*len))
1630 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1631 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
/* The typespec carries a constant length: use its backend decl.  */
1634 gfc_conv_const_charlen (e->ts.u.cl);
1635 *len = e->ts.u.cl->backend_decl;
1639 /* Otherwise, be brutal even if inefficient. */
1640 ss = gfc_walk_expr (e);
1641 gfc_init_se (&se, NULL);
1643 /* No function call, in case of side effects. */
1644 se.no_function_call = 1;
1645 if (ss == gfc_ss_terminator)
1646 gfc_conv_expr (&se, e);
1648 gfc_conv_expr_descriptor (&se, e, ss);
1650 /* Fix the value. */
1651 *len = gfc_evaluate_now (se.string_length, &se.pre);
1653 gfc_add_block_to_block (block, &se.pre);
1654 gfc_add_block_to_block (block, &se.post);
1656 e->ts.u.cl->backend_decl = *len;
1661 /* Figure out the string length of a variable reference expression.
1662 Used by get_array_ctor_strlen.  Walks EXPR's reference chain,
   tracking the current typespec, and computes *LEN from a constant
   substring range where possible.  */
1665 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1671 /* Don't bother if we already know the length is a constant. */
1672 if (*len && INTEGER_CST_P (*len))
1675 ts = &expr->symtree->n.sym->ts;
1676 for (ref = expr->ref; ref; ref = ref->next)
1681 /* Array references don't change the string length. */
1685 /* Use the length of the component. */
1686 ts = &ref->u.c.component->ts;
1690 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1691 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1693 /* Note that this might evaluate expr. */
1694 get_array_ctor_all_strlen (block, expr, len);
/* Constant substring: length = end - start + 1, computed with GMP.  */
1697 mpz_init_set_ui (char_len, 1);
1698 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1699 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1700 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1701 *len = convert (gfc_charlen_type_node, *len);
1702 mpz_clear (char_len);
1710 *len = ts->u.cl->backend_decl;
1714 /* Figure out the string length of a character array constructor.
1715 If len is NULL, don't calculate the length; this happens for recursive calls
1716 when a sub-array-constructor is an element but not at the first position,
1717 so when we're not interested in the length.
1718 Returns TRUE if all elements are character constants. */
1721 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1728 if (gfc_constructor_first (base) == NULL)
/* Empty constructor: length is zero.  */
1731 *len = build_int_cstu (gfc_charlen_type_node, 0);
1735 /* Loop over all constructor elements to find out is_const, but in len we
1736 want to store the length of the first, not the last, element. We can
1737 of course exit the loop as soon as is_const is found to be false. */
1738 for (c = gfc_constructor_first (base);
1739 c && is_const; c = gfc_constructor_next (c))
1741 switch (c->expr->expr_type)
1744 if (len && !(*len && INTEGER_CST_P (*len)))
1745 *len = build_int_cstu (gfc_charlen_type_node,
1746 c->expr->value.character.length)
1750 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1757 get_array_ctor_var_strlen (block, c->expr, len);
1763 get_array_ctor_all_strlen (block, c->expr, len);
1767 /* After the first iteration, we don't want the length modified. */
1774 /* Check whether the array constructor C consists entirely of constant
1775 elements, and if so returns the number of those elements, otherwise
1776 return zero. Note, an empty or NULL array constructor returns zero. */
1778 unsigned HOST_WIDE_INT
1779 gfc_constant_array_constructor_p (gfc_constructor_base base)
1781 unsigned HOST_WIDE_INT nelem = 0;
1783 gfc_constructor *c = gfc_constructor_first (base);
/* Any iterator, array-valued element, or non-constant element
   disqualifies the constructor.  */
1787 || c->expr->rank > 0
1788 || c->expr->expr_type != EXPR_CONSTANT)
1790 c = gfc_constructor_next (c);
1797 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1798 and the tree type of its elements, TYPE, return a static constant
1799 variable that is compile-time initialized. */
1802 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1804 tree tmptype, init, tmp;
1805 HOST_WIDE_INT nelem;
1810 VEC(constructor_elt,gc) *v = NULL;
1812 /* First traverse the constructor list, converting the constants
1813 to tree to build an initializer. */
1815 c = gfc_constructor_first (expr->value.constructor);
1818 gfc_init_se (&se, NULL);
1819 gfc_conv_constant (&se, c->expr);
1820 if (c->expr->ts.type != BT_CHARACTER)
1821 se.expr = fold_convert (type, se.expr);
1822 else if (POINTER_TYPE_P (type))
1823 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
1825 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
1827 c = gfc_constructor_next (c);
1831 /* Next determine the tree type for the array. We use the gfortran
1832 front-end's gfc_get_nodesc_array_type in order to create a suitable
1833 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
1835 memset (&as, 0, sizeof (gfc_array_spec));
1837 as.rank = expr->rank;
1838 as.type = AS_EXPLICIT;
/* Rank-1 case without shape info: bounds 0 .. nelem-1.  */
1841 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1842 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
/* Multi-dimensional case: take each extent from expr->shape.  */
1846 for (i = 0; i < expr->rank; i++)
1848 int tmp = (int) mpz_get_si (expr->shape[i]);
1849 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
1850 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
1854 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
1856 /* as is not needed anymore. */
1857 for (i = 0; i < as.rank + as.corank; i++)
1859 gfc_free_expr (as.lower[i]);
1860 gfc_free_expr (as.upper[i]);
1863 init = build_constructor (tmptype, v);
1865 TREE_CONSTANT (init) = 1;
1866 TREE_STATIC (init) = 1;
/* Create the static, read-only backing variable.  */
1868 tmp = gfc_create_var (tmptype, "A");
1869 TREE_STATIC (tmp) = 1;
1870 TREE_CONSTANT (tmp) = 1;
1871 TREE_READONLY (tmp) = 1;
1872 DECL_INITIAL (tmp) = init;
1878 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
1879 This mostly initializes the scalarizer state info structure with the
1880 appropriate values to directly use the array created by the function
1881 gfc_build_constant_array_constructor. */
1884 trans_constant_array_constructor (gfc_ss * ss, tree type)
1886 gfc_array_info *info;
1890 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
1892 info = &ss->info->data.array;
/* Point the SS directly at the static array: zero offset, and for each
   dimension zero-based bounds with unit stride.  */
1894 info->descriptor = tmp;
1895 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
1896 info->offset = gfc_index_zero_node;
1898 for (i = 0; i < ss->dimen; i++)
1900 info->delta[i] = gfc_index_zero_node;
1901 info->start[i] = gfc_index_zero_node;
1902 info->end[i] = gfc_index_zero_node;
1903 info->stride[i] = gfc_index_one_node;
1907 /* Helper routine of gfc_trans_array_constructor to determine if the
1908 bounds of the loop specified by LOOP are constant and simple enough
1909 to use with trans_constant_array_constructor. Returns the
1910 iteration count of the loop if suitable, and NULL_TREE otherwise. */
1913 constant_array_constructor_loop_size (gfc_loopinfo * loop)
1915 tree size = gfc_index_one_node;
1919 for (i = 0; i < loop->dimen; i++)
1921 /* If the bounds aren't constant, return NULL_TREE. */
1922 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
1924 if (!integer_zerop (loop->from[i]))
1926 /* Only allow nonzero "from" in one-dimensional arrays. */
1927 if (loop->dimen != 1)
/* Extent of this dimension: to - from ...  */
1929 tmp = fold_build2_loc (input_location, MINUS_EXPR,
1930 gfc_array_index_type,
1931 loop->to[i], loop->from[i]);
/* ... + 1, accumulated into the total SIZE.  */
1935 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1936 tmp, gfc_index_one_node);
1937 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1945 /* Array constructors are handled by constructing a temporary, then using that
1946 within the scalarization loop. This is not optimal, but seems by far the
    simplest approach (comment truncated in this view).
    Determines the string length for CHARACTER constructors, special-cases
    fully-constant constructors (static data), otherwise creates a
    temporary and fills it element by element.  */
1950 gfc_trans_array_constructor (gfc_loopinfo * loop, gfc_ss * ss, locus * where)
1952 gfc_constructor_base c;
1959 bool old_first_len, old_typespec_chararray_ctor;
1960 tree old_first_len_val;
1961 gfc_ss_info *ss_info;
1964 /* Save the old values for nested checking. */
1965 old_first_len = first_len;
1966 old_first_len_val = first_len_val;
1967 old_typespec_chararray_ctor = typespec_chararray_ctor;
1970 expr = ss_info->expr;
1972 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
1973 typespec was given for the array constructor. */
1974 typespec_chararray_ctor = (expr->ts.u.cl
1975 && expr->ts.u.cl->length_from_typespec);
1977 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
1978 && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
1980 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
1984 gcc_assert (ss->dimen == loop->dimen);
1986 c = expr->value.constructor;
1987 if (expr->ts.type == BT_CHARACTER)
1991 /* get_array_ctor_strlen walks the elements of the constructor, if a
1992 typespec was given, we already know the string length and want the one
    from the typespec instead (comment truncated in this view).  */
1994 if (typespec_chararray_ctor && expr->ts.u.cl->length
1995 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
1999 const_string = false;
2000 gfc_init_se (&length_se, NULL);
2001 gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
2002 gfc_charlen_type_node);
2003 ss_info->string_length = length_se.expr;
2004 gfc_add_block_to_block (&loop->pre, &length_se.pre);
2005 gfc_add_block_to_block (&loop->post, &length_se.post);
2008 const_string = get_array_ctor_strlen (&loop->pre, c,
2009 &ss_info->string_length);
2011 /* Complex character array constructors should have been taken care of
2012 and not end up here. */
2013 gcc_assert (ss_info->string_length);
2015 expr->ts.u.cl->backend_decl = ss_info->string_length;
2017 type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
2019 type = build_pointer_type (type);
2022 type = gfc_typenode_for_spec (&expr->ts);
2024 /* See if the constructor determines the loop bounds. */
2027 if (expr->shape && loop->dimen > 1 && loop->to[0] == NULL_TREE)
2029 /* We have a multidimensional parameter. */
2031 for (n = 0; n < expr->rank; n++)
2033 loop->from[n] = gfc_index_zero_node;
2034 loop->to[n] = gfc_conv_mpz_to_tree (expr->shape [n],
2035 gfc_index_integer_kind);
2036 loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2037 gfc_array_index_type,
2038 loop->to[n], gfc_index_one_node);
2042 if (loop->to[0] == NULL_TREE)
2046 /* We should have a 1-dimensional, zero-based loop. */
2047 gcc_assert (loop->dimen == 1);
2048 gcc_assert (integer_zerop (loop->from[0]));
2050 /* Split the constructor size into a static part and a dynamic part.
2051 Allocate the static size up-front and record whether the dynamic
2052 size might be nonzero. */
2054 dynamic = gfc_get_array_constructor_size (&size, c);
2055 mpz_sub_ui (size, size, 1);
2056 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2060 /* Special case constant array constructors. */
2063 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2066 tree size = constant_array_constructor_loop_size (loop);
2067 if (size && compare_tree_int (size, nelem) == 0)
/* Fully constant and the loop covers exactly the constructor:
   use a static initialized array, no runtime filling needed.  */
2069 trans_constant_array_constructor (ss, type);
2075 if (TREE_CODE (loop->to[0]) == VAR_DECL)
2078 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop, ss,
2079 type, NULL_TREE, dynamic, true, false, where);
2081 desc = ss_info->data.array.descriptor;
2082 offset = gfc_index_zero_node;
2083 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2084 TREE_NO_WARNING (offsetvar) = 1;
2085 TREE_USED (offsetvar) = 0;
2086 gfc_trans_array_constructor_value (&loop->pre, type, desc, c,
2087 &offset, &offsetvar, dynamic);
2089 /* If the array grows dynamically, the upper bound of the loop variable
2090 is determined by the array's final upper bound. */
2093 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2094 gfc_array_index_type,
2095 offsetvar, gfc_index_one_node);
2096 tmp = gfc_evaluate_now (tmp, &loop->pre);
2097 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2098 if (loop->to[0] && TREE_CODE (loop->to[0]) == VAR_DECL)
2099 gfc_add_modify (&loop->pre, loop->to[0], tmp);
2104 if (TREE_USED (offsetvar))
2105 pushdecl (offsetvar);
2107 gcc_assert (INTEGER_CST_P (offset));
2110 /* Disable bound checking for now because it's probably broken. */
2111 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2118 /* Restore old values of globals. */
2119 first_len = old_first_len;
2120 first_len_val = old_first_len_val;
2121 typespec_chararray_ctor = old_typespec_chararray_ctor;
2125 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2126 called after evaluating all of INFO's vector dimensions. Go through
2127 each such vector dimension and see if we can now fill in any missing
    loop bounds (comment truncated in this view).  */
2131 set_vector_loop_bounds (gfc_loopinfo * loop, gfc_ss * ss)
2133 gfc_array_info *info;
2141 info = &ss->info->data.array;
2143 for (n = 0; n < loop->dimen; n++)
2146 if (info->ref->u.ar.dimen_type[dim] == DIMEN_VECTOR
2147 && loop->to[n] == NULL)
2149 /* Loop variable N indexes vector dimension DIM, and we don't
2150 yet know the upper bound of loop variable N. Set it to the
2151 difference between the vector's upper and lower bounds. */
2152 gcc_assert (loop->from[n] == gfc_index_zero_node);
2153 gcc_assert (info->subscript[dim]
2154 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2156 gfc_init_se (&se, NULL);
2157 desc = info->subscript[dim]->info->data.array.descriptor;
2158 zero = gfc_rank_cst[0];
2159 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2160 gfc_array_index_type,
2161 gfc_conv_descriptor_ubound_get (desc, zero),
2162 gfc_conv_descriptor_lbound_get (desc, zero));
2163 tmp = gfc_evaluate_now (tmp, &loop->pre);
2170 /* Add the pre and post chains for all the scalar expressions in a SS chain
2171 to loop. This is called after the loop parameters have been calculated,
2172 but before the actual scalarizing loops.  Dispatches on each SS's type:
    scalars are evaluated now, sections get their subscripts processed,
    functions are called and saved, constructors are translated.  */
2175 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2179 gfc_ss_info *ss_info;
2180 gfc_array_info *info;
2184 /* TODO: This can generate bad code if there are ordering dependencies,
2185 e.g., a callee allocated function and an unknown size constructor. */
2186 gcc_assert (ss != NULL);
2188 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2193 expr = ss_info->expr;
2194 info = &ss_info->data.array;
2196 switch (ss_info->type)
2199 /* Scalar expression. Evaluate this now. This includes elemental
2200 dimension indices, but not array section bounds. */
2201 gfc_init_se (&se, NULL);
2202 gfc_conv_expr (&se, expr);
2203 gfc_add_block_to_block (&loop->pre, &se.pre);
2205 if (expr->ts.type != BT_CHARACTER)
2207 /* Move the evaluation of scalar expressions outside the
2208 scalarization loop, except for WHERE assignments. */
2210 se.expr = convert(gfc_array_index_type, se.expr);
2211 if (!ss_info->where)
2212 se.expr = gfc_evaluate_now (se.expr, &loop->pre);
2213 gfc_add_block_to_block (&loop->pre, &se.post);
2216 gfc_add_block_to_block (&loop->post, &se.post);
2218 ss_info->data.scalar.value = se.expr;
2219 ss_info->string_length = se.string_length;
2222 case GFC_SS_REFERENCE:
2223 /* Scalar argument to elemental procedure. Evaluate this
    now (comment truncated in this view).  */
2225 gfc_init_se (&se, NULL);
2226 gfc_conv_expr (&se, expr);
2227 gfc_add_block_to_block (&loop->pre, &se.pre);
2228 gfc_add_block_to_block (&loop->post, &se.post);
2230 ss_info->data.scalar.value = gfc_evaluate_now (se.expr, &loop->pre);
2231 ss_info->string_length = se.string_length;
2234 case GFC_SS_SECTION:
2235 /* Add the expressions for scalar and vector subscripts. */
2236 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2237 if (info->subscript[n])
2238 gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
2240 set_vector_loop_bounds (loop, ss);
2244 /* Get the vector's descriptor and store it in SS. */
2245 gfc_init_se (&se, NULL);
2246 gfc_conv_expr_descriptor (&se, expr, gfc_walk_expr (expr));
2247 gfc_add_block_to_block (&loop->pre, &se.pre);
2248 gfc_add_block_to_block (&loop->post, &se.post);
2249 info->descriptor = se.expr;
2252 case GFC_SS_INTRINSIC:
2253 gfc_add_intrinsic_ss_code (loop, ss);
2256 case GFC_SS_FUNCTION:
2257 /* Array function return value. We call the function and save its
2258 result in a temporary for use inside the loop. */
2259 gfc_init_se (&se, NULL);
2262 gfc_conv_expr (&se, expr);
2263 gfc_add_block_to_block (&loop->pre, &se.pre);
2264 gfc_add_block_to_block (&loop->post, &se.post);
2265 ss_info->string_length = se.string_length;
2268 case GFC_SS_CONSTRUCTOR:
2269 if (expr->ts.type == BT_CHARACTER
2270 && ss_info->string_length == NULL
2272 && expr->ts.u.cl->length)
/* The constructor's string length comes from its typespec;
   evaluate it before translating the constructor itself.  */
2274 gfc_init_se (&se, NULL);
2275 gfc_conv_expr_type (&se, expr->ts.u.cl->length,
2276 gfc_charlen_type_node);
2277 ss_info->string_length = se.expr;
2278 gfc_add_block_to_block (&loop->pre, &se.pre);
2279 gfc_add_block_to_block (&loop->post, &se.post);
2281 gfc_trans_array_constructor (loop, ss, where);
2285 case GFC_SS_COMPONENT:
2286 /* Do nothing. These are handled elsewhere. */
2296 /* Translate expressions for the descriptor and data pointer of a SS.
    BASE presumably controls whether the data pointer/offset are also
    evaluated -- its test is not visible in this view.  */
2300 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2303 gfc_ss_info *ss_info;
2304 gfc_array_info *info;
2308 info = &ss_info->data.array;
2310 /* Get the descriptor for the array to be scalarized. */
2311 gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
2312 gfc_init_se (&se, NULL);
2313 se.descriptor_only = 1;
2314 gfc_conv_expr_lhs (&se, ss_info->expr);
2315 gfc_add_block_to_block (block, &se.pre);
2316 info->descriptor = se.expr;
2317 ss_info->string_length = se.string_length;
2321 /* Also the data pointer. */
2322 tmp = gfc_conv_array_data (se.expr);
2323 /* If this is a variable or address of a variable we use it directly.
2324 Otherwise we must evaluate it now to avoid breaking dependency
2325 analysis by pulling the expressions for elemental array indices
    inside the loop (comment truncated in this view).  */
2328 || (TREE_CODE (tmp) == ADDR_EXPR
2329 && DECL_P (TREE_OPERAND (tmp, 0)))))
2330 tmp = gfc_evaluate_now (tmp, block);
2333 tmp = gfc_conv_array_offset (se.expr);
2334 info->offset = gfc_evaluate_now (tmp, block);
2336 /* Make absolutely sure that the saved_offset is indeed saved
2337 so that the variable is still accessible after the loops
2339 info->saved_offset = info->offset;
2344 /* Initialize a gfc_loopinfo structure: zero everything, set up the
    pre/post statement blocks, and default every dimension to in-order
    scalarization with loop reversal inhibited.  */
2347 gfc_init_loopinfo (gfc_loopinfo * loop)
2351 memset (loop, 0, sizeof (gfc_loopinfo));
2352 gfc_init_block (&loop->pre);
2353 gfc_init_block (&loop->post);
2355 /* Initially scalarize in order and default to no loop reversal. */
2356 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2359 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2362 loop->ss = gfc_ss_terminator;
2366 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
    chain (comment truncated in this view; body elided).  */
2370 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2376 /* Return an expression for the data pointer of an array.  For
    descriptorless (GFC_ARRAY_TYPE_P) arrays this is the descriptor
    itself or its address; otherwise read the descriptor's data field.  */
2379 gfc_conv_array_data (tree descriptor)
2383 type = TREE_TYPE (descriptor);
2384 if (GFC_ARRAY_TYPE_P (type))
2386 if (TREE_CODE (type) == POINTER_TYPE)
2390 /* Descriptorless arrays. */
2391 return gfc_build_addr_expr (NULL_TREE, descriptor);
2395 return gfc_conv_descriptor_data_get (descriptor);
2399 /* Return an expression for the base offset of an array: the type's
    recorded offset for descriptorless arrays, otherwise the
    descriptor's offset field.  */
2402 gfc_conv_array_offset (tree descriptor)
2406 type = TREE_TYPE (descriptor);
2407 if (GFC_ARRAY_TYPE_P (type))
2408 return GFC_TYPE_ARRAY_OFFSET (type);
2410 return gfc_conv_descriptor_offset_get (descriptor);
2414 /* Get an expression for the array stride in dimension DIM, preferring
    the compile-time stride recorded in the type when available.  */
2417 gfc_conv_array_stride (tree descriptor, int dim)
2422 type = TREE_TYPE (descriptor);
2424 /* For descriptorless arrays use the array size. */
2425 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2426 if (tmp != NULL_TREE)
2429 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2434 /* Like gfc_conv_array_stride, but for the lower bound of dimension
    DIM: prefer the type's recorded bound, else read the descriptor.  */
2437 gfc_conv_array_lbound (tree descriptor, int dim)
2442 type = TREE_TYPE (descriptor);
2444 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2445 if (tmp != NULL_TREE)
2448 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2453 /* Like gfc_conv_array_stride, but for the upper bound of dimension
    DIM.  */
2456 gfc_conv_array_ubound (tree descriptor, int dim)
2461 type = TREE_TYPE (descriptor);
2463 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2464 if (tmp != NULL_TREE)
2467 /* This should only ever happen when passing an assumed shape array
2468 as an actual parameter. The value will never be used. */
2469 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2470 return gfc_index_zero_node;
2472 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2477 /* Generate code to perform an array index bound check on INDEX against
    dimension N of the array behind SS.  CHECK_UPPER selects whether the
    upper bound is verified too; WHERE locates any runtime error.  */
2480 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2481 locus * where, bool check_upper)
2484 tree tmp_lo, tmp_up;
2487 const char * name = NULL;
/* No-op unless -fcheck=bounds is active.  */
2489 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2492 descriptor = ss->info->data.array.descriptor;
2494 index = gfc_evaluate_now (index, &se->pre);
2496 /* We find a name for the error message. */
2497 name = ss->info->expr->symtree->n.sym->name;
2498 gcc_assert (name != NULL);
2500 if (TREE_CODE (descriptor) == VAR_DECL)
2501 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2503 /* If upper bound is present, include both bounds in the error message. */
2506 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2507 tmp_up = gfc_conv_array_ubound (descriptor, n);
2510 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2511 "outside of expected range (%%ld:%%ld)", n+1, name);
2513 asprintf (&msg, "Index '%%ld' of dimension %d "
2514 "outside of expected range (%%ld:%%ld)", n+1);
/* Check index < lower bound ...  */
2516 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2518 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2519 fold_convert (long_integer_type_node, index),
2520 fold_convert (long_integer_type_node, tmp_lo),
2521 fold_convert (long_integer_type_node, tmp_up));
/* ... and index > upper bound, with the same message.  */
2522 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2524 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2525 fold_convert (long_integer_type_node, index),
2526 fold_convert (long_integer_type_node, tmp_lo),
2527 fold_convert (long_integer_type_node, tmp_up));
/* No upper bound to check: only verify the lower bound.  */
2532 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2535 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2536 "below lower bound of %%ld", n+1, name);
2538 asprintf (&msg, "Index '%%ld' of dimension %d "
2539 "below lower bound of %%ld", n+1);
2541 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2543 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2544 fold_convert (long_integer_type_node, index),
2545 fold_convert (long_integer_type_node, tmp_lo));
2553 /* Return the offset for an index. Performs bound checking for elemental
2554 dimensions. Single element references are processed separately.
2555 DIM is the array dimension, I is the loop dimension. */
/* Builds the (stride-scaled) offset contribution of one array dimension.
   Handles three subscript kinds — elemental scalar, vector subscript, and
   scalarized range — plus the AR == NULL case for temporaries/components.
   NOTE(review): switch/case/break lines are elided in this extract; the
   case boundaries below are inferred from the surviving asserts.  */
2558 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2559 gfc_array_ref * ar, tree stride)
2561 gfc_array_info *info;
2566 info = &ss->info->data.array;
2568 /* Get the index into the array for this dimension. */
2571 gcc_assert (ar->type != AR_ELEMENT);
2572 switch (ar->dimen_type[dim])
2574 case DIMEN_THIS_IMAGE:
2578 /* Elemental dimension. */
2579 gcc_assert (info->subscript[dim]
2580 && info->subscript[dim]->info->type == GFC_SS_SCALAR);
2581 /* We've already translated this value outside the loop. */
2582 index = info->subscript[dim]->info->data.scalar.value;
/* Skip the upper-bound test only for the last dim of assumed-size.  */
2584 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2585 ar->as->type != AS_ASSUMED_SIZE
2586 || dim < ar->dimen - 1);
/* Vector subscript dimension.  */
2590 gcc_assert (info && se->loop);
2591 gcc_assert (info->subscript[dim]
2592 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2593 desc = info->subscript[dim]->info->data.array.descriptor;
2595 /* Get a zero-based index into the vector. */
2596 index = fold_build2_loc (input_location, MINUS_EXPR,
2597 gfc_array_index_type,
2598 se->loop->loopvar[i], se->loop->from[i]);
2600 /* Multiply the index by the stride. */
2601 index = fold_build2_loc (input_location, MULT_EXPR,
2602 gfc_array_index_type,
2603 index, gfc_conv_array_stride (desc, 0));
2605 /* Read the vector to get an index into info->descriptor. */
2606 data = build_fold_indirect_ref_loc (input_location,
2607 gfc_conv_array_data (desc));
2608 index = gfc_build_array_ref (data, index, NULL);
2609 index = gfc_evaluate_now (index, &se->pre);
2610 index = fold_convert (gfc_array_index_type, index);
2612 /* Do any bounds checking on the final info->descriptor index. */
2613 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2614 ar->as->type != AS_ASSUMED_SIZE
2615 || dim < ar->dimen - 1);
2619 /* Scalarized dimension. */
2620 gcc_assert (info && se->loop);
2622 /* Multiply the loop variable by the stride and delta. */
2623 index = se->loop->loopvar[i];
/* Both fold_build2 calls below are guarded to avoid trivial *1 / +0.  */
2624 if (!integer_onep (info->stride[dim]))
2625 index = fold_build2_loc (input_location, MULT_EXPR,
2626 gfc_array_index_type, index,
2628 if (!integer_zerop (info->delta[dim]))
2629 index = fold_build2_loc (input_location, PLUS_EXPR,
2630 gfc_array_index_type, index,
2640 /* Temporary array or derived type component. */
2641 gcc_assert (se->loop);
2642 index = se->loop->loopvar[se->loop->order[i]];
2644 /* Pointer functions can have stride[0] different from unity.
2645 Use the stride returned by the function call and stored in
2646 the descriptor for the temporary. */
2647 if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
2648 && se->ss->info->expr
2649 && se->ss->info->expr->symtree
2650 && se->ss->info->expr->symtree->n.sym->result
2651 && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
2652 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2655 if (!integer_zerop (info->delta[dim]))
2656 index = fold_build2_loc (input_location, PLUS_EXPR,
2657 gfc_array_index_type, index, info->delta[dim]);
2660 /* Multiply by the stride. */
2661 if (!integer_onep (stride))
2662 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2669 /* Build a scalarized reference to an array. */
/* Produces se->expr as an element reference data[index] for the current
   scalarizer iteration: innermost-dimension offset (via stride0) plus the
   precomputed cumulative offset for the outer dimensions.  For subref
   arrays the backing symbol's decl is passed so the array ref can carry
   the right base object.  */
2672 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
2674 gfc_array_info *info;
2675 tree decl = NULL_TREE;
2683 expr = ss->info->expr;
2684 info = &ss->info->data.array;
/* Innermost loop dimension after any loop reordering.  */
2686 n = se->loop->order[0];
2690 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
2691 /* Add the offset for this dimension to the stored offset for all other
2693 if (!integer_zerop (info->offset))
2694 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2695 index, info->offset);
2697 if (expr && is_subref_array (expr))
2698 decl = expr->symtree->n.sym->backend_decl;
2700 tmp = build_fold_indirect_ref_loc (input_location, info->data);
2701 se->expr = gfc_build_array_ref (tmp, index, decl);
2705 /* Translate access of temporary array. */
/* Convenience wrapper: scalarized reference into the current temporary
   (no gfc_array_ref), propagating its string length and advancing the
   SS chain past the consumed term.  */
2708 gfc_conv_tmp_array_ref (gfc_se * se)
2710 se->string_length = se->ss->info->string_length;
2711 gfc_conv_scalarized_array_ref (se, NULL);
2712 gfc_advance_se_ss_chain (se);
2715 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
/* Constant summands are folded immediately into *CST_OFFSET so the final
   offset expression keeps all loop-invariant constants in one place;
   everything else is chained onto *OFFSET with PLUS_EXPR.  */
2718 add_to_offset (tree *cst_offset, tree *offset, tree t)
2720 if (TREE_CODE (t) == INTEGER_CST)
2721 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
/* Avoid building 0 + t for the first non-constant term.  */
2724 if (!integer_zerop (*offset))
2725 *offset = fold_build2_loc (input_location, PLUS_EXPR,
2726 gfc_array_index_type, *offset, t);
2732 /* Build an array reference. se->expr already holds the array descriptor.
2733 This should be either a variable, indirect variable reference or component
2734 reference. For arrays which do not have a descriptor, se->expr will be
2736 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
/* Full-element (AR_ELEMENT) references are expanded inline here with
   optional -fcheck=bounds tests per dimension; anything scalarized is
   delegated to gfc_conv_scalarized_array_ref.  The coarray handling at
   the top strips the wrapped descriptor/pointer.  */
2739 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
2743 tree offset, cst_offset;
/* Coarray path (elided context): must have a codimension here.  */
2751 gcc_assert (ar->codimen);
2753 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
2754 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
2757 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
2758 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
2759 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
2761 /* Use the actual tree type and not the wrapped coarray. */
2762 if (!se->want_pointer)
2763 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
2770 /* Handle scalarized references separately. */
2771 if (ar->type != AR_ELEMENT)
2773 gfc_conv_scalarized_array_ref (se, ar);
2774 gfc_advance_se_ss_chain (se);
2778 cst_offset = offset = gfc_index_zero_node;
2779 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
2781 /* Calculate the offsets from all the dimensions. Make sure to associate
2782 the final offset so that we form a chain of loop invariant summands. */
2783 for (n = ar->dimen - 1; n >= 0; n--)
2785 /* Calculate the index for this dimension. */
2786 gfc_init_se (&indexse, se);
2787 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
2788 gfc_add_block_to_block (&se->pre, &indexse.pre);
2790 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2792 /* Check array bounds. */
2796 /* Evaluate the indexse.expr only once. */
2797 indexse.expr = save_expr (indexse.expr);
/* Lower bound: for temporaries the declared bound expression is used
   instead of the descriptor value.  */
2800 tmp = gfc_conv_array_lbound (se->expr, n);
2801 if (sym->attr.temporary)
2803 gfc_init_se (&tmpse, se);
2804 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
2805 gfc_array_index_type);
2806 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2810 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2812 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2813 "below lower bound of %%ld", n+1, sym->name);
2814 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2815 fold_convert (long_integer_type_node,
2817 fold_convert (long_integer_type_node, tmp));
2820 /* Upper bound, but not for the last dimension of assumed-size
2822 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
2824 tmp = gfc_conv_array_ubound (se->expr, n);
2825 if (sym->attr.temporary)
2827 gfc_init_se (&tmpse, se);
2828 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
2829 gfc_array_index_type);
2830 gfc_add_block_to_block (&se->pre, &tmpse.pre);
2834 cond = fold_build2_loc (input_location, GT_EXPR,
2835 boolean_type_node, indexse.expr, tmp);
2836 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2837 "above upper bound of %%ld", n+1, sym->name);
2838 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
2839 fold_convert (long_integer_type_node,
2841 fold_convert (long_integer_type_node, tmp));
2846 /* Multiply the index by the stride. */
2847 stride = gfc_conv_array_stride (se->expr, n);
2848 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2849 indexse.expr, stride);
2851 /* And add it to the total. */
2852 add_to_offset (&cst_offset, &offset, tmp);
/* Fold the accumulated constant part in last, as one summand.  */
2855 if (!integer_zerop (cst_offset))
2856 offset = fold_build2_loc (input_location, PLUS_EXPR,
2857 gfc_array_index_type, offset, cst_offset);
2859 /* Access the calculated element. */
2860 tmp = gfc_conv_array_data (se->expr);
2861 tmp = build_fold_indirect_ref (tmp);
2862 se->expr = gfc_build_array_ref (tmp, offset, sym->backend_decl);
2866 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
2867 LOOP_DIM dimension (if any) to array's offset. */
/* Helper for gfc_trans_preloop_setup: folds one dimension's contribution
   into info->offset and re-evaluates it into a temporary in PBLOCK so the
   inner loops see a simple variable.  */
2870 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
2871 gfc_array_ref *ar, int array_dim, int loop_dim)
2874 gfc_array_info *info;
2877 info = &ss->info->data.array;
2879 gfc_init_se (&se, NULL);
2881 se.expr = info->descriptor;
2882 stride = gfc_conv_array_stride (info->descriptor, array_dim);
2883 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
/* Any bound-check / evaluation code generated above goes before the loop.  */
2884 gfc_add_block_to_block (pblock, &se.pre);
2886 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
2887 gfc_array_index_type,
2888 info->offset, index);
2889 info->offset = gfc_evaluate_now (info->offset, pblock);
2893 /* Generate the code to be executed immediately before entering a
2894 scalarization loop. */
/* For every SS that participates (useflags matches FLAG), precomputes the
   loop-invariant pieces for loop dimension DIM: the innermost stride
   (stride0), elemental-dimension offsets before the outermost loop, and
   the per-dimension offset of the loop just outside this one.  */
2897 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
2898 stmtblock_t * pblock)
2901 gfc_ss_info *ss_info;
2902 gfc_array_info *info;
2903 gfc_ss_type ss_type;
2908 /* This code will be executed before entering the scalarization loop
2909 for this dimension. */
2910 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
/* Skip terms not used in this pass (flag 1 = main body, 2 = copy body).  */
2914 if ((ss_info->useflags & flag) == 0)
2917 ss_type = ss_info->type;
2918 if (ss_type != GFC_SS_SECTION
2919 && ss_type != GFC_SS_FUNCTION
2920 && ss_type != GFC_SS_CONSTRUCTOR
2921 && ss_type != GFC_SS_COMPONENT)
2924 info = &ss_info->data.array;
2926 gcc_assert (dim < ss->dimen);
2927 gcc_assert (ss->dimen == loop->dimen);
2930 ar = &info->ref->u.ar;
2934 if (dim == loop->dimen - 1)
2939 /* For the time being, there is no loop reordering. */
2940 gcc_assert (i == loop->order[i]);
2943 if (dim == loop->dimen - 1)
2945 stride = gfc_conv_array_stride (info->descriptor, ss->dim[i]);
2947 /* Calculate the stride of the innermost loop. Hopefully this will
2948 allow the backend optimizers to do their stuff more effectively.
2950 info->stride0 = gfc_evaluate_now (stride, pblock);
2952 /* For the outermost loop calculate the offset due to any
2953 elemental dimensions. It will have been initialized with the
2954 base offset of the array. */
2957 for (i = 0; i < ar->dimen; i++)
2959 if (ar->dimen_type[i] != DIMEN_ELEMENT)
2962 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
2967 /* Add the offset for the previous loop dimension. */
2968 add_array_offset (pblock, loop, ss, ar, ss->dim[i], i);
2970 /* Remember this offset for the second loop. */
2971 if (dim == loop->temp_dim - 1)
2972 info->saved_offset = info->offset;
2977 /* Start a scalarized expression. Creates a scope and declares loop
/* Opens one statement block per loop dimension (outermost first), creates
   the loop variables, and runs the pre-loop setup for each level.  PBODY
   receives the innermost block where the caller emits the loop body.  */
2981 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
2987 gcc_assert (!loop->array_parameter);
2989 for (dim = loop->dimen - 1; dim >= 0; dim--)
2991 n = loop->order[dim];
2993 gfc_start_block (&loop->code[n]);
2995 /* Create the loop variable. */
2996 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
/* Dimensions used by a temporary flag both passes (elided: flags set).  */
2998 if (dim < loop->temp_dim)
3002 /* Calculate values that will be constant within this loop. */
3003 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3005 gfc_start_block (pbody);
3009 /* Generates the actual loop code for a scalarization loop. */
/* Emits loop N around the statements in PBODY.  Inside an OpenMP
   WORKSHARE with scalarizer worksharing the outermost loop becomes an
   OMP_FOR with static schedule; otherwise a plain LOOP_EXPR with a goto
   exit is built, iterating backwards when loop->reverse[n] says so.  */
3012 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
3013 stmtblock_t * pbody)
3024 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
3025 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
3026 && n == loop->dimen - 1)
3028 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3029 init = make_tree_vec (1);
3030 cond = make_tree_vec (1);
3031 incr = make_tree_vec (1);
3033 /* Cycle statement is implemented with a goto. Exit statement must not
3034 be present for this loop. */
3035 exit_label = gfc_build_label_decl (NULL_TREE);
3036 TREE_USED (exit_label) = 1;
3038 /* Label for cycle statements (if needed). */
3039 tmp = build1_v (LABEL_EXPR, exit_label);
3040 gfc_add_expr_to_block (pbody, tmp);
3042 stmt = make_node (OMP_FOR);
3044 TREE_TYPE (stmt) = void_type_node;
3045 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
3047 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
3048 OMP_CLAUSE_SCHEDULE);
3049 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3050 = OMP_CLAUSE_SCHEDULE_STATIC;
3051 if (ompws_flags & OMPWS_NOWAIT)
3052 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3053 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3055 /* Initialize the loopvar. */
3056 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3058 OMP_FOR_INIT (stmt) = init;
3059 /* The exit condition. */
3060 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3062 loop->loopvar[n], loop->to[n]);
3063 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3064 OMP_FOR_COND (stmt) = cond;
3065 /* Increment the loopvar. */
3066 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3067 loop->loopvar[n], gfc_index_one_node);
3068 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3069 void_type_node, loop->loopvar[n], tmp);
3070 OMP_FOR_INCR (stmt) = incr;
3072 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3073 gfc_add_expr_to_block (&loop->code[n], stmt);
/* Non-workshare path: an ordinary backward-or-forward counted loop.  */
3077 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3078 && (loop->temp_ss == NULL);
3080 loopbody = gfc_finish_block (pbody);
/* Reversed loop: swap from/to so iteration runs downwards (elided swap
   completion not visible here).  */
3084 tmp = loop->from[n];
3085 loop->from[n] = loop->to[n];
3089 /* Initialize the loopvar. */
3090 if (loop->loopvar[n] != loop->from[n])
3091 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3093 exit_label = gfc_build_label_decl (NULL_TREE);
3095 /* Generate the loop body. */
3096 gfc_init_block (&block);
3098 /* The exit condition. */
3099 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3100 boolean_type_node, loop->loopvar[n], loop->to[n]);
3101 tmp = build1_v (GOTO_EXPR, exit_label);
3102 TREE_USED (exit_label) = 1;
3103 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3104 gfc_add_expr_to_block (&block, tmp);
3106 /* The main body. */
3107 gfc_add_expr_to_block (&block, loopbody);
3109 /* Increment the loopvar. */
3110 tmp = fold_build2_loc (input_location,
3111 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3112 gfc_array_index_type, loop->loopvar[n],
3113 gfc_index_one_node);
3115 gfc_add_modify (&block, loop->loopvar[n], tmp);
3117 /* Build the loop. */
3118 tmp = gfc_finish_block (&block);
3119 tmp = build1_v (LOOP_EXPR, tmp);
3120 gfc_add_expr_to_block (&loop->code[n], tmp);
3122 /* Add the exit label. */
3123 tmp = build1_v (LABEL_EXPR, exit_label);
3124 gfc_add_expr_to_block (&loop->code[n], tmp);
3130 /* Finishes and generates the loops for a scalarized expression. */
/* Closes every loop level from innermost to outermost, appends the result
   to loop->pre, and clears the per-SS useflags for the next expression.  */
3133 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3138 stmtblock_t *pblock;
3142 /* Generate the loops. */
3143 for (dim = 0; dim < loop->dimen; dim++)
3145 n = loop->order[dim];
3146 gfc_trans_scalarized_loop_end (loop, n, pblock);
/* Mark this level done so later code cannot reuse a dead loopvar.  */
3147 loop->loopvar[n] = NULL_TREE;
3148 pblock = &loop->code[n];
3151 tmp = gfc_finish_block (pblock);
3152 gfc_add_expr_to_block (&loop->pre, tmp);
3154 /* Clear all the used flags. */
3155 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3156 ss->info->useflags = 0;
3160 /* Finish the main body of a scalarized expression, and start the secondary
/* Used when a temporary forces a two-pass scalarization: closes the loops
   owned by the temporary, restores each array's saved offset, then reopens
   the inner loops (new loopvars "Q") for the copy-out pass and starts the
   secondary BODY block.  */
3164 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3168 stmtblock_t *pblock;
3172 /* We finish as many loops as are used by the temporary. */
3173 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3175 n = loop->order[dim];
3176 gfc_trans_scalarized_loop_end (loop, n, pblock);
3177 loop->loopvar[n] = NULL_TREE;
3178 pblock = &loop->code[n];
3181 /* We don't want to finish the outermost loop entirely. */
3182 n = loop->order[loop->temp_dim - 1];
3183 gfc_trans_scalarized_loop_end (loop, n, pblock);
3185 /* Restore the initial offsets. */
3186 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3188 gfc_ss_type ss_type;
3189 gfc_ss_info *ss_info;
/* Flag 2 marks terms that participate in the second (copy) pass.  */
3193 if ((ss_info->useflags & 2) == 0)
3196 ss_type = ss_info->type;
3197 if (ss_type != GFC_SS_SECTION
3198 && ss_type != GFC_SS_FUNCTION
3199 && ss_type != GFC_SS_CONSTRUCTOR
3200 && ss_type != GFC_SS_COMPONENT)
3203 ss_info->data.array.offset = ss_info->data.array.saved_offset;
3206 /* Restart all the inner loops we just finished. */
3207 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3209 n = loop->order[dim];
3211 gfc_start_block (&loop->code[n]);
3213 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3215 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3218 /* Start a block for the secondary copying code. */
3219 gfc_start_block (body);
3223 /* Precalculate (either lower or upper) bound of an array section.
3224 BLOCK: Block in which the (pre)calculation code will go.
3225 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3226 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3227 DESC: Array descriptor from which the bound will be picked if unspecified
3228 (either lower or upper bound according to LBOUND). */
3231 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3232 tree desc, int dim, bool lbound)
3235 gfc_expr * input_val = values[dim];
3236 tree *output = &bounds[dim];
/* An explicitly written section bound takes precedence.  */
3241 /* Specified section bound. */
3242 gfc_init_se (&se, NULL);
3243 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3244 gfc_add_block_to_block (block, &se.pre);
3249 /* No specific bound specified so use the bound of the array. */
3250 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3251 gfc_conv_array_ubound (desc, dim);
/* Evaluate once into a temporary so the bound is stable in BLOCK.  */
3253 *output = gfc_evaluate_now (*output, block);
3257 /* Calculate the lower bound of an array section. */
/* Fills in info->start/end/stride for dimension DIM of a GFC_SS_SECTION.
   Vector subscripts get a canonical zero-based unit-stride range; ranges
   evaluate their bounds and stride (stride defaults to 1 when absent).  */
3260 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim)
3262 gfc_expr *stride = NULL;
3265 gfc_array_info *info;
3268 gcc_assert (ss->info->type == GFC_SS_SECTION);
3270 info = &ss->info->data.array;
3271 ar = &info->ref->u.ar;
3273 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3275 /* We use a zero-based index to access the vector. */
3276 info->start[dim] = gfc_index_zero_node;
3277 info->end[dim] = NULL;
3278 info->stride[dim] = gfc_index_one_node;
3282 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3283 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3284 desc = info->descriptor;
3285 stride = ar->stride[dim];
3287 /* Calculate the start of the range. For vector subscripts this will
3288 be the range of the vector. */
3289 evaluate_bound (&loop->pre, info->start, ar->start, desc, dim, true);
3291 /* Similarly calculate the end. Although this is not used in the
3292 scalarizer, it is needed when checking bounds and where the end
3293 is an expression with side-effects. */
3294 evaluate_bound (&loop->pre, info->end, ar->end, desc, dim, false);
3296 /* Calculate the stride. */
/* No stride expression given: default to unity.  */
3298 info->stride[dim] = gfc_index_one_node;
3301 gfc_init_se (&se, NULL);
3302 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3303 gfc_add_block_to_block (&loop->pre, &se.pre);
3304 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3309 /* Calculates the range start and stride for a SS chain. Also gets the
3310 descriptor and data pointer. The range of vector subscripts is the size
3311 of the vector. Array bounds are also checked. */
/* Three phases: (1) determine loop->dimen from the SS chain, (2) translate
   descriptor + start/end/stride for every term, (3) under -fcheck=bounds
   emit runtime tests: non-zero stride, first and last element of each
   range inside the array bounds, and matching extents across sections.  */
3314 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3322 /* Determine the rank of the loop. */
3323 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3325 switch (ss->info->type)
3327 case GFC_SS_SECTION:
3328 case GFC_SS_CONSTRUCTOR:
3329 case GFC_SS_FUNCTION:
3330 case GFC_SS_COMPONENT:
3331 loop->dimen = ss->dimen;
3334 /* As usual, lbound and ubound are exceptions!. */
3335 case GFC_SS_INTRINSIC:
3336 switch (ss->info->expr->value.function.isym->id)
3338 case GFC_ISYM_LBOUND:
3339 case GFC_ISYM_UBOUND:
3340 case GFC_ISYM_LCOBOUND:
3341 case GFC_ISYM_UCOBOUND:
3342 case GFC_ISYM_THIS_IMAGE:
3343 loop->dimen = ss->dimen;
3355 /* We should have determined the rank of the expression by now. If
3356 not, that's bad news. */
3360 /* Loop over all the SS in the chain. */
3361 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3363 gfc_ss_info *ss_info;
3364 gfc_array_info *info;
3368 expr = ss_info->expr;
3369 info = &ss_info->data.array;
/* Propagate a known frontend shape onto the array info if missing.  */
3371 if (expr && expr->shape && !info->shape)
3372 info->shape = expr->shape;
3374 switch (ss_info->type)
3376 case GFC_SS_SECTION:
3377 /* Get the descriptor for the array. */
3378 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3380 for (n = 0; n < ss->dimen; n++)
3381 gfc_conv_section_startstride (loop, ss, ss->dim[n]);
3384 case GFC_SS_INTRINSIC:
3385 switch (expr->value.function.isym->id)
3387 /* Fall through to supply start and stride. */
3388 case GFC_ISYM_LBOUND:
3389 case GFC_ISYM_UBOUND:
3390 case GFC_ISYM_LCOBOUND:
3391 case GFC_ISYM_UCOBOUND:
3392 case GFC_ISYM_THIS_IMAGE:
3399 case GFC_SS_CONSTRUCTOR:
3400 case GFC_SS_FUNCTION:
/* Constructors and function results iterate a canonical 0..extent-1
   space with unit stride.  */
3401 for (n = 0; n < ss->dimen; n++)
3403 int dim = ss->dim[n];
3405 info->start[dim] = gfc_index_zero_node;
3406 info->end[dim] = gfc_index_zero_node;
3407 info->stride[dim] = gfc_index_one_node;
3416 /* The rest is just runtime bound checking. */
3417 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3420 tree lbound, ubound;
3422 tree size[GFC_MAX_DIMENSIONS];
3423 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3424 gfc_array_info *info;
3428 gfc_start_block (&block);
3430 for (n = 0; n < loop->dimen; n++)
3431 size[n] = NULL_TREE;
3433 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3436 gfc_ss_info *ss_info;
3439 const char *expr_name;
/* Only plain sections are range-checked here.  */
3442 if (ss_info->type != GFC_SS_SECTION)
3445 /* Catch allocatable lhs in f2003. */
3446 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3449 expr = ss_info->expr;
3450 expr_loc = &expr->where;
3451 expr_name = expr->symtree->name;
3453 gfc_start_block (&inner);
3455 /* TODO: range checking for mapped dimensions. */
3456 info = &ss_info->data.array;
3458 /* This code only checks ranges. Elemental and vector
3459 dimensions are checked later. */
3460 for (n = 0; n < loop->dimen; n++)
3465 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
/* Last dimension of an assumed-size array has no upper bound.  */
3468 if (dim == info->ref->u.ar.dimen - 1
3469 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3470 check_upper = false;
3474 /* Zero stride is not allowed. */
3475 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3476 info->stride[dim], gfc_index_zero_node);
3477 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3478 "of array '%s'", dim + 1, expr_name);
3479 gfc_trans_runtime_check (true, false, tmp, &inner,
3483 desc = info->descriptor;
3485 /* This is the run-time equivalent of resolve.c's
3486 check_dimension(). The logical is more readable there
3487 than it is here, with all the trees. */
3488 lbound = gfc_conv_array_lbound (desc, dim);
3489 end = info->end[dim];
3491 ubound = gfc_conv_array_ubound (desc, dim);
3495 /* non_zerosized is true when the selected range is not
3497 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3498 boolean_type_node, info->stride[dim],
3499 gfc_index_zero_node);
3500 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3501 info->start[dim], end);
3502 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3503 boolean_type_node, stride_pos, tmp);
3505 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3507 info->stride[dim], gfc_index_zero_node);
3508 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3509 info->start[dim], end);
3510 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3513 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3515 stride_pos, stride_neg);
3517 /* Check the start of the range against the lower and upper
3518 bounds of the array, if the range is not empty.
3519 If upper bound is present, include both bounds in the
/* Both directions of the start-of-range check, guarded by
   non_zerosized so empty sections never trap.  */
3523 tmp = fold_build2_loc (input_location, LT_EXPR,
3525 info->start[dim], lbound);
3526 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3528 non_zerosized, tmp);
3529 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3531 info->start[dim], ubound);
3532 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3534 non_zerosized, tmp2);
3535 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3536 "outside of expected range (%%ld:%%ld)",
3537 dim + 1, expr_name);
3538 gfc_trans_runtime_check (true, false, tmp, &inner,
3540 fold_convert (long_integer_type_node, info->start[dim]),
3541 fold_convert (long_integer_type_node, lbound),
3542 fold_convert (long_integer_type_node, ubound));
3543 gfc_trans_runtime_check (true, false, tmp2, &inner,
3545 fold_convert (long_integer_type_node, info->start[dim]),
3546 fold_convert (long_integer_type_node, lbound),
3547 fold_convert (long_integer_type_node, ubound));
/* No upper bound: lower-bound-only variant of the same check.  */
3552 tmp = fold_build2_loc (input_location, LT_EXPR,
3554 info->start[dim], lbound);
3555 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3556 boolean_type_node, non_zerosized, tmp);
3557 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3558 "below lower bound of %%ld",
3559 dim + 1, expr_name);
3560 gfc_trans_runtime_check (true, false, tmp, &inner,
3562 fold_convert (long_integer_type_node, info->start[dim]),
3563 fold_convert (long_integer_type_node, lbound));
3567 /* Compute the last element of the range, which is not
3568 necessarily "end" (think 0:5:3, which doesn't contain 5)
3569 and check it against both lower and upper bounds. */
/* last = end - ((end - start) mod stride).  */
3571 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3572 gfc_array_index_type, end,
3574 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3575 gfc_array_index_type, tmp,
3577 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3578 gfc_array_index_type, end, tmp);
3579 tmp2 = fold_build2_loc (input_location, LT_EXPR,
3580 boolean_type_node, tmp, lbound);
3581 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3582 boolean_type_node, non_zerosized, tmp2);
3585 tmp3 = fold_build2_loc (input_location, GT_EXPR,
3586 boolean_type_node, tmp, ubound);
3587 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3588 boolean_type_node, non_zerosized, tmp3);
3589 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3590 "outside of expected range (%%ld:%%ld)",
3591 dim + 1, expr_name);
3592 gfc_trans_runtime_check (true, false, tmp2, &inner,
3594 fold_convert (long_integer_type_node, tmp),
3595 fold_convert (long_integer_type_node, ubound),
3596 fold_convert (long_integer_type_node, lbound));
3597 gfc_trans_runtime_check (true, false, tmp3, &inner,
3599 fold_convert (long_integer_type_node, tmp),
3600 fold_convert (long_integer_type_node, ubound),
3601 fold_convert (long_integer_type_node, lbound));
3606 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3607 "below lower bound of %%ld",
3608 dim + 1, expr_name);
3609 gfc_trans_runtime_check (true, false, tmp2, &inner,
3611 fold_convert (long_integer_type_node, tmp),
3612 fold_convert (long_integer_type_node, lbound));
3616 /* Check the section sizes match. */
/* extent = max ((end - start) / stride + 1, 0).  */
3617 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3618 gfc_array_index_type, end,
3620 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
3621 gfc_array_index_type, tmp,
3623 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3624 gfc_array_index_type,
3625 gfc_index_one_node, tmp);
3626 tmp = fold_build2_loc (input_location, MAX_EXPR,
3627 gfc_array_index_type, tmp,
3628 build_int_cst (gfc_array_index_type, 0));
3629 /* We remember the size of the first section, and check all the
3630 others against this. */
3633 tmp3 = fold_build2_loc (input_location, NE_EXPR,
3634 boolean_type_node, tmp, size[n]);
3635 asprintf (&msg, "Array bound mismatch for dimension %d "
3636 "of array '%s' (%%ld/%%ld)",
3637 dim + 1, expr_name);
3639 gfc_trans_runtime_check (true, false, tmp3, &inner,
3641 fold_convert (long_integer_type_node, tmp),
3642 fold_convert (long_integer_type_node, size[n]));
3647 size[n] = gfc_evaluate_now (tmp, &inner);
3650 tmp = gfc_finish_block (&inner);
3652 /* For optional arguments, only check bounds if the argument is
3654 if (expr->symtree->n.sym->attr.optional
3655 || expr->symtree->n.sym->attr.not_always_present)
3656 tmp = build3_v (COND_EXPR,
3657 gfc_conv_expr_present (expr->symtree->n.sym),
3658 tmp, build_empty_stmt (input_location));
3660 gfc_add_expr_to_block (&block, tmp);
3664 tmp = gfc_finish_block (&block);
3665 gfc_add_expr_to_block (&loop->pre, tmp);
3669 /* Return true if both symbols could refer to the same data object. Does
3670 not take account of aliasing due to equivalence statements. */
/* Conservative symbol-level alias test used by gfc_could_be_alias: type
   compatibility first, then pointer/target interaction, then the
   dummy-argument association special case from the standard.  */
3673 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
3674 bool lsym_target, bool rsym_pointer, bool rsym_target)
3676 /* Aliasing isn't possible if the symbols have different base types. */
3677 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
3680 /* Pointers can point to other pointers and target objects. */
3682 if ((lsym_pointer && (rsym_pointer || rsym_target))
3683 || (rsym_pointer && (lsym_pointer || lsym_target)))
3686 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
3687 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
3689 if (lsym_target && rsym_target
3690 && ((lsym->attr.dummy && !lsym->attr.contiguous
3691 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
3692 || (rsym->attr.dummy && !rsym->attr.contiguous
3693 && (!rsym->attr.dimension
3694 || rsym->as->type == AS_ASSUMED_SHAPE))))
3701 /* Return true if the two SS could be aliased, i.e. both point to the same data
3703 /* TODO: resolve aliases based on frontend expressions. */
3706 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
3710 gfc_expr *lexpr, *rexpr;
3713 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
3715 lexpr = lss->info->expr;
3716 rexpr = rss->info->expr;
3718 lsym = lexpr->symtree->n.sym;
3719 rsym = rexpr->symtree->n.sym;
3721 lsym_pointer = lsym->attr.pointer;
3722 lsym_target = lsym->attr.target;
3723 rsym_pointer = rsym->attr.pointer;
3724 rsym_target = rsym->attr.target;
3726 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
3727 rsym_pointer, rsym_target))
3730 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
3731 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
3734 /* For derived types we must check all the component types. We can ignore
3735 array references as these will have the same base type as the previous
3737 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
3739 if (lref->type != REF_COMPONENT)
3742 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
3743 lsym_target = lsym_target || lref->u.c.sym->attr.target;
3745 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
3746 rsym_pointer, rsym_target))
3749 if ((lsym_pointer && (rsym_pointer || rsym_target))
3750 || (rsym_pointer && (lsym_pointer || lsym_target)))
3752 if (gfc_compare_types (&lref->u.c.component->ts,
3757 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
3760 if (rref->type != REF_COMPONENT)
3763 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3764 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3766 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
3767 lsym_pointer, lsym_target,
3768 rsym_pointer, rsym_target))
3771 if ((lsym_pointer && (rsym_pointer || rsym_target))
3772 || (rsym_pointer && (lsym_pointer || lsym_target)))
3774 if (gfc_compare_types (&lref->u.c.component->ts,
3775 &rref->u.c.sym->ts))
3777 if (gfc_compare_types (&lref->u.c.sym->ts,
3778 &rref->u.c.component->ts))
3780 if (gfc_compare_types (&lref->u.c.component->ts,
3781 &rref->u.c.component->ts))
3787 lsym_pointer = lsym->attr.pointer;
3788 lsym_target = lsym->attr.target;
3789 lsym_pointer = lsym->attr.pointer;
3790 lsym_target = lsym->attr.target;
3792 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
3794 if (rref->type != REF_COMPONENT)
3797 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3798 rsym_target = lsym_target || rref->u.c.sym->attr.target;
3800 if (symbols_could_alias (rref->u.c.sym, lsym,
3801 lsym_pointer, lsym_target,
3802 rsym_pointer, rsym_target))
3805 if ((lsym_pointer && (rsym_pointer || rsym_target))
3806 || (rsym_pointer && (lsym_pointer || lsym_target)))
3808 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
3817 /* Resolve array data dependencies. Creates a temporary if required. */
3818 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
/* Scans the rhs SS chain for GFC_SS_SECTION terms that may overlap DEST;
   on a detected dependency a temporary SS is attached to the loop.  */
3822 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
3828 gfc_expr *dest_expr;
3833 loop->temp_ss = NULL;
3834 dest_expr = dest->info->expr;
3836 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
3838 if (ss->info->type != GFC_SS_SECTION)
3841 ss_expr = ss->info->expr;
3843 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
3845 if (gfc_could_be_alias (dest, ss)
3846 || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
3854 lref = dest_expr->ref;
3855 rref = ss_expr->ref;
3857 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
3862 for (i = 0; i < dest->dimen; i++)
3863 for (j = 0; j < ss->dimen; j++)
3865 && dest->dim[i] == ss->dim[j])
3867 /* If we don't access array elements in the same order,
3868 there is a dependency. */
3873 /* TODO : loop shifting. */
3876 /* Mark the dimensions for LOOP SHIFTING */
3877 for (n = 0; n < loop->dimen; n++)
3879 int dim = dest->data.info.dim[n];
3881 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
3883 else if (! gfc_is_same_range (&lref->u.ar,
3884 &rref->u.ar, dim, 0))
3888 /* Put all the dimensions with dependencies in the
3891 for (n = 0; n < loop->dimen; n++)
3893 gcc_assert (loop->order[n] == n);
3895 loop->order[dim++] = n;
3897 for (n = 0; n < loop->dimen; n++)
3900 loop->order[dim++] = n;
3903 gcc_assert (dim == loop->dimen);
/* A dependency was found: build a temporary with the destination's element
   type (unwrapping array/descriptor types) and add it to the loop chain.  */
3914 tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
3915 if (GFC_ARRAY_TYPE_P (base_type)
3916 || GFC_DESCRIPTOR_TYPE_P (base_type))
3917 base_type = gfc_get_element_type (base_type);
3918 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
3920 gfc_add_ss_to_loop (loop, loop->temp_ss);
3923 loop->temp_ss = NULL;
3927 /* Browse through each array's information from the scalarizer and set the loop
3928 bounds according to the "best" one (per dimension), i.e. the one which
3929 provides the most information (constant bounds, shape, etc). */
3932 set_loop_bounds (gfc_loopinfo *loop)
3934 int n, dim, spec_dim;
3935 gfc_array_info *info;
3936 gfc_array_info *specinfo;
3940 bool dynamic[GFC_MAX_DIMENSIONS];
3944 loopspec = loop->specloop;
3947 for (n = 0; n < loop->dimen; n++)
3951 /* We use one SS term, and use that to determine the bounds of the
3952 loop for this dimension. We try to pick the simplest term. */
3953 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3955 gfc_ss_type ss_type;
3957 ss_type = ss->info->type;
/* Scalars, temporaries and references carry no loop-bound information.  */
3958 if (ss_type == GFC_SS_SCALAR
3959 || ss_type == GFC_SS_TEMP
3960 || ss_type == GFC_SS_REFERENCE)
3963 info = &ss->info->data.array;
3966 if (loopspec[n] != NULL)
3968 specinfo = &loopspec[n]->info->data.array;
3969 spec_dim = loopspec[n]->dim[n];
3973 /* Silence uninitialized warnings. */
3980 gcc_assert (info->shape[dim]);
3981 /* The frontend has worked out the size for us. */
3984 || !integer_zerop (specinfo->start[spec_dim]))
3985 /* Prefer zero-based descriptors if possible. */
3990 if (ss_type == GFC_SS_CONSTRUCTOR)
3992 gfc_constructor_base base;
3993 /* An unknown size constructor will always be rank one.
3994 Higher rank constructors will either have known shape,
3995 or still be wrapped in a call to reshape. */
3996 gcc_assert (loop->dimen == 1);
3998 /* Always prefer to use the constructor bounds if the size
3999 can be determined at compile time. Prefer not to otherwise,
4000 since the general case involves realloc, and it's better to
4001 avoid that overhead if possible. */
4002 base = ss->info->expr->value.constructor;
4003 dynamic[n] = gfc_get_array_constructor_size (&i, base);
4004 if (!dynamic[n] || !loopspec[n])
4009 /* TODO: Pick the best bound if we have a choice between a
4010 function and something else. */
4011 if (ss_type == GFC_SS_FUNCTION)
4017 /* Avoid using an allocatable lhs in an assignment, since
4018 there might be a reallocation coming. */
4019 if (loopspec[n] && ss->is_alloc_lhs)
4022 if (ss_type != GFC_SS_SECTION)
4027 /* Criteria for choosing a loop specifier (most important first):
4028 doesn't need realloc
4034 else if ((loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
4035 || n >= loop->dimen)
4037 else if (integer_onep (info->stride[dim])
4038 && !integer_onep (specinfo->stride[spec_dim]))
4040 else if (INTEGER_CST_P (info->stride[dim])
4041 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
4043 else if (INTEGER_CST_P (info->start[dim])
4044 && !INTEGER_CST_P (specinfo->start[spec_dim]))
4046 /* We don't work out the upper bound.
4047 else if (INTEGER_CST_P (info->finish[n])
4048 && ! INTEGER_CST_P (specinfo->finish[n]))
4049 loopspec[n] = ss; */
4052 /* We should have found the scalarization loop specifier. If not,
4054 gcc_assert (loopspec[n]);
4056 info = &loopspec[n]->info->data.array;
4057 dim = loopspec[n]->dim[n];
4059 /* Set the extents of this range. */
4060 cshape = info->shape;
/* With a known shape and constant start/stride the upper bound can be
   folded at compile time.  */
4061 if (cshape && INTEGER_CST_P (info->start[dim])
4062 && INTEGER_CST_P (info->stride[dim]))
4064 loop->from[n] = info->start[dim];
4065 mpz_set (i, cshape[get_array_ref_dim (loopspec[n], n)]);
4066 mpz_sub_ui (i, i, 1);
4067 /* To = from + (size - 1) * stride. */
4068 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
4069 if (!integer_onep (info->stride[dim]))
4070 tmp = fold_build2_loc (input_location, MULT_EXPR,
4071 gfc_array_index_type, tmp,
4073 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
4074 gfc_array_index_type,
4075 loop->from[n], tmp);
4079 loop->from[n] = info->start[dim];
4080 switch (loopspec[n]->info->type)
4082 case GFC_SS_CONSTRUCTOR:
4083 /* The upper bound is calculated when we expand the
4085 gcc_assert (loop->to[n] == NULL_TREE);
4088 case GFC_SS_SECTION:
4089 /* Use the end expression if it exists and is not constant,
4090 so that it is only evaluated once. */
4091 loop->to[n] = info->end[dim];
4094 case GFC_SS_FUNCTION:
4095 /* The loop bound will be set when we generate the call. */
4096 gcc_assert (loop->to[n] == NULL_TREE);
4104 /* Transform everything so we have a simple incrementing variable. */
4105 if (n < loop->dimen && integer_onep (info->stride[dim]))
4106 info->delta[dim] = gfc_index_zero_node;
4107 else if (n < loop->dimen)
4109 /* Set the delta for this section. */
4110 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
4111 /* Number of iterations is (end - start + step) / step.
4112 with start = 0, this simplifies to
4114 for (i = 0; i<=last; i++){...}; */
4115 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4116 gfc_array_index_type, loop->to[n],
4118 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4119 gfc_array_index_type, tmp, info->stride[dim]);
/* Clamp to -1 so an empty range yields a zero-trip loop (to < from).  */
4120 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4121 tmp, build_int_cst (gfc_array_index_type, -1));
4122 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
4123 /* Make the loop variable start at 0. */
4124 loop->from[n] = gfc_index_zero_node;
4131 static void set_delta (gfc_loopinfo *loop);
4134 /* Initialize the scalarization loop. Creates the loop variables. Determines
4135 the range of the loop variables. Creates a temporary if required.
4136 Also generates code for scalar expressions which have been
4137 moved outside the loop. */
4140 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4145 set_loop_bounds (loop);
4147 /* Add all the scalar code that can be taken out of the loops.
4148 This may include calculating the loop bounds, so do it before
4149 allocating the temporary. */
4150 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4152 tmp_ss = loop->temp_ss;
4153 /* If we want a temporary then create it. */
4156 gfc_ss_info *tmp_ss_info;
4158 tmp_ss_info = tmp_ss->info;
4159 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4161 /* Make absolutely sure that this is a complete type. */
4162 if (tmp_ss_info->string_length)
4163 tmp_ss_info->data.temp.type
4164 = gfc_get_character_type_len_for_eltype
4165 (TREE_TYPE (tmp_ss_info->data.temp.type),
4166 tmp_ss_info->string_length);
4168 tmp = tmp_ss_info->data.temp.type;
/* Re-tag the temporary as a section: from here on it is scalarized like
   any other array term, so wipe the temp-specific info first.  */
4169 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4170 tmp_ss_info->type = GFC_SS_SECTION;
4172 gcc_assert (tmp_ss->dimen != 0);
4174 gfc_trans_create_temp_array (&loop->pre, &loop->post, loop,
4175 tmp_ss, tmp, NULL_TREE,
4176 false, true, false, where);
4179 /* For array parameters we don't have loop variables, so don't calculate the
4181 if (loop->array_parameter)
4188 /* Calculates how to transform from loop variables to array indices for each
4189 array: once loop bounds are chosen, sets the difference (DELTA field) between
4190 loop bounds and array reference bounds, for each array info. */
4193 set_delta (gfc_loopinfo *loop)
4195 gfc_ss *ss, **loopspec;
4196 gfc_array_info *info;
4200 loopspec = loop->specloop;
4202 /* Calculate the translation from loop variables to array indices. */
4203 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4205 gfc_ss_type ss_type;
4207 ss_type = ss->info->type;
/* Only terms with real array data need a delta.  */
4208 if (ss_type != GFC_SS_SECTION
4209 && ss_type != GFC_SS_COMPONENT
4210 && ss_type != GFC_SS_CONSTRUCTOR)
4213 info = &ss->info->data.array;
4215 for (n = 0; n < ss->dimen; n++)
4217 /* If we are specifying the range the delta is already set. */
4218 if (loopspec[n] != ss)
4222 /* Calculate the offset relative to the loop variable.
4223 First multiply by the stride. */
4224 tmp = loop->from[n];
4225 if (!integer_onep (info->stride[dim]))
4226 tmp = fold_build2_loc (input_location, MULT_EXPR,
4227 gfc_array_index_type,
4228 tmp, info->stride[dim]);
4230 /* Then subtract this from our starting value. */
4231 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4232 gfc_array_index_type,
4233 info->start[dim], tmp);
4235 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4242 /* Calculate the size of a given array dimension from the bounds. This
4243 is simply (ubound - lbound + 1) if this expression is positive
4244 or 0 if it is negative (pick either one if it is zero). Optionally
4245 (if or_expr is present) OR the (expression != 0) condition to it. */
4248 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4253 /* Calculate (ubound - lbound + 1). */
4254 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4256 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4257 gfc_index_one_node);
4259 /* Check whether the size for this dimension is negative. */
4260 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4261 gfc_index_zero_node);
/* Clamp negative extents to zero: empty sections have size 0.  */
4262 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4263 gfc_index_zero_node, res);
4265 /* Build OR expression. */
4267 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4268 boolean_type_node, *or_expr, cond);
4274 /* For an array descriptor, get the total number of elements. This is just
4275 the product of the extents along from_dim to to_dim. */
/* FROM_DIM is inclusive, TO_DIM exclusive; an empty range yields 1.  */
4278 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4283 res = gfc_index_one_node;
4285 for (dim = from_dim; dim < to_dim; ++dim)
4291 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4292 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4294 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4295 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4303 /* Full size of an array: product of the extents of all RANK dimensions. */
4306 gfc_conv_descriptor_size (tree desc, int rank)
4308 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4312 /* Size of a coarray for all dimensions but the last. */
/* Covers codimensions RANK .. RANK+CORANK-2 of the descriptor.  */
4315 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4317 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4321 /* Fills in an array descriptor, and returns the size of the array.
4322 The size will be a simple_val, ie a variable or a constant. Also
4323 calculates the offset of the base. The pointer argument overflow,
4324 which should be of integer type, will increase in value if overflow
4325 occurs during the size calculation. Returns the size of the array.
4329 for (n = 0; n < rank; n++)
4331 a.lbound[n] = specified_lower_bound;
4332 offset = offset + a.lbound[n] * stride;
4334 a.ubound[n] = specified_upper_bound;
4335 a.stride[n] = stride;
4336 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4337 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4338 stride = stride * size;
4340 for (n = rank; n < rank+corank; n++)
4341 (Set lcobound/ucobound as above.)
4342 element_size = sizeof (array element);
4345 stride = (size_t) stride;
4346 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4347 stride = stride * element_size;
4353 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4354 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4355 stmtblock_t * descriptor_block, tree * overflow)
4368 stmtblock_t thenblock;
4369 stmtblock_t elseblock;
4374 type = TREE_TYPE (descriptor);
4376 stride = gfc_index_one_node;
4377 offset = gfc_index_zero_node;
4379 /* Set the dtype. */
4380 tmp = gfc_conv_descriptor_dtype (descriptor);
4381 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
/* or_expr accumulates "some dimension is empty" across all dimensions.  */
4383 or_expr = boolean_false_node;
4385 for (n = 0; n < rank; n++)
4390 /* We have 3 possibilities for determining the size of the array:
4391 lower == NULL => lbound = 1, ubound = upper[n]
4392 upper[n] = NULL => lbound = 1, ubound = lower[n]
4393 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4396 /* Set lower bound. */
4397 gfc_init_se (&se, NULL);
4399 se.expr = gfc_index_one_node;
4402 gcc_assert (lower[n]);
4405 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4406 gfc_add_block_to_block (pblock, &se.pre);
4410 se.expr = gfc_index_one_node;
4414 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4415 gfc_rank_cst[n], se.expr);
4416 conv_lbound = se.expr;
4418 /* Work out the offset for this component. */
4419 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4421 offset = fold_build2_loc (input_location, MINUS_EXPR,
4422 gfc_array_index_type, offset, tmp);
4424 /* Set upper bound. */
4425 gfc_init_se (&se, NULL);
4426 gcc_assert (ubound);
4427 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4428 gfc_add_block_to_block (pblock, &se.pre);
4430 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4431 gfc_rank_cst[n], se.expr);
4432 conv_ubound = se.expr;
4434 /* Store the stride. */
4435 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4436 gfc_rank_cst[n], stride);
4438 /* Calculate size and check whether extent is negative. */
4439 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4440 size = gfc_evaluate_now (size, pblock);
4442 /* Check whether multiplying the stride by the number of
4443 elements in this dimension would overflow. We must also check
4444 whether the current dimension has zero size in order to avoid
4447 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4448 gfc_array_index_type,
4449 fold_convert (gfc_array_index_type,
4450 TYPE_MAX_VALUE (gfc_array_index_type)),
4452 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4453 boolean_type_node, tmp, stride));
4454 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4455 integer_one_node, integer_zero_node);
/* A zero-sized dimension cannot overflow (and the division above would
   be undefined), so force the overflow increment to zero in that case.  */
4456 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4457 boolean_type_node, size,
4458 gfc_index_zero_node));
4459 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4460 integer_zero_node, tmp);
4461 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4463 *overflow = gfc_evaluate_now (tmp, pblock);
4465 /* Multiply the stride by the number of elements in this dimension. */
4466 stride = fold_build2_loc (input_location, MULT_EXPR,
4467 gfc_array_index_type, stride, size);
4468 stride = gfc_evaluate_now (stride, pblock);
/* Now the codimensions: only bounds are set, they do not contribute to
   the allocation size.  */
4471 for (n = rank; n < rank + corank; n++)
4475 /* Set lower bound. */
4476 gfc_init_se (&se, NULL);
4477 if (lower == NULL || lower[n] == NULL)
4479 gcc_assert (n == rank + corank - 1);
4480 se.expr = gfc_index_one_node;
4484 if (ubound || n == rank + corank - 1)
4486 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4487 gfc_add_block_to_block (pblock, &se.pre);
4491 se.expr = gfc_index_one_node;
4495 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4496 gfc_rank_cst[n], se.expr);
4498 if (n < rank + corank - 1)
4500 gfc_init_se (&se, NULL);
4501 gcc_assert (ubound);
4502 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4503 gfc_add_block_to_block (pblock, &se.pre);
4504 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4505 gfc_rank_cst[n], se.expr);
4509 /* The stride is the number of elements in the array, so multiply by the
4510 size of an element to get the total size. */
4511 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4512 /* Convert to size_t. */
4513 element_size = fold_convert (size_type_node, tmp);
4516 return element_size;
4518 stride = fold_convert (size_type_node, stride);
4520 /* First check for overflow. Since an array of type character can
4521 have zero element_size, we must check for that before
4523 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4525 TYPE_MAX_VALUE (size_type_node), element_size);
4526 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4527 boolean_type_node, tmp, stride));
4528 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4529 integer_one_node, integer_zero_node);
4530 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4531 boolean_type_node, element_size,
4532 build_int_cst (size_type_node, 0)));
4533 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4534 integer_zero_node, tmp);
4535 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4537 *overflow = gfc_evaluate_now (tmp, pblock);
4539 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
4540 stride, element_size);
4542 if (poffset != NULL)
4544 offset = gfc_evaluate_now (offset, pblock);
/* If no dimension could be empty (or_expr folded to false) return SIZE
   directly; otherwise select 0 or SIZE at run time.  */
4548 if (integer_zerop (or_expr))
4550 if (integer_onep (or_expr))
4551 return build_int_cst (size_type_node, 0);
4553 var = gfc_create_var (TREE_TYPE (size), "size");
4554 gfc_start_block (&thenblock);
4555 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
4556 thencase = gfc_finish_block (&thenblock);
4558 gfc_start_block (&elseblock);
4559 gfc_add_modify (&elseblock, var, size);
4560 elsecase = gfc_finish_block (&elseblock);
4562 tmp = gfc_evaluate_now (or_expr, pblock);
4563 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
4564 gfc_add_expr_to_block (pblock, tmp);
4570 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
4571 the work for an ALLOCATE statement. */
4575 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
4580 tree offset = NULL_TREE;
4581 tree token = NULL_TREE;
4584 tree error = NULL_TREE;
4585 tree overflow; /* Boolean storing whether size calculation overflows. */
4586 tree var_overflow = NULL_TREE;
4588 tree set_descriptor;
4589 stmtblock_t set_descriptor_block;
4590 stmtblock_t elseblock;
4593 gfc_ref *ref, *prev_ref = NULL;
4594 bool allocatable, coarray, dimension;
4598 /* Find the last reference in the chain. */
4599 while (ref && ref->next != NULL)
4601 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
4602 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
4607 if (ref == NULL || ref->type != REF_ARRAY)
/* Attributes come from the base symbol unless the final reference is a
   component, in which case the component's attributes apply.  */
4612 allocatable = expr->symtree->n.sym->attr.allocatable;
4613 coarray = expr->symtree->n.sym->attr.codimension;
4614 dimension = expr->symtree->n.sym->attr.dimension;
4618 allocatable = prev_ref->u.c.component->attr.allocatable;
4619 coarray = prev_ref->u.c.component->attr.codimension;
4620 dimension = prev_ref->u.c.component->attr.dimension;
4624 gcc_assert (coarray);
4626 /* Figure out the size of the array. */
4627 switch (ref->u.ar.type)
4633 upper = ref->u.ar.start;
4639 lower = ref->u.ar.start;
4640 upper = ref->u.ar.end;
4644 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
4646 lower = ref->u.ar.as->lower;
4647 upper = ref->u.ar.as->upper;
4655 overflow = integer_zero_node;
4657 gfc_init_block (&set_descriptor_block);
4658 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
4659 ref->u.ar.as->corank, &offset, lower, upper,
4660 &se->pre, &set_descriptor_block, &overflow);
4665 var_overflow = gfc_create_var (integer_type_node, "overflow");
4666 gfc_add_modify (&se->pre, var_overflow, overflow);
4668 /* Generate the block of code handling overflow. */
4669 msg = gfc_build_addr_expr (pchar_type_node,
4670 gfc_build_localized_cstring_const
4671 ("Integer overflow when calculating the amount of "
4672 "memory to allocate"));
4673 error = build_call_expr_loc (input_location, gfor_fndecl_runtime_error,
/* When STAT= is present, overflow sets the status instead of aborting.  */
4677 if (status != NULL_TREE)
4679 tree status_type = TREE_TYPE (status);
4680 stmtblock_t set_status_block;
4682 gfc_start_block (&set_status_block);
4683 gfc_add_modify (&set_status_block, status,
4684 build_int_cst (status_type, LIBERROR_ALLOCATION));
4685 error = gfc_finish_block (&set_status_block);
4688 gfc_start_block (&elseblock);
4690 /* Allocate memory to store the data. */
4691 pointer = gfc_conv_descriptor_data_get (se->expr);
4692 STRIP_NOPS (pointer);
4694 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
4695 token = gfc_build_addr_expr (NULL_TREE,
4696 gfc_conv_descriptor_token (se->expr));
4698 /* The allocatable variant takes the old pointer as first argument. */
4700 gfc_allocate_allocatable (&elseblock, pointer, size, token,
4701 status, errmsg, errlen, expr);
4703 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
4707 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
4708 boolean_type_node, var_overflow, integer_zero_node));
4709 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
4710 error, gfc_finish_block (&elseblock));
4713 tmp = gfc_finish_block (&elseblock);
4715 gfc_add_expr_to_block (&se->pre, tmp);
4717 /* Update the array descriptors. */
4719 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
/* Only commit the descriptor updates when the allocation succeeded
   (status == 0) if a STAT= variable was given.  */
4721 set_descriptor = gfc_finish_block (&set_descriptor_block);
4722 if (status != NULL_TREE)
4724 cond = fold_build2_loc (input_location, EQ_EXPR,
4725 boolean_type_node, status,
4726 build_int_cst (TREE_TYPE (status), 0));
4727 gfc_add_expr_to_block (&se->pre,
4728 fold_build3_loc (input_location, COND_EXPR, void_type_node,
4729 gfc_likely (cond), set_descriptor,
4730 build_empty_stmt (input_location)));
4733 gfc_add_expr_to_block (&se->pre, set_descriptor);
4735 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
4736 && expr->ts.u.derived->attr.alloc_comp)
4738 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
4739 ref->u.ar.as->rank);
4740 gfc_add_expr_to_block (&se->pre, tmp);
4747 /* Deallocate an array variable. Also used when an allocated variable goes
4752 gfc_array_deallocate (tree descriptor, tree pstat, gfc_expr* expr)
4758 gfc_start_block (&block);
4759 /* Get a pointer to the data. */
4760 var = gfc_conv_descriptor_data_get (descriptor);
4763 /* Parameter is the address of the data component. */
4764 tmp = gfc_deallocate_with_status (var, pstat, false, expr);
4765 gfc_add_expr_to_block (&block, tmp);
4767 /* Zero the data pointer. */
/* Nulling the pointer marks the array as deallocated so a later
   ALLOCATED() inquiry or automatic deallocation behaves correctly.  */
4768 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4769 var, build_int_cst (TREE_TYPE (var), 0));
4770 gfc_add_expr_to_block (&block, tmp);
4772 return gfc_finish_block (&block);
4776 /* Create an array constructor from an initialization expression.
4777 We assume the frontend already did any expansions and conversions. */
4780 gfc_conv_array_initializer (tree type, gfc_expr * expr)
4786 unsigned HOST_WIDE_INT lo;
4788 VEC(constructor_elt,gc) *v = NULL;
4790 switch (expr->expr_type)
4793 case EXPR_STRUCTURE:
4794 /* A single scalar or derived type value. Create an array with all
4795 elements equal to that value. */
4796 gfc_init_se (&se, NULL);
4798 if (expr->expr_type == EXPR_CONSTANT)
4799 gfc_conv_constant (&se, expr);
4801 gfc_conv_structure (&se, expr, 1);
4803 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4804 gcc_assert (tmp && INTEGER_CST_P (tmp));
4805 hi = TREE_INT_CST_HIGH (tmp);
4806 lo = TREE_INT_CST_LOW (tmp);
4810 /* This will probably eat buckets of memory for large arrays. */
4811 while (hi != 0 || lo != 0)
4813 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
4821 /* Create a vector of all the elements. */
4822 for (c = gfc_constructor_first (expr->value.constructor);
4823 c; c = gfc_constructor_next (c))
4827 /* Problems occur when we get something like
4828 integer :: a(lots) = (/(i, i=1, lots)/) */
4829 gfc_fatal_error ("The number of elements in the array constructor "
4830 "at %L requires an increase of the allowed %d "
4831 "upper limit. See -fmax-array-constructor "
4832 "option", &expr->where,
4833 gfc_option.flag_max_array_constructor);
4836 if (mpz_cmp_si (c->offset, 0) != 0)
4837 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
/* Repeated constructor values are emitted once under a RANGE_EXPR
   index covering [offset, offset + repeat - 1].  */
4841 if (mpz_cmp_si (c->repeat, 1) > 0)
4847 mpz_add (maxval, c->offset, c->repeat);
4848 mpz_sub_ui (maxval, maxval, 1);
4849 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4850 if (mpz_cmp_si (c->offset, 0) != 0)
4852 mpz_add_ui (maxval, c->offset, 1);
4853 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
4856 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
4858 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
4864 gfc_init_se (&se, NULL);
4865 switch (c->expr->expr_type)
4868 gfc_conv_constant (&se, c->expr);
4871 case EXPR_STRUCTURE:
4872 gfc_conv_structure (&se, c->expr, 1);
4876 /* Catch those occasional beasts that do not simplify
4877 for one reason or another, assuming that if they are
4878 standard defying the frontend will catch them. */
4879 gfc_conv_expr (&se, c->expr);
4883 if (range == NULL_TREE)
4884 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4887 if (index != NULL_TREE)
4888 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
4889 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
4895 return gfc_build_null_descriptor (type);
4901 /* Create a constructor from the list of elements. */
4902 tmp = build_constructor (type, v);
4903 TREE_CONSTANT (tmp) = 1;
4908 /* Generate code to evaluate non-constant coarray cobounds. */
4911 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
4912 const gfc_symbol *sym)
/* Codimensions live at indices [rank, rank + corank) of the array spec.  */
4922 for (dim = as->rank; dim < as->rank + as->corank; dim++)
4924 /* Evaluate non-constant array bound expressions. */
4925 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4926 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4928 gfc_init_se (&se, NULL);
4929 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4930 gfc_add_block_to_block (pblock, &se.pre);
4931 gfc_add_modify (pblock, lbound, se.expr);
4933 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4934 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4936 gfc_init_se (&se, NULL);
4937 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4938 gfc_add_block_to_block (pblock, &se.pre);
4939 gfc_add_modify (pblock, ubound, se.expr);
4945 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
4946 returns the size (in elements) of the array. */
4949 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
4950 stmtblock_t * pblock)
4965 size = gfc_index_one_node;
4966 offset = gfc_index_zero_node;
4967 for (dim = 0; dim < as->rank; dim++)
4969 /* Evaluate non-constant array bound expressions. */
4970 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
4971 if (as->lower[dim] && !INTEGER_CST_P (lbound))
4973 gfc_init_se (&se, NULL);
4974 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
4975 gfc_add_block_to_block (pblock, &se.pre);
4976 gfc_add_modify (pblock, lbound, se.expr);
4978 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
4979 if (as->upper[dim] && !INTEGER_CST_P (ubound))
4981 gfc_init_se (&se, NULL);
4982 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
4983 gfc_add_block_to_block (pblock, &se.pre);
4984 gfc_add_modify (pblock, ubound, se.expr);
4986 /* The offset of this dimension. offset = offset - lbound * stride. */
4987 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4989 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4992 /* The size of this dimension, and the stride of the next. */
4993 if (dim + 1 < as->rank)
4994 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
4996 stride = GFC_TYPE_ARRAY_SIZE (type);
4998 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5000 /* Calculate stride = size * (ubound + 1 - lbound). */
5001 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5002 gfc_array_index_type,
5003 gfc_index_one_node, lbound);
5004 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5005 gfc_array_index_type, ubound, tmp);
5006 tmp = fold_build2_loc (input_location, MULT_EXPR,
5007 gfc_array_index_type, size, tmp);
5009 gfc_add_modify (pblock, stride, tmp);
5011 stride = gfc_evaluate_now (tmp, pblock);
5013 /* Make sure that negative size arrays are translated
5014 to being zero size. */
5015 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5016 stride, gfc_index_zero_node);
5017 tmp = fold_build3_loc (input_location, COND_EXPR,
5018 gfc_array_index_type, tmp,
5019 stride, gfc_index_zero_node);
5020 gfc_add_modify (pblock, stride, tmp);
/* Also materialize the cobounds and any VLA type sizes the bounds feed.  */
5026 gfc_trans_array_cobounds (type, pblock, sym);
5027 gfc_trans_vla_type_sizes (sym, pblock);
5034 /* Generate code to initialize/allocate an array variable. */
/* NOTE(review): this is a line-numbered listing with gaps (e.g. original
   lines 5039-5041, 5043-5048 are absent), so the return type, local
   declarations, braces, and early returns of this function are not shown.
   Comments below describe only the statements that are visible.  */
5037 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5038 gfc_wrapped_block * block)
5042 tree tmp = NULL_TREE;
/* Pointer and allocatable arrays are handled elsewhere; this routine is
   only for automatic (and similar) arrays.  */
5049 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5051 /* Do nothing for USEd variables. */
5052 if (sym->attr.use_assoc)
5055 type = TREE_TYPE (decl);
5056 gcc_assert (GFC_ARRAY_TYPE_P (type));
/* "onstack": the decl is the array object itself (not a pointer to
   separately allocated storage).  */
5057 onstack = TREE_CODE (type) != POINTER_TYPE;
5059 gfc_init_block (&init);
5061 /* Evaluate character string length. */
5062 if (sym->ts.type == BT_CHARACTER
5063 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5065 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5067 gfc_trans_vla_type_sizes (sym, &init);
5069 /* Emit a DECL_EXPR for this variable, which will cause the
5070 gimplifier to allocate storage, and all that good stuff. */
5071 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5072 gfc_add_expr_to_block (&init, tmp);
/* On-stack path is complete here: no cleanup is needed (NULL_TREE),
   since the gimplifier owns the storage.  Presumably followed by a
   return in the omitted lines -- TODO confirm against full source.  */
5077 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* From here on decl is a pointer; strip one level to get the array
   type itself.  */
5081 type = TREE_TYPE (type);
5083 gcc_assert (!sym->attr.use_assoc);
5084 gcc_assert (!TREE_STATIC (decl));
5085 gcc_assert (!sym->module);
5087 if (sym->ts.type == BT_CHARACTER
5088 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5089 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
/* Number of elements; also emits code that fills in the type's
   bound/stride variables.  */
5091 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5093 /* Don't actually allocate space for Cray Pointees. */
5094 if (sym->attr.cray_pointee)
5096 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5097 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5099 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
/* -fstack-arrays: place the data in a stack temporary instead of
   calling malloc/free.  */
5103 if (gfc_option.flag_stack_arrays)
5105 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5106 space = build_decl (sym->declared_at.lb->location,
5107 VAR_DECL, create_tmp_var_name ("A"),
5108 TREE_TYPE (TREE_TYPE (decl)));
5109 gfc_trans_vla_type_sizes (sym, &init);
5113 /* The size is the number of elements in the array, so multiply by the
5114 size of an element to get the total size. */
5115 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5116 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5117 size, fold_convert (gfc_array_index_type, tmp));
5119 /* Allocate memory to hold the data. */
5120 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5121 gfc_add_modify (&init, decl, tmp);
5123 /* Free the temporary. */
/* Freed in the cleanup part of the wrapped block (tmp is passed to
   gfc_add_init_cleanup below).  */
5124 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5128 /* Set offset of the array. */
5129 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5130 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5132 /* Automatic arrays should not have initializers. */
5133 gcc_assert (!sym->value)
5135 inittree = gfc_finish_block (&init);
/* Stack-array variant: emit the DECL_EXPR for the temporary and point
   decl at it; no cleanup needed in that case.  */
5142 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5143 where also space is located. */
5144 gfc_init_block (&init);
5145 tmp = fold_build1_loc (input_location, DECL_EXPR,
5146 TREE_TYPE (space), space);
5147 gfc_add_expr_to_block (&init, tmp);
5148 addr = fold_build1_loc (sym->declared_at.lb->location,
5149 ADDR_EXPR, TREE_TYPE (decl), space);
5150 gfc_add_modify (&init, decl, addr);
5151 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5154 gfc_add_init_cleanup (block, inittree, tmp);
5158 /* Generate entry and exit code for g77 calling convention arrays. */
/* NOTE(review): numbered listing with gaps; the return type, locals,
   braces and any early returns are omitted here.  Nodesc ("g77") dummy
   arrays are passed as a bare data pointer, so only bounds/offset setup
   is needed on entry and nothing on exit (cleanup is NULL_TREE).  */
5161 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
/* Emit diagnostics/location info at the symbol's declaration point.  */
5171 gfc_save_backend_locus (&loc);
5172 gfc_set_backend_locus (&sym->declared_at);
5174 /* Descriptor type. */
5175 parm = sym->backend_decl;
5176 type = TREE_TYPE (parm);
5177 gcc_assert (GFC_ARRAY_TYPE_P (type));
5179 gfc_start_block (&init);
/* A VAR_DECL string length means it is not a compile-time constant and
   must be evaluated at entry.  */
5181 if (sym->ts.type == BT_CHARACTER
5182 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5183 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5185 /* Evaluate the bounds of the array. */
5186 gfc_trans_array_bounds (type, sym, &offset, &init);
5188 /* Set the offset. */
5189 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5190 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5192 /* Set the pointer itself if we aren't using the parameter directly. */
5193 if (TREE_CODE (parm) != PARM_DECL)
5195 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5196 gfc_add_modify (&init, parm, tmp);
5198 stmt = gfc_finish_block (&init);
5200 gfc_restore_backend_locus (&loc);
5202 /* Add the initialization code to the start of the function. */
/* Guard the whole init block on presence for OPTIONAL (or entry-master
   not-always-present) dummies.  */
5204 if (sym->attr.optional || sym->attr.not_always_present)
5206 tmp = gfc_conv_expr_present (sym);
5207 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5210 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5214 /* Modify the descriptor of an array parameter so that it has the
5215 correct lower bound. Also move the upper bound accordingly.
5216 If the array is not packed, it will be copied into a temporary.
5217 For each dimension we set the new lower and upper bounds. Then we copy the
5218 stride and calculate the offset for this dimension. We also work out
5219 what the stride of a packed array would be, and see it the two match.
5220 If the array need repacking, we set the stride to the values we just
5221 calculated, recalculate the offset and copy the array data.
5222 Code is also added to copy the data back at the end of the function.
/* NOTE(review): numbered listing with gaps; declarations, braces,
   `else` keywords and early returns from the original are not shown.
   Comments added below only describe the visible statements.  */
5226 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5227 gfc_wrapped_block * block)
5234 tree stmtInit, stmtCleanup;
5241 tree stride, stride2;
5251 /* Do nothing for pointer and allocatable arrays. */
5252 if (sym->attr.pointer || sym->attr.allocatable)
/* Descriptorless dummies take the simpler g77 path.  */
5255 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5257 gfc_trans_g77_array (sym, block);
5261 gfc_save_backend_locus (&loc);
5262 gfc_set_backend_locus (&sym->declared_at);
5264 /* Descriptor type. */
5265 type = TREE_TYPE (tmpdesc);
5266 gcc_assert (GFC_ARRAY_TYPE_P (type));
/* dumdesc: the caller-supplied descriptor; tmpdesc: the local (possibly
   repacked) representation used inside the procedure.  */
5267 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5268 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5269 gfc_start_block (&init);
5271 if (sym->ts.type == BT_CHARACTER
5272 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5273 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
/* Bounds checking only applies when the dummy's shape is explicit.  */
5275 checkparm = (sym->as->type == AS_EXPLICIT
5276 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5278 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5279 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5281 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5283 /* For non-constant shape arrays we only check if the first dimension
5284 is contiguous. Repacking higher dimensions wouldn't gain us
5285 anything as we still don't know the array stride. */
/* partial == true at runtime when stride(0) == 1, i.e. the leading
   dimension is already contiguous and no repack is needed.  */
5286 partial = gfc_create_var (boolean_type_node, "partial");
5287 TREE_USED (partial) = 1;
5288 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5289 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5290 gfc_index_one_node);
5291 gfc_add_modify (&init, partial, tmp);
5294 partial = NULL_TREE;
5296 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5297 here, however I think it does the right thing. */
5300 /* Set the first stride. */
5301 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5302 stride = gfc_evaluate_now (stride, &init);
/* A zero stride (e.g. from an assumed-shape scalar-extent case) is
   normalized to one.  */
5304 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5305 stride, gfc_index_zero_node);
5306 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5307 tmp, gfc_index_one_node, stride);
5308 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5309 gfc_add_modify (&init, stride, tmp);
5311 /* Allow the user to disable array repacking. */
5312 stmt_unpacked = NULL_TREE;
5316 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5317 /* A library call to repack the array if necessary. */
5318 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5319 stmt_unpacked = build_call_expr_loc (input_location,
5320 gfor_fndecl_in_pack, 1, tmp);
5322 stride = gfc_index_one_node;
5324 if (gfc_option.warn_array_temp)
5325 gfc_warning ("Creating array temporary at %L", &loc);
5328 /* This is for the case where the array data is used directly without
5329 calling the repack function. */
5330 if (no_repack || partial != NULL_TREE)
5331 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5333 stmt_packed = NULL_TREE;
5335 /* Assign the data pointer. */
5336 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5338 /* Don't repack unknown shape arrays when the first stride is 1. */
5339 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5340 partial, stmt_packed, stmt_unpacked);
5343 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5344 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5346 offset = gfc_index_zero_node;
5347 size = gfc_index_one_node;
5349 /* Evaluate the bounds of the array. */
5350 for (n = 0; n < sym->as->rank; n++)
/* Actual-argument bounds are only needed for bounds checking or for
   assumed-shape dimensions (no declared upper bound).  */
5352 if (checkparm || !sym->as->upper[n])
5354 /* Get the bounds of the actual parameter. */
5355 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5356 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5360 dubound = NULL_TREE;
5361 dlbound = NULL_TREE;
5364 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5365 if (!INTEGER_CST_P (lbound))
5367 gfc_init_se (&se, NULL);
5368 gfc_conv_expr_type (&se, sym->as->lower[n],
5369 gfc_array_index_type);
5370 gfc_add_block_to_block (&init, &se.pre);
5371 gfc_add_modify (&init, lbound, se.expr);
5374 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5375 /* Set the desired upper bound. */
5376 if (sym->as->upper[n])
5378 /* We know what we want the upper bound to be. */
5379 if (!INTEGER_CST_P (ubound))
5381 gfc_init_se (&se, NULL);
5382 gfc_conv_expr_type (&se, sym->as->upper[n],
5383 gfc_array_index_type);
5384 gfc_add_block_to_block (&init, &se.pre);
5385 gfc_add_modify (&init, ubound, se.expr);
5388 /* Check the sizes match. */
5391 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
/* temp = extent of the dummy, stride2 = extent of the actual; a
   runtime error is raised if they differ.  */
5395 temp = fold_build2_loc (input_location, MINUS_EXPR,
5396 gfc_array_index_type, ubound, lbound);
5397 temp = fold_build2_loc (input_location, PLUS_EXPR,
5398 gfc_array_index_type,
5399 gfc_index_one_node, temp);
5400 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5401 gfc_array_index_type, dubound,
5403 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5404 gfc_array_index_type,
5405 gfc_index_one_node, stride2);
5406 tmp = fold_build2_loc (input_location, NE_EXPR,
5407 gfc_array_index_type, temp, stride2);
5408 asprintf (&msg, "Dimension %d of array '%s' has extent "
5409 "%%ld instead of %%ld", n+1, sym->name);
5411 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5412 fold_convert (long_integer_type_node, temp),
5413 fold_convert (long_integer_type_node, stride2));
5420 /* For assumed shape arrays move the upper bound by the same amount
5421 as the lower bound. */
5422 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5423 gfc_array_index_type, dubound, dlbound);
5424 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5425 gfc_array_index_type, tmp, lbound);
5426 gfc_add_modify (&init, ubound, tmp);
5428 /* The offset of this dimension. offset = offset - lbound * stride. */
5429 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5431 offset = fold_build2_loc (input_location, MINUS_EXPR,
5432 gfc_array_index_type, offset, tmp);
5434 /* The size of this dimension, and the stride of the next. */
5435 if (n + 1 < sym->as->rank)
5437 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5439 if (no_repack || partial != NULL_TREE)
5441 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5443 /* Figure out the stride if not a known constant. */
5444 if (!INTEGER_CST_P (stride))
5447 stmt_packed = NULL_TREE;
5450 /* Calculate stride = size * (ubound + 1 - lbound). */
5451 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5452 gfc_array_index_type,
5453 gfc_index_one_node, lbound);
5454 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5455 gfc_array_index_type, ubound, tmp);
5456 size = fold_build2_loc (input_location, MULT_EXPR,
5457 gfc_array_index_type, size, tmp);
5461 /* Assign the stride. */
/* When both a packed and an unpacked stride exist, pick at runtime
   based on `partial` (whether the data was left in place).  */
5462 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5463 tmp = fold_build3_loc (input_location, COND_EXPR,
5464 gfc_array_index_type, partial,
5465 stmt_unpacked, stmt_packed);
5467 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5468 gfc_add_modify (&init, stride, tmp);
/* Last dimension: compute the total SIZE instead of a next stride.  */
5473 stride = GFC_TYPE_ARRAY_SIZE (type);
5475 if (stride && !INTEGER_CST_P (stride))
5477 /* Calculate size = stride * (ubound + 1 - lbound). */
5478 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5479 gfc_array_index_type,
5480 gfc_index_one_node, lbound);
5481 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5482 gfc_array_index_type,
5484 tmp = fold_build2_loc (input_location, MULT_EXPR,
5485 gfc_array_index_type,
5486 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5487 gfc_add_modify (&init, stride, tmp);
5492 gfc_trans_array_cobounds (type, &init, sym);
5494 /* Set the offset. */
5495 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5496 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5498 gfc_trans_vla_type_sizes (sym, &init);
5500 stmtInit = gfc_finish_block (&init);
5502 /* Only do the entry/initialization code if the arg is present. */
5503 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5504 optional_arg = (sym->attr.optional
5505 || (sym->ns->proc_name->attr.entry_master
5506 && sym->attr.dummy));
5509 tmp = gfc_conv_expr_present (sym);
5510 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
5511 build_empty_stmt (input_location));
5516 stmtCleanup = NULL_TREE;
/* Exit code: unpack (copy back) unless INTENT(IN), then free the
   repack temporary -- all guarded so it only runs if a repack actually
   happened (data pointer differs from the caller's).  */
5519 stmtblock_t cleanup;
5520 gfc_start_block (&cleanup);
5522 if (sym->attr.intent != INTENT_IN)
5524 /* Copy the data back. */
5525 tmp = build_call_expr_loc (input_location,
5526 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
5527 gfc_add_expr_to_block (&cleanup, tmp);
5530 /* Free the temporary. */
5531 tmp = gfc_call_free (tmpdesc);
5532 gfc_add_expr_to_block (&cleanup, tmp);
5534 stmtCleanup = gfc_finish_block (&cleanup);
5536 /* Only do the cleanup if the array was repacked. */
5537 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
5538 tmp = gfc_conv_descriptor_data_get (tmp);
5539 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
5541 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5542 build_empty_stmt (input_location));
5546 tmp = gfc_conv_expr_present (sym);
5547 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
5548 build_empty_stmt (input_location));
5552 /* We don't need to free any memory allocated by internal_pack as it will
5553 be freed at the end of the function by pop_context. */
5554 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
5556 gfc_restore_backend_locus (&loc);
5560 /* Calculate the overall offset, including subreferences. */
/* NOTE(review): numbered listing with gaps; the return type, local
   declarations, braces and switch/case labels are not visible here.
   PARM receives the descriptor whose data pointer is set; DESC is the
   source array; OFFSET indexes into its data; SUBREF/EXPR drive the
   walk over component/substring/element subreferences.  */
5562 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
5563 bool subref, gfc_expr *expr)
5573 /* If offset is NULL and this is not a subreferenced array, there is
5575 if (offset == NULL_TREE)
5578 offset = gfc_index_zero_node;
/* Index the raw data with the accumulated element offset.  */
5583 tmp = gfc_conv_array_data (desc);
5584 tmp = build_fold_indirect_ref_loc (input_location,
5586 tmp = gfc_build_array_ref (tmp, offset, NULL);
5588 /* Offset the data pointer for pointer assignments from arrays with
5589 subreferences; e.g. my_integer => my_type(:)%integer_component. */
5592 /* Go past the array reference. */
5593 for (ref = expr->ref; ref; ref = ref->next)
5594 if (ref->type == REF_ARRAY &&
5595 ref->u.ar.type != AR_ELEMENT)
5601 /* Calculate the offset for each subsequent subreference. */
5602 for (; ref; ref = ref->next)
/* Component reference: descend into the derived-type field.  */
5607 field = ref->u.c.component->backend_decl;
5608 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
5609 tmp = fold_build3_loc (input_location, COMPONENT_REF,
5611 tmp, field, NULL_TREE);
/* Substring reference: index the character array at its start.  */
5615 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
5616 gfc_init_se (&start, NULL);
5617 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
5618 gfc_add_block_to_block (block, &start.pre);
5619 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
/* Array-element reference: compute a row-major linear index from the
   per-dimension start/lower/upper bounds.  */
5623 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
5624 && ref->u.ar.type == AR_ELEMENT);
5626 /* TODO - Add bounds checking. */
5627 stride = gfc_index_one_node;
5628 index = gfc_index_zero_node;
5629 for (n = 0; n < ref->u.ar.dimen; n++)
5634 /* Update the index. */
/* index += (start[n] - lower[n]) * stride.  */
5635 gfc_init_se (&start, NULL);
5636 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
5637 itmp = gfc_evaluate_now (start.expr, block);
5638 gfc_init_se (&start, NULL);
5639 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
5640 jtmp = gfc_evaluate_now (start.expr, block);
5641 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5642 gfc_array_index_type, itmp, jtmp);
5643 itmp = fold_build2_loc (input_location, MULT_EXPR,
5644 gfc_array_index_type, itmp, stride);
5645 index = fold_build2_loc (input_location, PLUS_EXPR,
5646 gfc_array_index_type, itmp, index);
5647 index = gfc_evaluate_now (index, block);
5649 /* Update the stride. */
/* stride *= extent of this dimension (upper - lower + 1).  */
5650 gfc_init_se (&start, NULL);
5651 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
5652 itmp = fold_build2_loc (input_location, MINUS_EXPR,
5653 gfc_array_index_type, start.expr,
5655 itmp = fold_build2_loc (input_location, PLUS_EXPR,
5656 gfc_array_index_type,
5657 gfc_index_one_node, itmp);
5658 stride = fold_build2_loc (input_location, MULT_EXPR,
5659 gfc_array_index_type, stride, itmp);
5660 stride = gfc_evaluate_now (stride, block);
5663 /* Apply the index to obtain the array element. */
5664 tmp = gfc_build_array_ref (tmp, index, NULL);
5674 /* Set the target data pointer. */
5675 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
5676 gfc_conv_descriptor_data_set (block, parm, offset);
5680 /* gfc_conv_expr_descriptor needs the string length an expression
5681 so that the size of the temporary can be obtained. This is done
5682 by adding up the string lengths of all the elements in the
5683 expression. Function with non-constant expressions have their
5684 string lengths mapped onto the actual arguments using the
5685 interface mapping machinery in trans-expr.c. */
/* NOTE(review): numbered listing with gaps; the `static void` header,
   braces, `case`/`default` labels and returns are not visible.  Sets
   expr->ts.u.cl->backend_decl as a side effect; evaluation code is
   added to se->pre/se->post.  */
5687 get_array_charlen (gfc_expr *expr, gfc_se *se)
5689 gfc_interface_mapping mapping;
5690 gfc_formal_arglist *formal;
5691 gfc_actual_arglist *arg;
/* Constant length: just convert it once and we are done.  */
5694 if (expr->ts.u.cl->length
5695 && gfc_is_constant_expr (expr->ts.u.cl->length))
5697 if (!expr->ts.u.cl->backend_decl)
5698 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5702 switch (expr->expr_type)
/* EXPR_OP case (labels omitted in this listing): recurse into the
   operands and combine their lengths.  */
5705 get_array_charlen (expr->value.op.op1, se);
5707 /* For parentheses the expression ts.u.cl is identical. */
5708 if (expr->value.op.op == INTRINSIC_PARENTHESES)
5711 expr->ts.u.cl->backend_decl =
5712 gfc_create_var (gfc_charlen_type_node, "sln");
5714 if (expr->value.op.op2)
5716 get_array_charlen (expr->value.op.op2, se);
/* Concatenation is the only visible binary character op here.  */
5718 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
5720 /* Add the string lengths and assign them to the expression
5721 string length backend declaration. */
5722 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5723 fold_build2_loc (input_location, PLUS_EXPR,
5724 gfc_charlen_type_node,
5725 expr->value.op.op1->ts.u.cl->backend_decl,
5726 expr->value.op.op2->ts.u.cl->backend_decl));
/* Unary op: length is just that of the single operand.  */
5729 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
5730 expr->value.op.op1->ts.u.cl->backend_decl);
/* EXPR_FUNCTION case: constant or no explicit interface -> direct
   conversion; otherwise map dummies onto actuals.  */
5734 if (expr->value.function.esym == NULL
5735 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
5737 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5741 /* Map expressions involving the dummy arguments onto the actual
5742 argument expressions. */
5743 gfc_init_interface_mapping (&mapping);
5744 formal = expr->symtree->n.sym->formal;
5745 arg = expr->value.function.actual;
5747 /* Set se = NULL in the calls to the interface mapping, to suppress any
5749 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
5754 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
5757 gfc_init_se (&tse, NULL);
5759 /* Build the expression for the character length and convert it. */
5760 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
5762 gfc_add_block_to_block (&se->pre, &tse.pre);
5763 gfc_add_block_to_block (&se->post, &tse.post);
5764 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
/* Clamp to zero: negative character lengths are treated as zero.  */
5765 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
5766 gfc_charlen_type_node, tse.expr,
5767 build_int_cst (gfc_charlen_type_node, 0));
5768 expr->ts.u.cl->backend_decl = tse.expr;
5769 gfc_free_interface_mapping (&mapping);
/* Default case: fall back to the generic string-length conversion.  */
5773 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
5779 /* Helper function to check dimensions. */
/* NOTE(review): listing gap -- return type, braces and the return
   statements (original lines 5787-5790) are not shown.  The visible
   loop scans the scalarizer dims; ss->dim[n] != n indicates the SS
   dimensions are permuted relative to loop order (e.g. TRANSPOSE),
   which is presumably what the omitted returns report -- TODO confirm
   against the full trans-array.c.  */
5781 transposed_dims (gfc_ss *ss)
5785 for (n = 0; n < ss->dimen; n++)
5786 if (ss->dim[n] != n)
5791 /* Convert an array for passing as an actual argument. Expressions and
5792 vector subscripts are evaluated and stored in a temporary, which is then
5793 passed. For whole arrays the descriptor is passed. For array sections
5794 a modified copy of the descriptor is passed, but using the original data.
5796 This function is also used for array pointer assignments, and there
5799 - se->want_pointer && !se->direct_byref
5800 EXPR is an actual argument. On exit, se->expr contains a
5801 pointer to the array descriptor.
5803 - !se->want_pointer && !se->direct_byref
5804 EXPR is an actual argument to an intrinsic function or the
5805 left-hand side of a pointer assignment. On exit, se->expr
5806 contains the descriptor for EXPR.
5808 - !se->want_pointer && se->direct_byref
5809 EXPR is the right-hand side of a pointer assignment and
5810 se->expr is the descriptor for the previously-evaluated
5811 left-hand side. The function creates an assignment from
5815 The se->force_tmp flag disables the non-copying descriptor optimization
5816 that is used for transpose. It may be used in cases where there is an
5817 alias between the transpose argument and another argument in the same
5821 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
5823 gfc_ss_type ss_type;
5824 gfc_ss_info *ss_info;
5826 gfc_array_info *info;
5835 bool subref_array_target = false;
5836 gfc_expr *arg, *ss_expr;
5838 gcc_assert (ss != NULL);
5839 gcc_assert (ss != gfc_ss_terminator);
5842 ss_type = ss_info->type;
5843 ss_expr = ss_info->expr;
5845 /* Special case things we know we can pass easily. */
5846 switch (expr->expr_type)
5849 /* If we have a linear array section, we can pass it directly.
5850 Otherwise we need to copy it into a temporary. */
5852 gcc_assert (ss_type == GFC_SS_SECTION);
5853 gcc_assert (ss_expr == expr);
5854 info = &ss_info->data.array;
5856 /* Get the descriptor for the array. */
5857 gfc_conv_ss_descriptor (&se->pre, ss, 0);
5858 desc = info->descriptor;
5860 subref_array_target = se->direct_byref && is_subref_array (expr);
5861 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
5862 && !subref_array_target;
5869 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
5871 /* Create a new descriptor if the array doesn't have one. */
5874 else if (info->ref->u.ar.type == AR_FULL)
5876 else if (se->direct_byref)
5879 full = gfc_full_array_ref_p (info->ref, NULL);
5881 if (full && !transposed_dims (ss))
5883 if (se->direct_byref && !se->byref_noassign)
5885 /* Copy the descriptor for pointer assignments. */
5886 gfc_add_modify (&se->pre, se->expr, desc);
5888 /* Add any offsets from subreferences. */
5889 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
5890 subref_array_target, expr);
5892 else if (se->want_pointer)
5894 /* We pass full arrays directly. This means that pointers and
5895 allocatable arrays should also work. */
5896 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
5903 if (expr->ts.type == BT_CHARACTER)
5904 se->string_length = gfc_get_expr_charlen (expr);
5912 /* We don't need to copy data in some cases. */
5913 arg = gfc_get_noncopying_intrinsic_argument (expr);
5916 /* This is a call to transpose... */
5917 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
5918 /* ... which has already been handled by the scalarizer, so
5919 that we just need to get its argument's descriptor. */
5920 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
5924 /* A transformational function return value will be a temporary
5925 array descriptor. We still need to go through the scalarizer
5926 to create the descriptor. Elemental functions ar handled as
5927 arbitrary expressions, i.e. copy to a temporary. */
5929 if (se->direct_byref)
5931 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
5933 /* For pointer assignments pass the descriptor directly. */
5937 gcc_assert (se->ss == ss);
5938 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
5939 gfc_conv_expr (se, expr);
5943 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
5945 if (ss_expr != expr)
5946 /* Elemental function. */
5947 gcc_assert ((expr->value.function.esym != NULL
5948 && expr->value.function.esym->attr.elemental)
5949 || (expr->value.function.isym != NULL
5950 && expr->value.function.isym->elemental));
5952 gcc_assert (ss_type == GFC_SS_INTRINSIC);
5955 if (expr->ts.type == BT_CHARACTER
5956 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
5957 get_array_charlen (expr, se);
5963 /* Transformational function. */
5964 info = &ss_info->data.array;
5970 /* Constant array constructors don't need a temporary. */
5971 if (ss_type == GFC_SS_CONSTRUCTOR
5972 && expr->ts.type != BT_CHARACTER
5973 && gfc_constant_array_constructor_p (expr->value.constructor))
5976 info = &ss_info->data.array;
5986 /* Something complicated. Copy it into a temporary. */
5992 /* If we are creating a temporary, we don't need to bother about aliases
5997 gfc_init_loopinfo (&loop);
5999 /* Associate the SS with the loop. */
6000 gfc_add_ss_to_loop (&loop, ss);
6002 /* Tell the scalarizer not to bother creating loop variables, etc. */
6004 loop.array_parameter = 1;
6006 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6007 gcc_assert (!se->direct_byref);
6009 /* Setup the scalarizing loops and bounds. */
6010 gfc_conv_ss_startstride (&loop);
6014 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6015 get_array_charlen (expr, se);
6017 /* Tell the scalarizer to make a temporary. */
6018 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6019 ((expr->ts.type == BT_CHARACTER)
6020 ? expr->ts.u.cl->backend_decl
6024 se->string_length = loop.temp_ss->info->string_length;
6025 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6026 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6029 gfc_conv_loop_setup (&loop, & expr->where);
6033 /* Copy into a temporary and pass that. We don't need to copy the data
6034 back because expressions and vector subscripts must be INTENT_IN. */
6035 /* TODO: Optimize passing function return values. */
6039 /* Start the copying loops. */
6040 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6041 gfc_mark_ss_chain_used (ss, 1);
6042 gfc_start_scalarized_body (&loop, &block);
6044 /* Copy each data element. */
6045 gfc_init_se (&lse, NULL);
6046 gfc_copy_loopinfo_to_se (&lse, &loop);
6047 gfc_init_se (&rse, NULL);
6048 gfc_copy_loopinfo_to_se (&rse, &loop);
6050 lse.ss = loop.temp_ss;
6053 gfc_conv_scalarized_array_ref (&lse, NULL);
6054 if (expr->ts.type == BT_CHARACTER)
6056 gfc_conv_expr (&rse, expr);
6057 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6058 rse.expr = build_fold_indirect_ref_loc (input_location,
6062 gfc_conv_expr_val (&rse, expr);
6064 gfc_add_block_to_block (&block, &rse.pre);
6065 gfc_add_block_to_block (&block, &lse.pre);
6067 lse.string_length = rse.string_length;
6068 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
6069 expr->expr_type == EXPR_VARIABLE
6070 || expr->expr_type == EXPR_ARRAY, true);
6071 gfc_add_expr_to_block (&block, tmp);
6073 /* Finish the copying loops. */
6074 gfc_trans_scalarizing_loops (&loop, &block);
6076 desc = loop.temp_ss->info->data.array.descriptor;
6078 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6080 desc = info->descriptor;
6081 se->string_length = ss_info->string_length;
6085 /* We pass sections without copying to a temporary. Make a new
6086 descriptor and point it at the section we want. The loop variable
6087 limits will be the limits of the section.
6088 A function may decide to repack the array to speed up access, but
6089 we're not bothered about that here. */
6090 int dim, ndim, codim;
6098 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
6100 if (se->want_coarray)
6102 gfc_array_ref *ar = &info->ref->u.ar;
6104 codim = gfc_get_corank (expr);
6105 for (n = 0; n < codim - 1; n++)
6107 /* Make sure we are not lost somehow. */
6108 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
6110 /* Make sure the call to gfc_conv_section_startstride won't
6111 generate unnecessary code to calculate stride. */
6112 gcc_assert (ar->stride[n + ndim] == NULL);
6114 gfc_conv_section_startstride (&loop, ss, n + ndim);
6115 loop.from[n + loop.dimen] = info->start[n + ndim];
6116 loop.to[n + loop.dimen] = info->end[n + ndim];
6119 gcc_assert (n == codim - 1);
6120 evaluate_bound (&loop.pre, info->start, ar->start,
6121 info->descriptor, n + ndim, true);
6122 loop.from[n + loop.dimen] = info->start[n + ndim];
6127 /* Set the string_length for a character array. */
6128 if (expr->ts.type == BT_CHARACTER)
6129 se->string_length = gfc_get_expr_charlen (expr);
6131 desc = info->descriptor;
6132 if (se->direct_byref && !se->byref_noassign)
6134 /* For pointer assignments we fill in the destination. */
6136 parmtype = TREE_TYPE (parm);
6140 /* Otherwise make a new one. */
6141 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6142 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6143 loop.from, loop.to, 0,
6144 GFC_ARRAY_UNKNOWN, false);
6145 parm = gfc_create_var (parmtype, "parm");
6148 offset = gfc_index_zero_node;
6150 /* The following can be somewhat confusing. We have two
6151 descriptors, a new one and the original array.
6152 {parm, parmtype, dim} refer to the new one.
6153 {desc, type, n, loop} refer to the original, which maybe
6154 a descriptorless array.
6155 The bounds of the scalarization are the bounds of the section.
6156 We don't have to worry about numeric overflows when calculating
6157 the offsets because all elements are within the array data. */
6159 /* Set the dtype. */
6160 tmp = gfc_conv_descriptor_dtype (parm);
6161 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6163 /* Set offset for assignments to pointer only to zero if it is not
6165 if (se->direct_byref
6166 && info->ref && info->ref->u.ar.type != AR_FULL)
6167 base = gfc_index_zero_node;
6168 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6169 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
6173 for (n = 0; n < ndim; n++)
6175 stride = gfc_conv_array_stride (desc, n);
6177 /* Work out the offset. */
6179 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6181 gcc_assert (info->subscript[n]
6182 && info->subscript[n]->info->type == GFC_SS_SCALAR);
6183 start = info->subscript[n]->info->data.scalar.value;
6187 /* Evaluate and remember the start of the section. */
6188 start = info->start[n];
6189 stride = gfc_evaluate_now (stride, &loop.pre);
6192 tmp = gfc_conv_array_lbound (desc, n);
6193 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6195 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6197 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6201 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6203 /* For elemental dimensions, we only need the offset. */
6207 /* Vector subscripts need copying and are handled elsewhere. */
6209 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6211 /* look for the corresponding scalarizer dimension: dim. */
6212 for (dim = 0; dim < ndim; dim++)
6213 if (ss->dim[dim] == n)
6216 /* loop exited early: the DIM being looked for has been found. */
6217 gcc_assert (dim < ndim);
6219 /* Set the new lower bound. */
6220 from = loop.from[dim];
6223 /* If we have an array section or are assigning make sure that
6224 the lower bound is 1. References to the full
6225 array should otherwise keep the original bounds. */
6227 || info->ref->u.ar.type != AR_FULL)
6228 && !integer_onep (from))
6230 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6231 gfc_array_index_type, gfc_index_one_node,
6233 to = fold_build2_loc (input_location, PLUS_EXPR,
6234 gfc_array_index_type, to, tmp);
6235 from = gfc_index_one_node;
6237 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6238 gfc_rank_cst[dim], from);
6240 /* Set the new upper bound. */
6241 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6242 gfc_rank_cst[dim], to);
6244 /* Multiply the stride by the section stride to get the
6246 stride = fold_build2_loc (input_location, MULT_EXPR,
6247 gfc_array_index_type,
6248 stride, info->stride[n]);
6250 if (se->direct_byref
6252 && info->ref->u.ar.type != AR_FULL)
6254 base = fold_build2_loc (input_location, MINUS_EXPR,
6255 TREE_TYPE (base), base, stride);
6257 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6259 tmp = gfc_conv_array_lbound (desc, n);
6260 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6261 TREE_TYPE (base), tmp, loop.from[dim]);
6262 tmp = fold_build2_loc (input_location, MULT_EXPR,
6263 TREE_TYPE (base), tmp,
6264 gfc_conv_array_stride (desc, n));
6265 base = fold_build2_loc (input_location, PLUS_EXPR,
6266 TREE_TYPE (base), tmp, base);
6269 /* Store the new stride. */
6270 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6271 gfc_rank_cst[dim], stride);
6274 for (n = loop.dimen; n < loop.dimen + codim; n++)
6276 from = loop.from[n];
6278 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6279 gfc_rank_cst[n], from);
6280 if (n < loop.dimen + codim - 1)
6281 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6282 gfc_rank_cst[n], to);
6285 if (se->data_not_needed)
6286 gfc_conv_descriptor_data_set (&loop.pre, parm,
6287 gfc_index_zero_node);
6289 /* Point the data pointer at the 1st element in the section. */
6290 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6291 subref_array_target, expr);
6293 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6294 && !se->data_not_needed)
6296 /* Set the offset. */
6297 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6301 /* Only the callee knows what the correct offset is, so just set
6303 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6308 if (!se->direct_byref || se->byref_noassign)
6310 /* Get a pointer to the new descriptor. */
6311 if (se->want_pointer)
6312 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6317 gfc_add_block_to_block (&se->pre, &loop.pre);
6318 gfc_add_block_to_block (&se->post, &loop.post);
6320 /* Cleanup the scalarizer. */
6321 gfc_cleanup_loop (&loop);
6324 /* Helper function for gfc_conv_array_parameter if array size needs to be
6328 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6331 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6332 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6333 else if (expr->rank > 1)
6334 *size = build_call_expr_loc (input_location,
6335 gfor_fndecl_size0, 1,
6336 gfc_build_addr_expr (NULL, desc));
6339 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6340 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6342 *size = fold_build2_loc (input_location, MINUS_EXPR,
6343 gfc_array_index_type, ubound, lbound);
6344 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6345 *size, gfc_index_one_node);
6346 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6347 *size, gfc_index_zero_node);
6349 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6350 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6351 *size, fold_convert (gfc_array_index_type, elem));
6354 /* Convert an array for passing as an actual parameter. */
6355 /* TODO: Optimize passing g77 arrays. */
6358 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6359 const gfc_symbol *fsym, const char *proc_name,
6364 tree tmp = NULL_TREE;
6366 tree parent = DECL_CONTEXT (current_function_decl);
6367 bool full_array_var;
6368 bool this_array_result;
6371 bool array_constructor;
6372 bool good_allocatable;
6373 bool ultimate_ptr_comp;
6374 bool ultimate_alloc_comp;
6379 ultimate_ptr_comp = false;
6380 ultimate_alloc_comp = false;
6382 for (ref = expr->ref; ref; ref = ref->next)
6384 if (ref->next == NULL)
6387 if (ref->type == REF_COMPONENT)
6389 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6390 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6394 full_array_var = false;
6397 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6398 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6400 sym = full_array_var ? expr->symtree->n.sym : NULL;
6402 /* The symbol should have an array specification. */
6403 gcc_assert (!sym || sym->as || ref->u.ar.as);
6405 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6407 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6408 expr->ts.u.cl->backend_decl = tmp;
6409 se->string_length = tmp;
6412 /* Is this the result of the enclosing procedure? */
6413 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6414 if (this_array_result
6415 && (sym->backend_decl != current_function_decl)
6416 && (sym->backend_decl != parent))
6417 this_array_result = false;
6419 /* Passing address of the array if it is not pointer or assumed-shape. */
6420 if (full_array_var && g77 && !this_array_result)
6422 tmp = gfc_get_symbol_decl (sym);
6424 if (sym->ts.type == BT_CHARACTER)
6425 se->string_length = sym->ts.u.cl->backend_decl;
6427 if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6429 gfc_conv_expr_descriptor (se, expr, ss);
6430 se->expr = gfc_conv_array_data (se->expr);
6434 if (!sym->attr.pointer
6436 && sym->as->type != AS_ASSUMED_SHAPE
6437 && !sym->attr.allocatable)
6439 /* Some variables are declared directly, others are declared as
6440 pointers and allocated on the heap. */
6441 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6444 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6446 array_parameter_size (tmp, expr, size);
6450 if (sym->attr.allocatable)
6452 if (sym->attr.dummy || sym->attr.result)
6454 gfc_conv_expr_descriptor (se, expr, ss);
6458 array_parameter_size (tmp, expr, size);
6459 se->expr = gfc_conv_array_data (tmp);
6464 /* A convenient reduction in scope. */
6465 contiguous = g77 && !this_array_result && contiguous;
6467 /* There is no need to pack and unpack the array, if it is contiguous
6468 and not a deferred- or assumed-shape array, or if it is simply
6470 no_pack = ((sym && sym->as
6471 && !sym->attr.pointer
6472 && sym->as->type != AS_DEFERRED
6473 && sym->as->type != AS_ASSUMED_SHAPE)
6475 (ref && ref->u.ar.as
6476 && ref->u.ar.as->type != AS_DEFERRED
6477 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6479 gfc_is_simply_contiguous (expr, false));
6481 no_pack = contiguous && no_pack;
6483 /* Array constructors are always contiguous and do not need packing. */
6484 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6486 /* Same is true of contiguous sections from allocatable variables. */
6487 good_allocatable = contiguous
6489 && expr->symtree->n.sym->attr.allocatable;
6491 /* Or ultimate allocatable components. */
6492 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
6494 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6496 gfc_conv_expr_descriptor (se, expr, ss);
6497 if (expr->ts.type == BT_CHARACTER)
6498 se->string_length = expr->ts.u.cl->backend_decl;
6500 array_parameter_size (se->expr, expr, size);
6501 se->expr = gfc_conv_array_data (se->expr);
6505 if (this_array_result)
6507 /* Result of the enclosing function. */
6508 gfc_conv_expr_descriptor (se, expr, ss);
6510 array_parameter_size (se->expr, expr, size);
6511 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6513 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
6514 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
6515 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
6522 /* Every other type of array. */
6523 se->want_pointer = 1;
6524 gfc_conv_expr_descriptor (se, expr, ss);
6526 array_parameter_size (build_fold_indirect_ref_loc (input_location,
6531 /* Deallocate the allocatable components of structures that are
6533 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
6534 && expr->ts.u.derived->attr.alloc_comp
6535 && expr->expr_type != EXPR_VARIABLE)
6537 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
6538 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
6540 /* The components shall be deallocated before their containing entity. */
6541 gfc_prepend_expr_to_block (&se->post, tmp);
6544 if (g77 || (fsym && fsym->attr.contiguous
6545 && !gfc_is_simply_contiguous (expr, false)))
6547 tree origptr = NULL_TREE;
6551 /* For contiguous arrays, save the original value of the descriptor. */
6554 origptr = gfc_create_var (pvoid_type_node, "origptr");
6555 tmp = build_fold_indirect_ref_loc (input_location, desc);
6556 tmp = gfc_conv_array_data (tmp);
6557 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6558 TREE_TYPE (origptr), origptr,
6559 fold_convert (TREE_TYPE (origptr), tmp));
6560 gfc_add_expr_to_block (&se->pre, tmp);
6563 /* Repack the array. */
6564 if (gfc_option.warn_array_temp)
6567 gfc_warning ("Creating array temporary at %L for argument '%s'",
6568 &expr->where, fsym->name);
6570 gfc_warning ("Creating array temporary at %L", &expr->where);
6573 ptr = build_call_expr_loc (input_location,
6574 gfor_fndecl_in_pack, 1, desc);
6576 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6578 tmp = gfc_conv_expr_present (sym);
6579 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
6580 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
6581 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
6584 ptr = gfc_evaluate_now (ptr, &se->pre);
6586 /* Use the packed data for the actual argument, except for contiguous arrays,
6587 where the descriptor's data component is set. */
6592 tmp = build_fold_indirect_ref_loc (input_location, desc);
6593 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
6596 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
6600 if (fsym && proc_name)
6601 asprintf (&msg, "An array temporary was created for argument "
6602 "'%s' of procedure '%s'", fsym->name, proc_name);
6604 asprintf (&msg, "An array temporary was created");
6606 tmp = build_fold_indirect_ref_loc (input_location,
6608 tmp = gfc_conv_array_data (tmp);
6609 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6610 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6612 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6613 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6615 gfc_conv_expr_present (sym), tmp);
6617 gfc_trans_runtime_check (false, true, tmp, &se->pre,
6622 gfc_start_block (&block);
6624 /* Copy the data back. */
6625 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
6627 tmp = build_call_expr_loc (input_location,
6628 gfor_fndecl_in_unpack, 2, desc, ptr);
6629 gfc_add_expr_to_block (&block, tmp);
6632 /* Free the temporary. */
6633 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
6634 gfc_add_expr_to_block (&block, tmp);
6636 stmt = gfc_finish_block (&block);
6638 gfc_init_block (&block);
6639 /* Only if it was repacked. This code needs to be executed before the
6640 loop cleanup code. */
6641 tmp = build_fold_indirect_ref_loc (input_location,
6643 tmp = gfc_conv_array_data (tmp);
6644 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6645 fold_convert (TREE_TYPE (tmp), ptr), tmp);
6647 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
6648 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
6650 gfc_conv_expr_present (sym), tmp);
6652 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6654 gfc_add_expr_to_block (&block, tmp);
6655 gfc_add_block_to_block (&block, &se->post);
6657 gfc_init_block (&se->post);
6659 /* Reset the descriptor pointer. */
6662 tmp = build_fold_indirect_ref_loc (input_location, desc);
6663 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
6666 gfc_add_block_to_block (&se->post, &block);
6671 /* Generate code to deallocate an array, if it is allocated. */
6674 gfc_trans_dealloc_allocated (tree descriptor)
6680 gfc_start_block (&block);
6682 var = gfc_conv_descriptor_data_get (descriptor);
6685 /* Call array_deallocate with an int * present in the second argument.
6686 Although it is ignored here, it's presence ensures that arrays that
6687 are already deallocated are ignored. */
6688 tmp = gfc_deallocate_with_status (var, NULL_TREE, true, NULL);
6689 gfc_add_expr_to_block (&block, tmp);
6691 /* Zero the data pointer. */
6692 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6693 var, build_int_cst (TREE_TYPE (var), 0));
6694 gfc_add_expr_to_block (&block, tmp);
6696 return gfc_finish_block (&block);
6700 /* This helper function calculates the size in words of a full array. */
6703 get_full_array_size (stmtblock_t *block, tree decl, int rank)
6708 idx = gfc_rank_cst[rank - 1];
6709 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
6710 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
6711 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
6713 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6714 tmp, gfc_index_one_node);
6715 tmp = gfc_evaluate_now (tmp, block);
6717 nelems = gfc_conv_descriptor_stride_get (decl, idx);
6718 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6720 return gfc_evaluate_now (tmp, block);
6724 /* Allocate dest to the same size as src, and copy src -> dest.
6725 If no_malloc is set, only the copy is done. */
6728 duplicate_allocatable (tree dest, tree src, tree type, int rank,
6738 /* If the source is null, set the destination to null. Then,
6739 allocate memory to the destination. */
6740 gfc_init_block (&block);
6744 tmp = null_pointer_node;
6745 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
6746 gfc_add_expr_to_block (&block, tmp);
6747 null_data = gfc_finish_block (&block);
6749 gfc_init_block (&block);
6750 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
6753 tmp = gfc_call_malloc (&block, type, size);
6754 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
6755 dest, fold_convert (type, tmp));
6756 gfc_add_expr_to_block (&block, tmp);
6759 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6760 tmp = build_call_expr_loc (input_location, tmp, 3,
6765 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
6766 null_data = gfc_finish_block (&block);
6768 gfc_init_block (&block);
6769 nelems = get_full_array_size (&block, src, rank);
6770 tmp = fold_convert (gfc_array_index_type,
6771 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
6772 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6776 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
6777 tmp = gfc_call_malloc (&block, tmp, size);
6778 gfc_conv_descriptor_data_set (&block, dest, tmp);
6781 /* We know the temporary and the value will be the same length,
6782 so can use memcpy. */
6783 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
6784 tmp = build_call_expr_loc (input_location,
6785 tmp, 3, gfc_conv_descriptor_data_get (dest),
6786 gfc_conv_descriptor_data_get (src), size);
6789 gfc_add_expr_to_block (&block, tmp);
6790 tmp = gfc_finish_block (&block);
6792 /* Null the destination if the source is null; otherwise do
6793 the allocate and copy. */
6797 null_cond = gfc_conv_descriptor_data_get (src);
6799 null_cond = convert (pvoid_type_node, null_cond);
6800 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6801 null_cond, null_pointer_node);
6802 return build3_v (COND_EXPR, null_cond, tmp, null_data);
6806 /* Allocate dest to the same size as src, and copy data src -> dest. */
6809 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
6811 return duplicate_allocatable (dest, src, type, rank, false);
6815 /* Copy data src -> dest. */
6818 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
6820 return duplicate_allocatable (dest, src, type, rank, true);
6824 /* Recursively traverse an object of derived type, generating code to
6825 deallocate, nullify or copy allocatable components. This is the work horse
6826 function for the functions named in this enum. */
6828 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
6829 COPY_ONLY_ALLOC_COMP};
6832 structure_alloc_comps (gfc_symbol * der_type, tree decl,
6833 tree dest, int rank, int purpose)
6837 stmtblock_t fnblock;
6838 stmtblock_t loopbody;
6849 tree null_cond = NULL_TREE;
6851 gfc_init_block (&fnblock);
6853 decl_type = TREE_TYPE (decl);
6855 if ((POINTER_TYPE_P (decl_type) && rank != 0)
6856 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
6858 decl = build_fold_indirect_ref_loc (input_location,
6861 /* Just in case in gets dereferenced. */
6862 decl_type = TREE_TYPE (decl);
6864 /* If this an array of derived types with allocatable components
6865 build a loop and recursively call this function. */
6866 if (TREE_CODE (decl_type) == ARRAY_TYPE
6867 || GFC_DESCRIPTOR_TYPE_P (decl_type))
6869 tmp = gfc_conv_array_data (decl);
6870 var = build_fold_indirect_ref_loc (input_location,
6873 /* Get the number of elements - 1 and set the counter. */
6874 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
6876 /* Use the descriptor for an allocatable array. Since this
6877 is a full array reference, we only need the descriptor
6878 information from dimension = rank. */
6879 tmp = get_full_array_size (&fnblock, decl, rank);
6880 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6881 gfc_array_index_type, tmp,
6882 gfc_index_one_node);
6884 null_cond = gfc_conv_descriptor_data_get (decl);
6885 null_cond = fold_build2_loc (input_location, NE_EXPR,
6886 boolean_type_node, null_cond,
6887 build_int_cst (TREE_TYPE (null_cond), 0));
6891 /* Otherwise use the TYPE_DOMAIN information. */
6892 tmp = array_type_nelts (decl_type);
6893 tmp = fold_convert (gfc_array_index_type, tmp);
6896 /* Remember that this is, in fact, the no. of elements - 1. */
6897 nelems = gfc_evaluate_now (tmp, &fnblock);
6898 index = gfc_create_var (gfc_array_index_type, "S");
6900 /* Build the body of the loop. */
6901 gfc_init_block (&loopbody);
6903 vref = gfc_build_array_ref (var, index, NULL);
6905 if (purpose == COPY_ALLOC_COMP)
6907 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
6909 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
6910 gfc_add_expr_to_block (&fnblock, tmp);
6912 tmp = build_fold_indirect_ref_loc (input_location,
6913 gfc_conv_array_data (dest));
6914 dref = gfc_build_array_ref (tmp, index, NULL);
6915 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
6917 else if (purpose == COPY_ONLY_ALLOC_COMP)
6919 tmp = build_fold_indirect_ref_loc (input_location,
6920 gfc_conv_array_data (dest));
6921 dref = gfc_build_array_ref (tmp, index, NULL);
6922 tmp = structure_alloc_comps (der_type, vref, dref, rank,
6926 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
6928 gfc_add_expr_to_block (&loopbody, tmp);
6930 /* Build the loop and return. */
6931 gfc_init_loopinfo (&loop);
6933 loop.from[0] = gfc_index_zero_node;
6934 loop.loopvar[0] = index;
6935 loop.to[0] = nelems;
6936 gfc_trans_scalarizing_loops (&loop, &loopbody);
6937 gfc_add_block_to_block (&fnblock, &loop.pre);
6939 tmp = gfc_finish_block (&fnblock);
6940 if (null_cond != NULL_TREE)
6941 tmp = build3_v (COND_EXPR, null_cond, tmp,
6942 build_empty_stmt (input_location));
6947 /* Otherwise, act on the components or recursively call self to
6948 act on a chain of components. */
6949 for (c = der_type->components; c; c = c->next)
6951 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
6952 || c->ts.type == BT_CLASS)
6953 && c->ts.u.derived->attr.alloc_comp;
6954 cdecl = c->backend_decl;
6955 ctype = TREE_TYPE (cdecl);
6959 case DEALLOCATE_ALLOC_COMP:
6960 if (cmp_has_alloc_comps && !c->attr.pointer)
6962 /* Do not deallocate the components of ultimate pointer
6964 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6965 decl, cdecl, NULL_TREE);
6966 rank = c->as ? c->as->rank : 0;
6967 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
6969 gfc_add_expr_to_block (&fnblock, tmp);
6972 if (c->attr.allocatable
6973 && (c->attr.dimension || c->attr.codimension))
6975 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6976 decl, cdecl, NULL_TREE);
6977 tmp = gfc_trans_dealloc_allocated (comp);
6978 gfc_add_expr_to_block (&fnblock, tmp);
6980 else if (c->attr.allocatable)
6982 /* Allocatable scalar components. */
6983 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6984 decl, cdecl, NULL_TREE);
6986 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
6988 gfc_add_expr_to_block (&fnblock, tmp);
6990 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
6991 void_type_node, comp,
6992 build_int_cst (TREE_TYPE (comp), 0));
6993 gfc_add_expr_to_block (&fnblock, tmp);
6995 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
6997 /* Allocatable scalar CLASS components. */
6998 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
6999 decl, cdecl, NULL_TREE);
7001 /* Add reference to '_data' component. */
7002 tmp = CLASS_DATA (c)->backend_decl;
7003 comp = fold_build3_loc (input_location, COMPONENT_REF,
7004 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7006 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
7007 CLASS_DATA (c)->ts);
7008 gfc_add_expr_to_block (&fnblock, tmp);
7010 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7011 void_type_node, comp,
7012 build_int_cst (TREE_TYPE (comp), 0));
7013 gfc_add_expr_to_block (&fnblock, tmp);
7017 case NULLIFY_ALLOC_COMP:
7018 if (c->attr.pointer)
7020 else if (c->attr.allocatable
7021 && (c->attr.dimension|| c->attr.codimension))
7023 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7024 decl, cdecl, NULL_TREE);
7025 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7027 else if (c->attr.allocatable)
7029 /* Allocatable scalar components. */
7030 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7031 decl, cdecl, NULL_TREE);
7032 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7033 void_type_node, comp,
7034 build_int_cst (TREE_TYPE (comp), 0));
7035 gfc_add_expr_to_block (&fnblock, tmp);
7037 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7039 /* Allocatable scalar CLASS components. */
7040 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7041 decl, cdecl, NULL_TREE);
7042 /* Add reference to '_data' component. */
7043 tmp = CLASS_DATA (c)->backend_decl;
7044 comp = fold_build3_loc (input_location, COMPONENT_REF,
7045 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7046 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7047 void_type_node, comp,
7048 build_int_cst (TREE_TYPE (comp), 0));
7049 gfc_add_expr_to_block (&fnblock, tmp);
7051 else if (cmp_has_alloc_comps)
7053 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7054 decl, cdecl, NULL_TREE);
7055 rank = c->as ? c->as->rank : 0;
7056 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7058 gfc_add_expr_to_block (&fnblock, tmp);
7062 case COPY_ALLOC_COMP:
7063 if (c->attr.pointer)
7066 /* We need source and destination components. */
7067 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
7069 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
7071 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
7073 if (c->attr.allocatable && !cmp_has_alloc_comps)
7075 rank = c->as ? c->as->rank : 0;
7076 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
7077 gfc_add_expr_to_block (&fnblock, tmp);
7080 if (cmp_has_alloc_comps)
7082 rank = c->as ? c->as->rank : 0;
7083 tmp = fold_convert (TREE_TYPE (dcmp), comp);
7084 gfc_add_modify (&fnblock, dcmp, tmp);
7085 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
7087 gfc_add_expr_to_block (&fnblock, tmp);
7097 return gfc_finish_block (&fnblock);
7100 /* Recursively traverse an object of derived type, generating code to
7101 nullify allocatable components. */
7104 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7106 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7107 NULLIFY_ALLOC_COMP);
7111 /* Recursively traverse an object of derived type, generating code to
7112 deallocate allocatable components. */
7115 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7117 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7118 DEALLOCATE_ALLOC_COMP);
7122 /* Recursively traverse an object of derived type, generating code to
7123 copy it and its allocatable components. */
7126 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7128 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
7132 /* Recursively traverse an object of derived type, generating code to
7133 copy only its allocatable components. */
7136 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7138 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
7142 /* Returns the value of LBOUND for an expression. This could be broken out
7143 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
7144 called by gfc_alloc_allocatable_for_assignment. */
7146 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
7151 tree cond, cond1, cond3, cond4;
7155 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
7157 tmp = gfc_rank_cst[dim];
7158 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
7159 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
7160 stride = gfc_conv_descriptor_stride_get (desc, tmp);
7161 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7163 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7164 stride, gfc_index_zero_node);
7165 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7166 boolean_type_node, cond3, cond1);
7167 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
7168 stride, gfc_index_zero_node);
7170 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7171 tmp, build_int_cst (gfc_array_index_type,
7174 cond = boolean_false_node;
7176 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7177 boolean_type_node, cond3, cond4);
7178 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7179 boolean_type_node, cond, cond1);
7181 return fold_build3_loc (input_location, COND_EXPR,
7182 gfc_array_index_type, cond,
7183 lbound, gfc_index_one_node);
7185 else if (expr->expr_type == EXPR_VARIABLE)
7187 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7188 for (ref = expr->ref; ref; ref = ref->next)
7190 if (ref->type == REF_COMPONENT
7191 && ref->u.c.component->as
7193 && ref->next->u.ar.type == AR_FULL)
7194 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
7196 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
7198 else if (expr->expr_type == EXPR_FUNCTION)
7200 /* A conversion function, so use the argument. */
7201 expr = expr->value.function.actual->expr;
7202 if (expr->expr_type != EXPR_VARIABLE)
7203 return gfc_index_one_node;
7204 desc = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7205 return get_std_lbound (expr, desc, dim, assumed_size);
7208 return gfc_index_one_node;
7212 /* Returns true if an expression represents an lhs that can be reallocated
7216 gfc_is_reallocatable_lhs (gfc_expr *expr)
7223 /* An allocatable variable. */
7224 if (expr->symtree->n.sym->attr.allocatable
7226 && expr->ref->type == REF_ARRAY
7227 && expr->ref->u.ar.type == AR_FULL)
7230 /* All that can be left are allocatable components. */
7231 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
7232 && expr->symtree->n.sym->ts.type != BT_CLASS)
7233 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
7236 /* Find a component ref followed by an array reference. */
7237 for (ref = expr->ref; ref; ref = ref->next)
7239 && ref->type == REF_COMPONENT
7240 && ref->next->type == REF_ARRAY
7241 && !ref->next->next)
7247 /* Return true if valid reallocatable lhs. */
7248 if (ref->u.c.component->attr.allocatable
7249 && ref->next->u.ar.type == AR_FULL)
7256 /* Allocate the lhs of an assignment to an allocatable array, otherwise
7260 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
7264 stmtblock_t realloc_block;
7265 stmtblock_t alloc_block;
7269 gfc_array_info *linfo;
7289 gfc_array_spec * as;
7291 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
7292 Find the lhs expression in the loop chain and set expr1 and
7293 expr2 accordingly. */
7294 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
7297 /* Find the ss for the lhs. */
7299 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7300 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
7302 if (lss == gfc_ss_terminator)
7304 expr1 = lss->info->expr;
7307 /* Bail out if this is not a valid allocate on assignment. */
7308 if (!gfc_is_reallocatable_lhs (expr1)
7309 || (expr2 && !expr2->rank))
7312 /* Find the ss for the lhs. */
7314 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7315 if (lss->info->expr == expr1)
7318 if (lss == gfc_ss_terminator)
7321 linfo = &lss->info->data.array;
7323 /* Find an ss for the rhs. For operator expressions, we see the
7324 ss's for the operands. Any one of these will do. */
7326 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7327 if (rss->info->expr != expr1 && rss != loop->temp_ss)
7330 if (expr2 && rss == gfc_ss_terminator)
7333 gfc_start_block (&fblock);
7335 /* Since the lhs is allocatable, this must be a descriptor type.
7336 Get the data and array size. */
7337 desc = linfo->descriptor;
7338 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
7339 array1 = gfc_conv_descriptor_data_get (desc);
7341 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
7342 deallocated if expr is an array of different shape or any of the
7343 corresponding length type parameter values of variable and expr
7344 differ." This assures F95 compatibility. */
7345 jump_label1 = gfc_build_label_decl (NULL_TREE);
7346 jump_label2 = gfc_build_label_decl (NULL_TREE);
7348 /* Allocate if data is NULL. */
7349 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7350 array1, build_int_cst (TREE_TYPE (array1), 0));
7351 tmp = build3_v (COND_EXPR, cond,
7352 build1_v (GOTO_EXPR, jump_label1),
7353 build_empty_stmt (input_location));
7354 gfc_add_expr_to_block (&fblock, tmp);
7356 /* Get arrayspec if expr is a full array. */
7357 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7358 && expr2->value.function.isym
7359 && expr2->value.function.isym->conversion)
7361 /* For conversion functions, take the arg. */
7362 gfc_expr *arg = expr2->value.function.actual->expr;
7363 as = gfc_get_full_arrayspec_from_expr (arg);
7366 as = gfc_get_full_arrayspec_from_expr (expr2);
7370 /* If the lhs shape is not the same as the rhs jump to setting the
7371 bounds and doing the reallocation....... */
7372 for (n = 0; n < expr1->rank; n++)
7374 /* Check the shape. */
7375 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7376 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7377 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7378 gfc_array_index_type,
7379 loop->to[n], loop->from[n]);
7380 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7381 gfc_array_index_type,
7383 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7384 gfc_array_index_type,
7386 cond = fold_build2_loc (input_location, NE_EXPR,
7388 tmp, gfc_index_zero_node);
7389 tmp = build3_v (COND_EXPR, cond,
7390 build1_v (GOTO_EXPR, jump_label1),
7391 build_empty_stmt (input_location));
7392 gfc_add_expr_to_block (&fblock, tmp);
7395 /* ....else jump past the (re)alloc code. */
7396 tmp = build1_v (GOTO_EXPR, jump_label2);
7397 gfc_add_expr_to_block (&fblock, tmp);
7399 /* Add the label to start automatic (re)allocation. */
7400 tmp = build1_v (LABEL_EXPR, jump_label1);
7401 gfc_add_expr_to_block (&fblock, tmp);
7403 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7405 /* Get the rhs size. Fix both sizes. */
7407 desc2 = rss->info->data.array.descriptor;
7410 size2 = gfc_index_one_node;
7411 for (n = 0; n < expr2->rank; n++)
7413 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7414 gfc_array_index_type,
7415 loop->to[n], loop->from[n]);
7416 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7417 gfc_array_index_type,
7418 tmp, gfc_index_one_node);
7419 size2 = fold_build2_loc (input_location, MULT_EXPR,
7420 gfc_array_index_type,
7424 size1 = gfc_evaluate_now (size1, &fblock);
7425 size2 = gfc_evaluate_now (size2, &fblock);
7427 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7429 neq_size = gfc_evaluate_now (cond, &fblock);
7432 /* Now modify the lhs descriptor and the associated scalarizer
7433 variables. F2003 7.4.1.3: "If variable is or becomes an
7434 unallocated allocatable variable, then it is allocated with each
7435 deferred type parameter equal to the corresponding type parameters
7436 of expr , with the shape of expr , and with each lower bound equal
7437 to the corresponding element of LBOUND(expr)."
7438 Reuse size1 to keep a dimension-by-dimension track of the
7439 stride of the new array. */
7440 size1 = gfc_index_one_node;
7441 offset = gfc_index_zero_node;
7443 for (n = 0; n < expr2->rank; n++)
7445 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7446 gfc_array_index_type,
7447 loop->to[n], loop->from[n]);
7448 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7449 gfc_array_index_type,
7450 tmp, gfc_index_one_node);
7452 lbound = gfc_index_one_node;
7457 lbd = get_std_lbound (expr2, desc2, n,
7458 as->type == AS_ASSUMED_SIZE);
7459 ubound = fold_build2_loc (input_location,
7461 gfc_array_index_type,
7463 ubound = fold_build2_loc (input_location,
7465 gfc_array_index_type,
7470 gfc_conv_descriptor_lbound_set (&fblock, desc,
7473 gfc_conv_descriptor_ubound_set (&fblock, desc,
7476 gfc_conv_descriptor_stride_set (&fblock, desc,
7479 lbound = gfc_conv_descriptor_lbound_get (desc,
7481 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
7482 gfc_array_index_type,
7484 offset = fold_build2_loc (input_location, MINUS_EXPR,
7485 gfc_array_index_type,
7487 size1 = fold_build2_loc (input_location, MULT_EXPR,
7488 gfc_array_index_type,
7492 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
7493 the array offset is saved and the info.offset is used for a
7494 running offset. Use the saved_offset instead. */
7495 tmp = gfc_conv_descriptor_offset (desc);
7496 gfc_add_modify (&fblock, tmp, offset);
7497 if (linfo->saved_offset
7498 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
7499 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
7501 /* Now set the deltas for the lhs. */
7502 for (n = 0; n < expr1->rank; n++)
7504 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7506 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7507 gfc_array_index_type, tmp,
7509 if (linfo->delta[dim]
7510 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
7511 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
7514 /* Get the new lhs size in bytes. */
7515 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
7517 tmp = expr2->ts.u.cl->backend_decl;
7518 gcc_assert (expr1->ts.u.cl->backend_decl);
7519 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
7520 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
7522 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
7524 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
7525 tmp = fold_build2_loc (input_location, MULT_EXPR,
7526 gfc_array_index_type, tmp,
7527 expr1->ts.u.cl->backend_decl);
7530 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
7531 tmp = fold_convert (gfc_array_index_type, tmp);
7532 size2 = fold_build2_loc (input_location, MULT_EXPR,
7533 gfc_array_index_type,
7535 size2 = fold_convert (size_type_node, size2);
7536 size2 = gfc_evaluate_now (size2, &fblock);
7538 /* Realloc expression. Note that the scalarizer uses desc.data
7539 in the array reference - (*desc.data)[<element>]. */
7540 gfc_init_block (&realloc_block);
7541 tmp = build_call_expr_loc (input_location,
7542 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
7543 fold_convert (pvoid_type_node, array1),
7545 gfc_conv_descriptor_data_set (&realloc_block,
7547 realloc_expr = gfc_finish_block (&realloc_block);
7549 /* Only reallocate if sizes are different. */
7550 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
7551 build_empty_stmt (input_location));
7555 /* Malloc expression. */
7556 gfc_init_block (&alloc_block);
7557 tmp = build_call_expr_loc (input_location,
7558 builtin_decl_explicit (BUILT_IN_MALLOC),
7560 gfc_conv_descriptor_data_set (&alloc_block,
7562 tmp = gfc_conv_descriptor_dtype (desc);
7563 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
7564 alloc_expr = gfc_finish_block (&alloc_block);
7566 /* Malloc if not allocated; realloc otherwise. */
7567 tmp = build_int_cst (TREE_TYPE (array1), 0);
7568 cond = fold_build2_loc (input_location, EQ_EXPR,
7571 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
7572 gfc_add_expr_to_block (&fblock, tmp);
7574 /* Make sure that the scalarizer data pointer is updated. */
7576 && TREE_CODE (linfo->data) == VAR_DECL)
7578 tmp = gfc_conv_descriptor_data_get (desc);
7579 gfc_add_modify (&fblock, linfo->data, tmp);
7582 /* Add the exit label. */
7583 tmp = build1_v (LABEL_EXPR, jump_label2);
7584 gfc_add_expr_to_block (&fblock, tmp);
7586 return gfc_finish_block (&fblock);
7590 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
7591 Do likewise, recursively if necessary, with the allocatable components of
/* Generate entry/exit code for deferred-shape array SYM: NULL the data
   pointer in the init block, free the array (and any allocatable
   components) in the cleanup block, and attach both to BLOCK.
   NOTE(review): this excerpt has elided lines; comments below describe
   only what the visible statements establish.  */
7595 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
7601 stmtblock_t cleanup;
7604 bool sym_has_alloc_comp;
/* Derived (or class) types with allocatable components need
   component-by-component nullify/deallocate treatment.  */
7606 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
7607 || sym->ts.type == BT_CLASS)
7608 && sym->ts.u.derived->attr.alloc_comp;
7610 /* Make sure the frontend gets these right. */
7611 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
7612 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
7613 "allocatable attribute or derived type without allocatable "
/* Point the backend locus at the declaration while emitting init code,
   so diagnostics/debug info reference the right source location.  */
7616 gfc_save_backend_locus (&loc);
7617 gfc_set_backend_locus (&sym->declared_at);
7618 gfc_init_block (&init);
7620 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
7621 || TREE_CODE (sym->backend_decl) == PARM_DECL);
/* A non-constant character length must be evaluated once, up front.  */
7623 if (sym->ts.type == BT_CHARACTER
7624 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
7626 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
7627 gfc_trans_vla_type_sizes (sym, &init);
7630 /* Dummy, use associated and result variables don't need anything special. */
7631 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
7633 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7634 gfc_restore_backend_locus (&loc);
7638 descriptor = sym->backend_decl;
7640 /* Although static, derived types with default initializers and
7641 allocatable components must not be nulled wholesale; instead they
7642 are treated component by component. */
7643 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
7645 /* SAVEd variables are not freed on exit. */
7646 gfc_trans_static_array_pointer (sym);
7648 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
7649 gfc_restore_backend_locus (&loc);
7653 /* Get the descriptor type. */
7654 type = TREE_TYPE (sym->backend_decl);
7656 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
7659 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
/* No default initializer: just NULL the allocatable components.
   Otherwise apply the type's default initialization instead.  */
7661 if (sym->value == NULL
7662 || !gfc_has_default_initializer (sym->ts.u.derived))
7664 rank = sym->as ? sym->as->rank : 0;
7665 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
7667 gfc_add_expr_to_block (&init, tmp);
7670 gfc_init_default_dt (sym, &init, false);
7673 else if (!GFC_DESCRIPTOR_TYPE_P (type))
7675 /* If the backend_decl is not a descriptor, we must have a pointer
7677 descriptor = build_fold_indirect_ref_loc (input_location,
7679 type = TREE_TYPE (descriptor);
7682 /* NULLIFY the data pointer. */
7683 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
7684 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
7686 gfc_restore_backend_locus (&loc);
7687 gfc_init_block (&cleanup);
7689 /* Allocatable arrays need to be freed when they go out of scope.
7690 The allocatable components of pointers must not be touched. */
7691 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
7692 && !sym->attr.pointer && !sym->attr.save)
7695 rank = sym->as ? sym->as->rank : 0;
7696 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
7697 gfc_add_expr_to_block (&cleanup, tmp);
/* Free the array itself on scope exit, unless it is SAVEd or a result.  */
7700 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
7701 && !sym->attr.save && !sym->attr.result)
7703 tmp = gfc_trans_dealloc_allocated (sym->backend_decl);
7704 gfc_add_expr_to_block (&cleanup, tmp);
/* Hand both generated blocks back to the caller's wrapped block.  */
7707 gfc_add_init_cleanup (block, gfc_finish_block (&init),
7708 gfc_finish_block (&cleanup));
7711 /************ Expression Walking Functions ******************/
7713 /* Walk a variable reference.
7715 Possible extension - multiple component subscripts.
7716 x(:,:) = foo%a(:)%b(:)
7718 forall (i=..., j=...)
7719 x(i,j) = foo%a(j)%b(i)
7721 This adds a fair amount of complexity because you need to deal with more
7722 than one ref. Maybe handle in a similar manner to vector subscripts.
7723 Maybe not worth the effort. */
/* Walk a variable expression: find the first non-elemental array
   reference in the ref chain and delegate to gfc_walk_array_ref.
   NOTE(review): lines are elided here; the no-array-ref fall-through
   path is not visible in this excerpt.  */
7727 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
7731 for (ref = expr->ref; ref; ref = ref->next)
7732 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
7735 return gfc_walk_array_ref (ss, expr, ref);
/* Build the SS chain for an array reference starting at REF: scalar
   SSes for substring bounds and elemental subscripts, a section SS for
   the array itself, and vector-index SSes for vector subscripts.  */
7740 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
7746 for (; ref; ref = ref->next)
7748 if (ref->type == REF_SUBSTRING)
/* Substring bounds are scalar expressions; add them to the chain.  */
7750 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
7751 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
7754 /* We're only interested in array sections from now on. */
7755 if (ref->type != REF_ARRAY)
/* Elemental subscripts are walked in reverse dimension order.  */
7763 for (n = ar->dimen - 1; n >= 0; n--)
7764 ss = gfc_get_scalar_ss (ss, ar->start[n]);
/* Full-array reference: one section SS covering every dimension.  */
7768 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
7769 newss->info->data.array.ref = ref;
7771 /* Make sure array is the same as array(:,:), this way
7772 we don't need to special case all the time. */
7773 ar->dimen = ar->as->rank;
7774 for (n = 0; n < ar->dimen; n++)
7776 ar->dimen_type[n] = DIMEN_RANGE;
/* A full reference must not carry explicit bounds or strides.  */
7778 gcc_assert (ar->start[n] == NULL);
7779 gcc_assert (ar->end[n] == NULL);
7780 gcc_assert (ar->stride[n] == NULL);
/* Section reference: dimensions are classified one by one below.  */
7786 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
7787 newss->info->data.array.ref = ref;
7789 /* We add SS chains for all the subscripts in the section. */
7790 for (n = 0; n < ar->dimen; n++)
7794 switch (ar->dimen_type[n])
7797 /* Add SS for elemental (scalar) subscripts. */
7798 gcc_assert (ar->start[n]);
7799 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
7800 indexss->loop_chain = gfc_ss_terminator;
7801 newss->info->data.array.subscript[n] = indexss;
7805 /* We don't add anything for sections, just remember this
7806 dimension for later. */
7807 newss->dim[newss->dimen] = n;
7812 /* Create a GFC_SS_VECTOR index in which we can store
7813 the vector's descriptor. */
7814 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
7816 indexss->loop_chain = gfc_ss_terminator;
7817 newss->info->data.array.subscript[n] = indexss;
/* Vector subscripts still contribute a scalarizer dimension.  */
7818 newss->dim[newss->dimen] = n;
7823 /* We should know what sort of section it is by now. */
7827 /* We should have at least one non-elemental dimension,
7828 unless we are creating a descriptor for a (scalar) coarray. */
7829 gcc_assert (newss->dimen > 0
7830 || newss->info->data.array.ref->u.ar.as->corank > 0);
7835 /* We should know what sort of section it is by now. */
7844 /* Walk an expression operator. If only one operand of a binary expression is
7845 scalar, we must also add the scalar term to the SS chain. */
/* Walk an operator expression.  If exactly one operand of a binary
   operator needs scalarization, a scalar SS is inserted for the other
   operand so both travel through the same loop.  */
7848 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
7853 head = gfc_walk_subexpr (ss, expr->value.op.op1);
/* Unary operator: only op1 exists.  */
7854 if (expr->value.op.op2 == NULL)
7857 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
7859 /* All operands are scalar. Pass back and let the caller deal with it. */
7863 /* All operands require scalarization. */
7864 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
7867 /* One of the operands needs scalarization, the other is scalar.
7868 Create a gfc_ss for the scalar expression. */
7871 /* First operand is scalar. We build the chain in reverse order, so
7872 add the scalar SS after the second operand. */
7874 while (head && head->next != ss)
7876 /* Check we haven't somehow broken the chain. */
7878 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
7880 else /* head2 == head */
7882 gcc_assert (head2 == head);
7883 /* Second operand is scalar. */
7884 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
7891 /* Reverse a SS chain. */
/* Reverse the SS chain SS in place, returning the new head.
   NOTE(review): the loop body's relinking statements are elided in this
   excerpt; only the traversal skeleton is visible.  */
7894 gfc_reverse_ss (gfc_ss * ss)
7899 gcc_assert (ss != NULL);
/* Standard in-place list reversal: HEAD accumulates reversed nodes.  */
7901 head = gfc_ss_terminator;
7902 while (ss != gfc_ss_terminator)
7905 /* Check we didn't somehow break the chain. */
7906 gcc_assert (next != NULL);
7916 /* Walk the arguments of an elemental function. */
/* Walk each actual argument of an elemental call, building a combined
   SS chain.  Array arguments are walked; scalar arguments get a scalar
   SS of the given TYPE (GFC_SS_SCALAR or GFC_SS_REFERENCE).  */
7919 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
7927 head = gfc_ss_terminator;
7930 for (; arg; arg = arg->next)
7935 newss = gfc_walk_subexpr (head, arg->expr);
7938 /* Scalar argument. */
7939 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
7940 newss = gfc_get_scalar_ss (head, arg->expr);
7941 newss->info->type = type;
/* Find the tail of the argument chain before splicing.  */
7950 while (tail->next != gfc_ss_terminator)
7957 /* If all the arguments are scalar we don't need the argument SS. */
7958 gfc_free_ss_chain (head);
7963 /* Add it onto the existing chain. */
7969 /* Walk a function call. Scalar functions are passed back, and taken out of
7970 scalarization loops. For elemental functions we walk their arguments.
7971 The result of functions returning arrays is stored in a temporary outside
7972 the loop, so that the function is only called once. Hence we do not need
7973 to walk their arguments. */
/* Walk a function call expression.  Intrinsics are dispatched to the
   intrinsic walker; array-returning functions get a GFC_SS_FUNCTION
   node; elemental functions have their arguments walked; plain scalar
   calls are passed back unchanged.  */
7976 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
7978 gfc_intrinsic_sym *isym;
7980 gfc_component *comp = NULL;
7982 isym = expr->value.function.isym;
7984 /* Handle intrinsic functions separately. */
7986 return gfc_walk_intrinsic_function (ss, expr, isym);
/* Prefer the explicit-interface symbol; fall back to the symtree.  */
7988 sym = expr->value.function.esym;
7990 sym = expr->symtree->n.sym;
7992 /* A function that returns arrays. */
7993 gfc_is_proc_ptr_comp (expr, &comp);
7994 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
7995 || (comp && comp->attr.dimension))
7996 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
7998 /* Walk the parameters of an elemental function. For now we always pass
8000 if (sym->attr.elemental)
8001 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
8004 /* Scalar functions are OK as these are evaluated outside the scalarization
8005 loop. Pass back and let the caller deal with it. */
8010 /* An array temporary is constructed for array constructors. */
/* Walk an array constructor: an array temporary will be built for it,
   so a GFC_SS_CONSTRUCTOR node of the expression's rank suffices.  */
8013 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
8015 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
8019 /* Walk an expression. Add walked expressions to the head of the SS chain.
8020 A wholly scalar expression will not be added. */
/* Dispatch on expression type to the appropriate walker, prepending any
   resulting SS nodes to SS.  Wholly scalar expression kinds (constants,
   structures, substrings) are passed back unchanged.  */
8023 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
8027 switch (expr->expr_type)
8030 head = gfc_walk_variable_expr (ss, expr);
8034 head = gfc_walk_op_expr (ss, expr);
8038 head = gfc_walk_function_expr (ss, expr);
8043 case EXPR_STRUCTURE:
8044 /* Pass back and let the caller deal with it. */
8048 head = gfc_walk_array_constructor (ss, expr);
8051 case EXPR_SUBSTRING:
8052 /* Pass back and let the caller deal with it. */
/* Any other expr_type at this point is a front-end invariant failure.  */
8056 internal_error ("bad expression type during walk (%d)",
8063 /* Entry point for expression walking.
8064 A return value equal to the passed chain means this is
8065 a scalar expression. It is up to the caller to take whatever action is
8066 necessary to translate these. */
8069 gfc_walk_expr (gfc_expr * expr)
8073 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
8074 return gfc_reverse_ss (res);