/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   to RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
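/* Illustrative example, not part of the original source: if two SSA names
   i_1 and i_3 of the same base variable `i' land in different partitions
   that get different pseudos, the code above first records the first
   pseudo in DECL_RTL (i), then, on seeing the second, mismatching place,
   flips DECL_RTL (i) to pc_rtx to mean "multiple places".  */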
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
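/* Illustrative example, not from the original source: a partition holding
   stack_vars[3], stack_vars[7] and stack_vars[9], with 3 as representative,
   is encoded entirely in the fields above:
     stack_vars[3].representative == 3, stack_vars[3].next == 7,
     stack_vars[7].next == 9,
     stack_vars[9].next == EOC.  */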
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
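/* A worked example of the FRAME_GROWS_DOWNWARD case (assumed values, not
   from the original source): with frame_phase == 0, frame_offset == -12,
   size == 8 and align == 8:
     new_frame_offset = -12 - 8  =  -20;
     new_frame_offset &= -8;     =>  -24  (rounded down to a multiple of 8)
     offset = frame_offset = -24;
   so the new variable occupies bytes [-24, -16) of the frame.  */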
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
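/* Illustrative classifications (assumed types, not from the original
   source):
     union u { short s; int i; };        => true   (a union itself)
     struct a { union u x; };            => true   (contains a union field)
     struct b { struct a y[4]; };        => true   (array of union-bearing
                                                    structs)
     struct c { int i; double d; };      => false  */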
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */

  /* Tertiary compare on true alignment, decreasing.  */

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
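/* Illustrative ordering (assumed properties, not from the original
   source): an object with "large" (unsupported) alignment always sorts
   before every "small"-aligned object; among the rest, size and then
   alignment decide, and the SSA version or DECL_UID breaks the final tie
   so that qsort, which is not stable, still yields a deterministic
   order.  */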
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
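/* Illustrative example, not from the original source: if `a' and `b' were
   placed in one partition and some pointer's points-to set is just { a },
   the loop above ORs the partition bitmap { a, b } into the set, so alias
   queries on RTL know the pointer may also reach the storage now shared
   with `b'.  */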
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
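/* Illustrative example, not from the original source: merging B = {b1, b2}
   into A = {a} at OFFSET 16 adds 16 to the offsets of b1 and b2, makes `a'
   their representative, and splices the lists so that
     a.next == b1, b1.next == b2, b2.next == a's old next (EOC here),
   while A's alignment and conflict bitmap absorb those of B.  */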
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          offset += jsize;
          if (isize == 0)
            break;
        }
    }

  update_alias_info_with_stack_vars ();
}
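/* Illustrative run (assumed sizes, not from the original source): three
   mutually non-conflicting objects of sizes 32, 16 and 8 -- never live at
   the same time -- all end up in the 32-byte object's partition: the
   16-byte object is placed at offset 0 and the 8-byte object at offset 16,
   so the frame needs 32 bytes instead of 56.  */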
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   stack_vars[j].offset + offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
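/* Illustrative example, not from the original source: in
     { int a;  { int b; ... }  { int c; ... } }
   the recursion above makes `a' conflict with both `b' and `c', while the
   sibling-scope variables `b' and `c', which are never live together,
   remain free to share a stack slot during partitioning.  */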
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
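/* Illustrative classification, assuming the default --param
   ssp-buffer-size=8 (not from the original source):
     char big[64];   -> phase 1 with -fstack-protector or
                        -fstack-protector-all
     char tiny[4];   -> phase 1 only with -fstack-protector-all
     int  nums[16];  -> phase 2 only with -fstack-protector-all
   Phase 1 variables are laid out first, closest to the stack guard;
   phase 2 follows; everything else (phase 0) comes last.  */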
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */

static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */

static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
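  /* A worked example (assumed target values, not from the original source):
     with PREFERRED_STACK_BOUNDARY == 128 bits and STARTING_FRAME_OFFSET == 8,
       align = 128 / BITS_PER_UNIT = 16;
       off = 8 % 16 = 8;
       frame_phase = 16 - 8 = 8;
     which satisfies the invariant that (frame_offset + frame_phase) is a
     multiple of the 16-byte boundary.  */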
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }
  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }
  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
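/* A worked example (assumed values, not from the original source): with a
   16-byte boundary and FRAME_GROWS_DOWNWARD, a final frame_offset of -20
   is rounded by `frame_offset &= -align' down to -32; in the upward-growing
   case, 20 would first become 20 + 15 = 35 and then be masked to 32.  */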
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA)
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
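/* Illustrative example, not from the original source: for the gimple
   statement
     x = foo (a, b);
   the code above rebuilds a CALL_EXPR with foo's address as CALL_EXPR_FN,
   `a' and `b' as CALL_EXPR_ARGs and the statement's location and block
   copied over, then hands it to expand_assignment because the call has a
   LHS; a call without a LHS goes through expand_expr_real_1 with a
   const0_rtx target instead.  */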
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;

  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
              case GIMPLE_TERNARY_RHS:
                ops.op2 = gimple_assign_rhs3 (stmt);
                /* Fallthru */
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  location_t saved_location = input_location;
  rtx last = get_last_insn ();
  int lp_nr;

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}
2116 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2117 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2118 generated a tail call (something that might be denied by the ABI
2119 rules governing the call; see calls.c).
2121 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2122 can still reach the rest of BB. The case here is __builtin_sqrt,
2123 where the NaN result goes through the external function (with a
2124 tailcall) and the normal result happens via a sqrt instruction. */
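/* An illustrative sketch only (not the exact sequence any target
   emits): expanding "x = __builtin_sqrt (y)" with math-errno support
   might produce, schematically,

     x = sqrt-insn (y)
     if (!isnan (x))
       goto join;           // normal result: fall through to rest of BB
     sqrt (y);              // sibcall to the library, e.g. to set errno
   join:

   so the block ends in a conditional sibcall and can still fall
   through.  */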
2127 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2135 last2 = last = expand_gimple_stmt (stmt);
2137 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2138 if (CALL_P (last) && SIBLING_CALL_P (last))
2141 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2143 *can_fallthru = true;
2147 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2148 Any instructions emitted here are about to be deleted. */
2149 do_pending_stack_adjust ();
2151 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2152 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2153 EH or abnormal edges, we shouldn't have created a tail call in
2154 the first place. So it seems to me we should just be removing
2155 all edges here, or redirecting the existing fallthru edge to the exit block.  */
2161 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2163 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2165 if (e->dest != EXIT_BLOCK_PTR)
2167 e->dest->count -= e->count;
2168 e->dest->frequency -= EDGE_FREQUENCY (e);
2169 if (e->dest->count < 0)
2171 if (e->dest->frequency < 0)
2172 e->dest->frequency = 0;
2175 probability += e->probability;
2182 /* This is somewhat ugly: the call_expr expander often emits instructions
2183 after the sibcall (to perform the function return). These confuse the
2184 find_many_sub_basic_blocks code, so we need to get rid of them. */
2185 last = NEXT_INSN (last);
2186 gcc_assert (BARRIER_P (last));
2188 *can_fallthru = false;
2189 while (NEXT_INSN (last))
2191 /* For instance, the sqrt builtin expander expands an `if' with a
2192 sibcall in the `then' arm and a label for the `else' arm.  */
2193 if (LABEL_P (NEXT_INSN (last)))
2195 *can_fallthru = true;
2198 delete_insn (NEXT_INSN (last));
2201 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2202 e->probability += probability;
2205 update_bb_for_insn (bb);
2207 if (NEXT_INSN (last))
2209 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2212 if (BARRIER_P (last))
2213 BB_END (bb) = PREV_INSN (last);
2216 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2221 /* Return the difference between the floor and the truncated result of
2222 a signed division by OP1 with remainder MOD. */
2224 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2226 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2227 return gen_rtx_IF_THEN_ELSE
2228 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2229 gen_rtx_IF_THEN_ELSE
2230 (mode, gen_rtx_LT (BImode,
2231 gen_rtx_DIV (mode, op1, mod),
2233 constm1_rtx, const0_rtx),
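/* A worked example (not from the sources): -7 / 2 truncates to -3 with
   remainder MOD = -1.  MOD != 0 and OP1 / MOD = 2 / -1 = -2 < 0, so the
   adjustment is -1, giving the floor result -3 + -1 = -4.  */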
2237 /* Return the difference between the ceil and the truncated result of
2238 a signed division by OP1 with remainder MOD. */
2240 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2242 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2243 return gen_rtx_IF_THEN_ELSE
2244 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2245 gen_rtx_IF_THEN_ELSE
2246 (mode, gen_rtx_GT (BImode,
2247 gen_rtx_DIV (mode, op1, mod),
2249 const1_rtx, const0_rtx),
2253 /* Return the difference between the ceil and the truncated result of
2254 an unsigned division by OP1 with remainder MOD. */
2256 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2258 /* (mod != 0 ? 1 : 0) */
2259 return gen_rtx_IF_THEN_ELSE
2260 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2261 const1_rtx, const0_rtx);
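/* E.g. 7 / 2 truncates to 3 with remainder MOD = 1; MOD != 0, so the
   adjustment is 1, giving the ceiling result 3 + 1 = 4.  */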
2264 /* Return the difference between the rounded and the truncated result
2265 of a signed division by OP1 with remainder MOD. Halfway cases are
2266 rounded away from zero, rather than to the nearest even number. */
2268 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2270 /* (abs (mod) >= abs (op1) - abs (mod)
2271 ? (op1 / mod > 0 ? 1 : -1) : 0) */
2273 return gen_rtx_IF_THEN_ELSE
2274 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2275 gen_rtx_MINUS (mode,
2276 gen_rtx_ABS (mode, op1),
2277 gen_rtx_ABS (mode, mod))),
2278 gen_rtx_IF_THEN_ELSE
2279 (mode, gen_rtx_GT (BImode,
2280 gen_rtx_DIV (mode, op1, mod),
2282 const1_rtx, constm1_rtx),
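/* A worked example: 7 / 2 truncates to 3 with MOD = 1.  Then
   abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1, and OP1 / MOD = 2 > 0, so
   the adjustment is 1 and the rounded result is 3 + 1 = 4, i.e. the
   halfway case 3.5 rounds away from zero.  */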
2286 /* Return the difference between the rounded and the truncated result
2287 of an unsigned division by OP1 with remainder MOD.  Halfway cases
2288 are rounded away from zero, rather than to the nearest even number.  */
2291 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2293 /* (mod >= op1 - mod ? 1 : 0) */
2294 return gen_rtx_IF_THEN_ELSE
2295 (mode, gen_rtx_GE (BImode, mod,
2296 gen_rtx_MINUS (mode, op1, mod)),
2297 const1_rtx, const0_rtx);
2300 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting insns.  */
2304 convert_debug_memory_address (enum machine_mode mode, rtx x,
2307 enum machine_mode xmode = GET_MODE (x);
2309 #ifndef POINTERS_EXTEND_UNSIGNED
2310 gcc_assert (mode == Pmode
2311 || mode == targetm.addr_space.address_mode (as));
2312 gcc_assert (xmode == mode || xmode == VOIDmode);
2315 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2316 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2318 gcc_assert (mode == address_mode || mode == pointer_mode);
2320 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2323 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2324 x = simplify_gen_subreg (mode, x, xmode,
2325 subreg_lowpart_offset
2327 else if (POINTERS_EXTEND_UNSIGNED > 0)
2328 x = gen_rtx_ZERO_EXTEND (mode, x);
2329 else if (!POINTERS_EXTEND_UNSIGNED)
2330 x = gen_rtx_SIGN_EXTEND (mode, x);
2333 switch (GET_CODE (x))
2336 if ((SUBREG_PROMOTED_VAR_P (x)
2337 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2338 || (GET_CODE (SUBREG_REG (x)) == PLUS
2339 && REG_P (XEXP (SUBREG_REG (x), 0))
2340 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2341 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2342 && GET_MODE (SUBREG_REG (x)) == mode)
2343 return SUBREG_REG (x);
2346 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2347 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2350 temp = shallow_copy_rtx (x);
2351 PUT_MODE (temp, mode);
2354 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2356 temp = gen_rtx_CONST (mode, temp);
2360 if (CONST_INT_P (XEXP (x, 1)))
2362 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2364 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2370 /* Don't know how to express ptr_extend as an operation in debug info. */
2373 #endif /* POINTERS_EXTEND_UNSIGNED */
2378 /* Return an RTX equivalent to the value of the tree expression EXP.  */
2382 expand_debug_expr (tree exp)
2384 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2385 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2386 enum machine_mode inner_mode = VOIDmode;
2387 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2390 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2392 case tcc_expression:
2393 switch (TREE_CODE (exp))
2397 case WIDEN_MULT_PLUS_EXPR:
2398 case WIDEN_MULT_MINUS_EXPR:
2402 case TRUTH_ANDIF_EXPR:
2403 case TRUTH_ORIF_EXPR:
2404 case TRUTH_AND_EXPR:
2406 case TRUTH_XOR_EXPR:
2409 case TRUTH_NOT_EXPR:
2418 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2425 case tcc_comparison:
2426 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2433 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2434 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2444 case tcc_exceptional:
2445 case tcc_declaration:
2451 switch (TREE_CODE (exp))
2454 if (!lookup_constant_def (exp))
2456 if (strlen (TREE_STRING_POINTER (exp)) + 1
2457 != (size_t) TREE_STRING_LENGTH (exp))
2459 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2460 op0 = gen_rtx_MEM (BLKmode, op0);
2461 set_mem_attributes (op0, exp, 0);
2464 /* Fall through... */
2469 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2473 gcc_assert (COMPLEX_MODE_P (mode));
2474 op0 = expand_debug_expr (TREE_REALPART (exp));
2475 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2476 return gen_rtx_CONCAT (mode, op0, op1);
2478 case DEBUG_EXPR_DECL:
2479 op0 = DECL_RTL_IF_SET (exp);
2484 op0 = gen_rtx_DEBUG_EXPR (mode);
2485 DEBUG_EXPR_TREE_DECL (op0) = exp;
2486 SET_DECL_RTL (exp, op0);
2496 op0 = DECL_RTL_IF_SET (exp);
2498 /* This decl was probably optimized away. */
2501 if (TREE_CODE (exp) != VAR_DECL
2502 || DECL_EXTERNAL (exp)
2503 || !TREE_STATIC (exp)
2505 || DECL_HARD_REGISTER (exp)
2506 || DECL_IN_CONSTANT_POOL (exp)
2507 || mode == VOIDmode)
2510 op0 = make_decl_rtl_for_debug (exp);
2512 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2513 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2517 op0 = copy_rtx (op0);
2519 if (GET_MODE (op0) == BLKmode
2520 /* If op0 is not BLKmode but MODE is, adjust_mode
2521 below would ICE.  While it is likely a front-end bug,
2522 try to be robust here.  See PR43166.  */
2524 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2526 gcc_assert (MEM_P (op0));
2527 op0 = adjust_address_nv (op0, mode, 0);
2538 inner_mode = GET_MODE (op0);
2540 if (mode == inner_mode)
2543 if (inner_mode == VOIDmode)
2545 if (TREE_CODE (exp) == SSA_NAME)
2546 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2548 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2549 if (mode == inner_mode)
2553 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2555 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2556 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2557 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2558 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2560 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2562 else if (FLOAT_MODE_P (mode))
2564 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2565 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2566 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2568 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2570 else if (FLOAT_MODE_P (inner_mode))
2573 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2575 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2577 else if (CONSTANT_P (op0)
2578 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2579 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2580 subreg_lowpart_offset (mode,
2582 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2583 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2585 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2587 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
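/* E.g. (schematically) narrowing DFmode to SFmode yields
   (float_truncate:SF op0), while widening SImode to DImode yields
   (sign_extend:DI op0) or (zero_extend:DI op0), depending on the
   signedness of the source type.  */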
2593 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2595 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2596 TREE_OPERAND (exp, 0),
2597 TREE_OPERAND (exp, 1));
2599 return expand_debug_expr (newexp);
2603 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2607 if (TREE_CODE (exp) == MEM_REF)
2609 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2610 || (GET_CODE (op0) == PLUS
2611 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2612 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2613 Instead just use get_inner_reference. */
2616 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2617 if (!op1 || !CONST_INT_P (op1))
2620 op0 = plus_constant (op0, INTVAL (op1));
2623 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2624 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2626 as = ADDR_SPACE_GENERIC;
2628 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2630 if (op0 == NULL_RTX)
2633 op0 = gen_rtx_MEM (mode, op0);
2634 set_mem_attributes (op0, exp, 0);
2635 if (TREE_CODE (exp) == MEM_REF
2636 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2637 set_mem_expr (op0, NULL_TREE);
2638 set_mem_addr_space (op0, as);
2642 case TARGET_MEM_REF:
2643 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2644 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2647 op0 = expand_debug_expr
2648 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2652 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2653 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2655 as = ADDR_SPACE_GENERIC;
2657 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2659 if (op0 == NULL_RTX)
2662 op0 = gen_rtx_MEM (mode, op0);
2664 set_mem_attributes (op0, exp, 0);
2665 set_mem_addr_space (op0, as);
2671 case ARRAY_RANGE_REF:
2676 case VIEW_CONVERT_EXPR:
2678 enum machine_mode mode1;
2679 HOST_WIDE_INT bitsize, bitpos;
2682 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2683 &mode1, &unsignedp, &volatilep, false);
2689 orig_op0 = op0 = expand_debug_expr (tem);
2696 enum machine_mode addrmode, offmode;
2701 op0 = XEXP (op0, 0);
2702 addrmode = GET_MODE (op0);
2703 if (addrmode == VOIDmode)
2706 op1 = expand_debug_expr (offset);
2710 offmode = GET_MODE (op1);
2711 if (offmode == VOIDmode)
2712 offmode = TYPE_MODE (TREE_TYPE (offset));
2714 if (addrmode != offmode)
2715 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2716 subreg_lowpart_offset (addrmode,
2719 /* Don't use offset_address here; we don't need a
2720 recognizable address, and we don't want to generate code.  */
2722 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2728 if (mode1 == VOIDmode)
2730 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2731 if (bitpos >= BITS_PER_UNIT)
2733 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2734 bitpos %= BITS_PER_UNIT;
2736 else if (bitpos < 0)
2739 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2740 op0 = adjust_address_nv (op0, mode1, units);
2741 bitpos += units * BITS_PER_UNIT;
2743 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2744 op0 = adjust_address_nv (op0, mode, 0);
2745 else if (GET_MODE (op0) != mode1)
2746 op0 = adjust_address_nv (op0, mode1, 0);
2748 op0 = copy_rtx (op0);
2749 if (op0 == orig_op0)
2750 op0 = shallow_copy_rtx (op0);
2751 set_mem_attributes (op0, exp, 0);
2754 if (bitpos == 0 && mode == GET_MODE (op0))
2760 if (GET_MODE (op0) == BLKmode)
2763 if ((bitpos % BITS_PER_UNIT) == 0
2764 && bitsize == GET_MODE_BITSIZE (mode1))
2766 enum machine_mode opmode = GET_MODE (op0);
2768 if (opmode == VOIDmode)
2769 opmode = TYPE_MODE (TREE_TYPE (tem));
2771 /* This condition may hold if we're expanding the address
2772 right past the end of an array that turned out not to
2773 be addressable (i.e., the address was only computed in
2774 debug stmts). The gen_subreg below would rightfully
2775 crash, and the address doesn't really exist, so just drop it.  */
2777 if (bitpos >= GET_MODE_BITSIZE (opmode))
2780 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2781 return simplify_gen_subreg (mode, op0, opmode,
2782 bitpos / BITS_PER_UNIT);
2785 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2786 && TYPE_UNSIGNED (TREE_TYPE (exp))
2788 : ZERO_EXTRACT, mode,
2789 GET_MODE (op0) != VOIDmode
2791 : TYPE_MODE (TREE_TYPE (tem)),
2792 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2796 return simplify_gen_unary (ABS, mode, op0, mode);
2799 return simplify_gen_unary (NEG, mode, op0, mode);
2802 return simplify_gen_unary (NOT, mode, op0, mode);
2805 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2807 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2810 case FIX_TRUNC_EXPR:
2811 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2814 case POINTER_PLUS_EXPR:
2815 /* For the rare target where pointers are not the same size as
2816 size_t, we need to check for mis-matched modes and correct the addend.  */
2819 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2820 && GET_MODE (op0) != GET_MODE (op1))
2822 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2823 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2826 /* We always sign-extend, regardless of the signedness of
2827 the operand, because the operand is always unsigned
2828 here even if the original C expression is signed. */
2829 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2834 return simplify_gen_binary (PLUS, mode, op0, op1);
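/* Schematic example, assuming a hypothetical target with 64-bit
   pointers but a 32-bit sizetype: for "p + i" the operands arrive as

     op0 = (reg:DI p)
     op1 = (reg:SI i)                   ; narrower than op0

   and op1 is rewritten above to (sign_extend:DI (reg:SI i)) before the
   (plus:DI ...) is formed; sign extension preserves the value of a
   negative byte offset stored as an unsigned sizetype bit pattern.  */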
2837 return simplify_gen_binary (MINUS, mode, op0, op1);
2840 return simplify_gen_binary (MULT, mode, op0, op1);
2843 case TRUNC_DIV_EXPR:
2844 case EXACT_DIV_EXPR:
2846 return simplify_gen_binary (UDIV, mode, op0, op1);
2848 return simplify_gen_binary (DIV, mode, op0, op1);
2850 case TRUNC_MOD_EXPR:
2851 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
2853 case FLOOR_DIV_EXPR:
2855 return simplify_gen_binary (UDIV, mode, op0, op1);
2858 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2859 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2860 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2861 return simplify_gen_binary (PLUS, mode, div, adj);
2864 case FLOOR_MOD_EXPR:
2866 return simplify_gen_binary (UMOD, mode, op0, op1);
2869 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2870 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2871 adj = simplify_gen_unary (NEG, mode,
2872 simplify_gen_binary (MULT, mode, adj, op1),
2874 return simplify_gen_binary (PLUS, mode, mod, adj);
2880 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2881 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2882 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2883 return simplify_gen_binary (PLUS, mode, div, adj);
2887 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2888 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2889 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2890 return simplify_gen_binary (PLUS, mode, div, adj);
2896 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2897 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2898 adj = simplify_gen_unary (NEG, mode,
2899 simplify_gen_binary (MULT, mode, adj, op1),
2901 return simplify_gen_binary (PLUS, mode, mod, adj);
2905 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2906 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2907 adj = simplify_gen_unary (NEG, mode,
2908 simplify_gen_binary (MULT, mode, adj, op1),
2910 return simplify_gen_binary (PLUS, mode, mod, adj);
2913 case ROUND_DIV_EXPR:
2916 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2917 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2918 rtx adj = round_udiv_adjust (mode, mod, op1);
2919 return simplify_gen_binary (PLUS, mode, div, adj);
2923 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2924 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2925 rtx adj = round_sdiv_adjust (mode, mod, op1);
2926 return simplify_gen_binary (PLUS, mode, div, adj);
2929 case ROUND_MOD_EXPR:
2932 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2933 rtx adj = round_udiv_adjust (mode, mod, op1);
2934 adj = simplify_gen_unary (NEG, mode,
2935 simplify_gen_binary (MULT, mode, adj, op1),
2937 return simplify_gen_binary (PLUS, mode, mod, adj);
2941 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2942 rtx adj = round_sdiv_adjust (mode, mod, op1);
2943 adj = simplify_gen_unary (NEG, mode,
2944 simplify_gen_binary (MULT, mode, adj, op1),
2946 return simplify_gen_binary (PLUS, mode, mod, adj);
2950 return simplify_gen_binary (ASHIFT, mode, op0, op1);
2954 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
2956 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
2959 return simplify_gen_binary (ROTATE, mode, op0, op1);
2962 return simplify_gen_binary (ROTATERT, mode, op0, op1);
2965 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
2968 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
2971 case TRUTH_AND_EXPR:
2972 return simplify_gen_binary (AND, mode, op0, op1);
2976 return simplify_gen_binary (IOR, mode, op0, op1);
2979 case TRUTH_XOR_EXPR:
2980 return simplify_gen_binary (XOR, mode, op0, op1);
2982 case TRUTH_ANDIF_EXPR:
2983 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2985 case TRUTH_ORIF_EXPR:
2986 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2988 case TRUTH_NOT_EXPR:
2989 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
2992 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
2996 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3000 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3004 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3008 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3011 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3013 case UNORDERED_EXPR:
3014 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3017 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3020 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3023 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3026 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3029 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3032 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3035 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3038 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3041 gcc_assert (COMPLEX_MODE_P (mode));
3042 if (GET_MODE (op0) == VOIDmode)
3043 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3044 if (GET_MODE (op1) == VOIDmode)
3045 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3046 return gen_rtx_CONCAT (mode, op0, op1);
3049 if (GET_CODE (op0) == CONCAT)
3050 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3051 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3053 GET_MODE_INNER (mode)));
3056 enum machine_mode imode = GET_MODE_INNER (mode);
3061 re = adjust_address_nv (op0, imode, 0);
3062 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3066 enum machine_mode ifmode = int_mode_for_mode (mode);
3067 enum machine_mode ihmode = int_mode_for_mode (imode);
3069 if (ifmode == BLKmode || ihmode == BLKmode)
3071 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3074 re = gen_rtx_SUBREG (ifmode, re, 0);
3075 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3076 if (imode != ihmode)
3077 re = gen_rtx_SUBREG (imode, re, 0);
3078 im = copy_rtx (op0);
3080 im = gen_rtx_SUBREG (ifmode, im, 0);
3081 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3082 if (imode != ihmode)
3083 im = gen_rtx_SUBREG (imode, im, 0);
3085 im = gen_rtx_NEG (imode, im);
3086 return gen_rtx_CONCAT (mode, re, im);
3090 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3091 if (!op0 || !MEM_P (op0))
3093 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3094 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3095 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3096 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3097 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3099 if (handled_component_p (TREE_OPERAND (exp, 0)))
3101 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3103 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3104 &bitoffset, &bitsize, &maxsize);
3105 if ((TREE_CODE (decl) == VAR_DECL
3106 || TREE_CODE (decl) == PARM_DECL
3107 || TREE_CODE (decl) == RESULT_DECL)
3108 && !TREE_ADDRESSABLE (decl)
3109 && (bitoffset % BITS_PER_UNIT) == 0
3111 && bitsize == maxsize)
3112 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3113 bitoffset / BITS_PER_UNIT);
3119 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3120 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3125 exp = build_constructor_from_list (TREE_TYPE (exp),
3126 TREE_VECTOR_CST_ELTS (exp));
3130 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3135 op0 = gen_rtx_CONCATN
3136 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3138 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3140 op1 = expand_debug_expr (val);
3143 XVECEXP (op0, 0, i) = op1;
3146 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3148 op1 = expand_debug_expr
3149 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3154 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3155 XVECEXP (op0, 0, i) = op1;
3161 goto flag_unsupported;
3164 /* ??? Maybe handle some builtins? */
3169 gimple g = get_gimple_for_ssa_name (exp);
3172 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3178 int part = var_to_partition (SA.map, exp);
3180 if (part == NO_PARTITION)
3182 /* If this is a reference to the incoming value of a parameter
3183 that is never used in the code, or where the incoming
3184 value is never used, use the PARM_DECL's DECL_RTL if set.  */
3186 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3187 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3189 rtx incoming = DECL_INCOMING_RTL (SSA_NAME_VAR (exp));
3191 && GET_MODE (incoming) != BLKmode
3192 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3193 || (MEM_P (incoming)
3194 && REG_P (XEXP (incoming, 0))
3195 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3197 op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3198 ENTRY_VALUE_EXP (op0) = incoming;
3201 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3209 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3211 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3219 /* Vector stuff.  For most of these tree codes we have no corresponding RTL codes.  */
3220 case REALIGN_LOAD_EXPR:
3221 case REDUC_MAX_EXPR:
3222 case REDUC_MIN_EXPR:
3223 case REDUC_PLUS_EXPR:
3225 case VEC_EXTRACT_EVEN_EXPR:
3226 case VEC_EXTRACT_ODD_EXPR:
3227 case VEC_INTERLEAVE_HIGH_EXPR:
3228 case VEC_INTERLEAVE_LOW_EXPR:
3229 case VEC_LSHIFT_EXPR:
3230 case VEC_PACK_FIX_TRUNC_EXPR:
3231 case VEC_PACK_SAT_EXPR:
3232 case VEC_PACK_TRUNC_EXPR:
3233 case VEC_RSHIFT_EXPR:
3234 case VEC_UNPACK_FLOAT_HI_EXPR:
3235 case VEC_UNPACK_FLOAT_LO_EXPR:
3236 case VEC_UNPACK_HI_EXPR:
3237 case VEC_UNPACK_LO_EXPR:
3238 case VEC_WIDEN_MULT_HI_EXPR:
3239 case VEC_WIDEN_MULT_LO_EXPR:
3243 case ADDR_SPACE_CONVERT_EXPR:
3244 case FIXED_CONVERT_EXPR:
3246 case WITH_SIZE_EXPR:
3250 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3251 && SCALAR_INT_MODE_P (mode))
3254 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3256 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3259 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3261 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3263 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3264 return simplify_gen_binary (PLUS, mode, op0, op2);
3268 case WIDEN_MULT_EXPR:
3269 case WIDEN_MULT_PLUS_EXPR:
3270 case WIDEN_MULT_MINUS_EXPR:
3271 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3272 && SCALAR_INT_MODE_P (mode))
3274 inner_mode = GET_MODE (op0);
3275 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3276 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3278 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3279 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3280 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3282 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3283 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3284 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3286 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3287 return simplify_gen_binary (PLUS, mode, op0, op2);
3289 return simplify_gen_binary (MINUS, mode, op2, op0);
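/* E.g. a WIDEN_MULT_EXPR of two signed HImode operands producing an
   SImode value becomes, schematically,

     (mult:SI (sign_extend:SI (reg:HI a)) (sign_extend:SI (reg:HI b)))

   with ZERO_EXTEND used instead for unsigned operands.  */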
3293 case WIDEN_SUM_EXPR:
3294 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3295 && SCALAR_INT_MODE_P (mode))
3298 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3300 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3302 return simplify_gen_binary (PLUS, mode, op0, op1);
3307 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3311 #ifdef ENABLE_CHECKING
3320 /* Expand the _LOCs in debug insns. We run this after expanding all
3321 regular insns, so that any variables referenced in the function
3322 will have their DECL_RTLs set. */
3325 expand_debug_locations (void)
3328 rtx last = get_last_insn ();
3329 int save_strict_alias = flag_strict_aliasing;
3331 /* New alias sets while setting up memory attributes cause
3332 -fcompare-debug failures, even though they don't bring about any codegen changes.  */
3334 flag_strict_aliasing = 0;
3336 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3337 if (DEBUG_INSN_P (insn))
3339 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3341 enum machine_mode mode;
3343 if (value == NULL_TREE)
3347 val = expand_debug_expr (value);
3348 gcc_assert (last == get_last_insn ());
3352 val = gen_rtx_UNKNOWN_VAR_LOC ();
3355 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3357 gcc_assert (mode == GET_MODE (val)
3358 || (GET_MODE (val) == VOIDmode
3359 && (CONST_INT_P (val)
3360 || GET_CODE (val) == CONST_FIXED
3361 || GET_CODE (val) == CONST_DOUBLE
3362 || GET_CODE (val) == LABEL_REF)));
3365 INSN_VAR_LOCATION_LOC (insn) = val;
3368 flag_strict_aliasing = save_strict_alias;
3371 /* Expand basic block BB from GIMPLE trees to RTL. */
3374 expand_gimple_basic_block (basic_block bb)
3376 gimple_stmt_iterator gsi;
3385 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3388 /* Note that since we are now transitioning from GIMPLE to RTL, we
3389 cannot use the gsi_*_bb() routines because they expect the basic
3390 block to be in GIMPLE, instead of RTL. Therefore, we need to
3391 access the BB sequence directly. */
3392 stmts = bb_seq (bb);
3393 bb->il.gimple = NULL;
3394 rtl_profile_for_bb (bb);
3395 init_rtl_bb_info (bb);
3396 bb->flags |= BB_RTL;
3398 /* Remove the RETURN_EXPR if we may fall through to the exit instead.  */
3400 gsi = gsi_last (stmts);
3401 if (!gsi_end_p (gsi)
3402 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3404 gimple ret_stmt = gsi_stmt (gsi);
3406 gcc_assert (single_succ_p (bb));
3407 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3409 if (bb->next_bb == EXIT_BLOCK_PTR
3410 && !gimple_return_retval (ret_stmt))
3412 gsi_remove (&gsi, false);
3413 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3417 gsi = gsi_start (stmts);
3418 if (!gsi_end_p (gsi))
3420 stmt = gsi_stmt (gsi);
3421 if (gimple_code (stmt) != GIMPLE_LABEL)
3425 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3429 last = get_last_insn ();
3433 expand_gimple_stmt (stmt);
3438 emit_label ((rtx) *elt);
3440 /* Java emits line number notes at the top of labels.
3441 ??? Make this go away once line number notes are obsoleted. */
3442 BB_HEAD (bb) = NEXT_INSN (last);
3443 if (NOTE_P (BB_HEAD (bb)))
3444 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3445 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3447 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3450 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3452 NOTE_BASIC_BLOCK (note) = bb;
3454 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3458 stmt = gsi_stmt (gsi);
3460 /* If this statement is a non-debug one, and we generate debug
3461 insns, then this one might be the last real use of a TERed
3462 SSA_NAME, while there are still some debug uses further
3463 down.  Expanding the current SSA name in such later debug
3464 uses by its RHS might lead to wrong debug info, as coalescing
3465 might make the operands of such RHS be placed into the same
3466 pseudo as something else. Like so:
3467 a_1 = a_0 + 1;  // Assume a_1 is TERed and a_0 is dead
use(a_1);
a_2 = ...
#DEBUG ... => a_1
3471 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3472 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3473 the write to a_2 would actually have clobbered the place which
would have held a_0.
3476 So, instead of that, we recognize the situation, and generate
3477 debug temporaries at the last real use of TERed SSA names:
  a_1 = a_0 + 1;
  #DEBUG #D1 => a_1
  use(a_1);
  a_2 = ...
  #DEBUG ... => #D1  */
3484 if (MAY_HAVE_DEBUG_INSNS
3486 && !is_gimple_debug (stmt))
3492 location_t sloc = get_curr_insn_source_location ();
3493 tree sblock = get_curr_insn_block ();
3495 /* Look for SSA names that have their last use here (TERed
3496 names always have only one real use). */
3497 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3498 if ((def = get_gimple_for_ssa_name (op)))
3500 imm_use_iterator imm_iter;
3501 use_operand_p use_p;
3502 bool have_debug_uses = false;
3504 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3506 if (gimple_debug_bind_p (USE_STMT (use_p)))
3508 have_debug_uses = true;
3513 if (have_debug_uses)
3515 /* OP is a TERed SSA name, with DEF its defining
3516 statement, and OP is used in further debug
3517 instructions.  Generate a debug temporary, and
3518 replace all uses of OP in debug insns with that temporary.  */
3521 tree value = gimple_assign_rhs_to_tree (def);
3522 tree vexpr = make_node (DEBUG_EXPR_DECL);
3524 enum machine_mode mode;
3526 set_curr_insn_source_location (gimple_location (def));
3527 set_curr_insn_block (gimple_block (def));
3529 DECL_ARTIFICIAL (vexpr) = 1;
3530 TREE_TYPE (vexpr) = TREE_TYPE (value);
3532 mode = DECL_MODE (value);
3534 mode = TYPE_MODE (TREE_TYPE (value));
3535 DECL_MODE (vexpr) = mode;
3537 val = gen_rtx_VAR_LOCATION
3538 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3540 emit_debug_insn (val);
3542 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3544 if (!gimple_debug_bind_p (debugstmt))
3547 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3548 SET_USE (use_p, vexpr);
3550 update_stmt (debugstmt);
3554 set_curr_insn_source_location (sloc);
3555 set_curr_insn_block (sblock);
3558 currently_expanding_gimple_stmt = stmt;
3560 /* Expand this statement, then evaluate the resulting RTL and
3561 fixup the CFG accordingly. */
3562 if (gimple_code (stmt) == GIMPLE_COND)
3564 new_bb = expand_gimple_cond (bb, stmt);
3568 else if (gimple_debug_bind_p (stmt))
3570 location_t sloc = get_curr_insn_source_location ();
3571 tree sblock = get_curr_insn_block ();
3572 gimple_stmt_iterator nsi = gsi;
3576 tree var = gimple_debug_bind_get_var (stmt);
3579 enum machine_mode mode;
3581 if (gimple_debug_bind_has_value_p (stmt))
3582 value = gimple_debug_bind_get_value (stmt);
3586 last = get_last_insn ();
3588 set_curr_insn_source_location (gimple_location (stmt));
3589 set_curr_insn_block (gimple_block (stmt));
3592 mode = DECL_MODE (var);
3594 mode = TYPE_MODE (TREE_TYPE (var));
3596 val = gen_rtx_VAR_LOCATION
3597 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3599 emit_debug_insn (val);
3601 if (dump_file && (dump_flags & TDF_DETAILS))
3603 /* We can't dump the insn with a TREE where an RTX is expected.  */
3605 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3606 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3607 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3610 /* In order not to generate too many debug temporaries,
3611 we delink all uses of debug statements we already expanded.
3612 Therefore debug statements between definition and real
3613 use of TERed SSA names will continue to use the SSA name,
3614 and not be replaced with debug temps. */
3615 delink_stmt_imm_use (stmt);
3619 if (gsi_end_p (nsi))
3621 stmt = gsi_stmt (nsi);
3622 if (!gimple_debug_bind_p (stmt))
3626 set_curr_insn_source_location (sloc);
3627 set_curr_insn_block (sblock);
3631 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3634 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3645 def_operand_p def_p;
3646 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3650 /* Ignore this stmt if it is in the list of
3651 replaceable expressions. */
3653 && bitmap_bit_p (SA.values,
3654 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3657 last = expand_gimple_stmt (stmt);
3658 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3663 currently_expanding_gimple_stmt = NULL;
3665 /* Expand implicit goto and convert goto_locus. */
3666 FOR_EACH_EDGE (e, ei, bb->succs)
3668 if (e->goto_locus && e->goto_block)
3670 set_curr_insn_source_location (e->goto_locus);
3671 set_curr_insn_block (e->goto_block);
3672 e->goto_locus = curr_insn_locator ();
3674 e->goto_block = NULL;
3675 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3677 emit_jump (label_rtx_for_bb (e->dest));
3678 e->flags &= ~EDGE_FALLTHRU;
3682 /* Expanded RTL can create a jump in the last instruction of the block.
3683 This might later be assumed to be a jump to the successor and break edge insertion.
3684 We need to insert a dummy move to prevent this.  PR41440. */
3685 if (single_succ_p (bb)
3686 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3687 && (last = get_last_insn ())
3690 rtx dummy = gen_reg_rtx (SImode);
3691 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3694 do_pending_stack_adjust ();
3696 /* Find the block tail. The last insn in the block is the insn
3697 before a barrier and/or table jump insn. */
3698 last = get_last_insn ();
3699 if (BARRIER_P (last))
3700 last = PREV_INSN (last);
3701 if (JUMP_TABLE_DATA_P (last))
3702 last = PREV_INSN (PREV_INSN (last));
3705 update_bb_for_insn (bb);
3711 /* Create a basic block for initialization code. */
3714 construct_init_block (void)
3716 basic_block init_block, first_block;
3720 /* Multiple entry points not supported yet. */
3721 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3722 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3723 init_rtl_bb_info (EXIT_BLOCK_PTR);
3724 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3725 EXIT_BLOCK_PTR->flags |= BB_RTL;
3727 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3729 /* When the entry edge points to the first basic block, we don't need a jump;
3730 otherwise we have to jump to the proper target.  */
3731 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3733 tree label = gimple_block_label (e->dest);
3735 emit_jump (label_rtx (label));
3739 flags = EDGE_FALLTHRU;
3741 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3744 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3745 init_block->count = ENTRY_BLOCK_PTR->count;
3748 first_block = e->dest;
3749 redirect_edge_succ (e, init_block);
3750 e = make_edge (init_block, first_block, flags);
3753 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3754 e->probability = REG_BR_PROB_BASE;
3755 e->count = ENTRY_BLOCK_PTR->count;
3757 update_bb_for_insn (init_block);
3761 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3762 found in the block tree. */
3765 set_block_levels (tree block, int level)
3769 BLOCK_NUMBER (block) = level;
3770 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3771 block = BLOCK_CHAIN (block);
3775 /* Create a block containing landing pads and similar stuff. */
3778 construct_exit_block (void)
3780 rtx head = get_last_insn ();
3782 basic_block exit_block;
3786 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3788 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3790 /* Make sure the locus is set to the end of the function, so that
3791 epilogue line numbers and warnings are set properly. */
3792 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3793 input_location = cfun->function_end_locus;
3795 /* The following insns belong to the top scope. */
3796 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3798 /* Generate rtl for function exit. */
3799 expand_function_end ();
3801 end = get_last_insn ();
3804 /* While emitting the function end we could have moved the end of the last basic block.  */
3806 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3807 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3808 head = NEXT_INSN (head);
3809 exit_block = create_basic_block (NEXT_INSN (head), end,
3810 EXIT_BLOCK_PTR->prev_bb);
3811 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3812 exit_block->count = EXIT_BLOCK_PTR->count;
3815 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3817 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3818 if (!(e->flags & EDGE_ABNORMAL))
3819 redirect_edge_succ (e, exit_block);
3824 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3825 e->probability = REG_BR_PROB_BASE;
3826 e->count = EXIT_BLOCK_PTR->count;
3827 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3830 e->count -= e2->count;
3831 exit_block->count -= e2->count;
3832 exit_block->frequency -= EDGE_FREQUENCY (e2);
3836 if (exit_block->count < 0)
3837 exit_block->count = 0;
3838 if (exit_block->frequency < 0)
3839 exit_block->frequency = 0;
3840 update_bb_for_insn (exit_block);
3843 /* Helper function for discover_nonconstant_array_refs.
3844 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable.  */
3848 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3849 void *data ATTRIBUTE_UNUSED)
3853 if (IS_TYPE_OR_DECL_P (t))
3855 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3857 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3858 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3859 && (!TREE_OPERAND (t, 2)
3860 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3861 || (TREE_CODE (t) == COMPONENT_REF
3862 && (!TREE_OPERAND (t,2)
3863 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3864 || TREE_CODE (t) == BIT_FIELD_REF
3865 || TREE_CODE (t) == REALPART_EXPR
3866 || TREE_CODE (t) == IMAGPART_EXPR
3867 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3868 || CONVERT_EXPR_P (t))
3869 t = TREE_OPERAND (t, 0);
3871 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3873 t = get_base_address (t);
3875 && DECL_MODE (t) != BLKmode)
3876 TREE_ADDRESSABLE (t) = 1;
3885 /* RTL expansion is not able to compile array references with variable
3886 offsets for arrays stored in a single register.  Discover such
3887 expressions and mark variables as addressable to avoid this scenario.  */
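/* A made-up fragment that triggers this:

     short v[2];       // small enough to be scalarized into a register
     ...
     return v[i];      // variable index i

   If V were kept in a single register, the variable index could not be
   expanded, so V is marked TREE_ADDRESSABLE and forced into memory.  */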
3891 discover_nonconstant_array_refs (void)
3894 gimple_stmt_iterator gsi;
3897 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3899 gimple stmt = gsi_stmt (gsi);
3900 if (!is_gimple_debug (stmt))
3901 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3905 /* This function sets crtl->args.internal_arg_pointer to a virtual
3906 register if DRAP is needed.  The local register allocator will replace
3907 virtual_incoming_args_rtx with the virtual register. */
3910 expand_stack_alignment (void)
3913 unsigned int preferred_stack_boundary;
3915 if (! SUPPORTS_STACK_ALIGNMENT)
3918 if (cfun->calls_alloca
3919 || cfun->has_nonlocal_label
3920 || crtl->has_nonlocal_goto)
3921 crtl->need_drap = true;
3923 /* Call update_stack_boundary here again to update incoming stack
3924 boundary. It may set incoming stack alignment to a different
3925 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3926 use the minimum incoming stack alignment to check if it is OK
3927 to perform sibcall optimization since sibcall optimization will
3928 only align the outgoing stack to incoming stack boundary. */
3929 if (targetm.calls.update_stack_boundary)
3930 targetm.calls.update_stack_boundary ();
3932 /* The incoming stack frame has to be aligned at least at
3933 parm_stack_boundary. */
3934 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3936 /* Update crtl->stack_alignment_estimated and use it later to align
3937 the stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
3938 exceptions since callgraph doesn't collect incoming stack alignment in this case.  */
3940 if (cfun->can_throw_non_call_exceptions
3941 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3942 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3944 preferred_stack_boundary = crtl->preferred_stack_boundary;
3945 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3946 crtl->stack_alignment_estimated = preferred_stack_boundary;
3947 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3948 crtl->stack_alignment_needed = preferred_stack_boundary;
3950 gcc_assert (crtl->stack_alignment_needed
3951 <= crtl->stack_alignment_estimated);
3953 crtl->stack_realign_needed
3954 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3955 crtl->stack_realign_tried = crtl->stack_realign_needed;
3957 crtl->stack_realign_processed = true;
3959 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack realignment.  */
3961 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3962 drap_rtx = targetm.calls.get_drap_rtx ();
3964 /* stack_realign_drap and drap_rtx must match. */
3965 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3967 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3968 if (NULL != drap_rtx)
3970 crtl->args.internal_arg_pointer = drap_rtx;
3972 /* Call fixup_tail_calls to clean up the REG_EQUIV note if DRAP is needed.  */
3974 fixup_tail_calls ();
3978 /* Translate the intermediate representation contained in the CFG
3979 from GIMPLE trees to RTL.
3981 We do conversion per basic block and preserve/update the tree CFG.
3982 This implies we have to do some magic as the CFG can simultaneously
3983 consist of basic blocks containing RTL and GIMPLE trees. This can
3984 confuse the CFG hooks, so be careful not to manipulate the CFG during expansion.  */
3988 gimple_expand_cfg (void)
3990 basic_block bb, init_block;
3997 timevar_push (TV_OUT_OF_SSA);
3998 rewrite_out_of_ssa (&SA);
3999 timevar_pop (TV_OUT_OF_SSA);
4000 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4003 /* Some backends want to know that we are expanding to RTL. */
4004 currently_expanding_to_rtl = 1;
4006 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4008 insn_locators_alloc ();
4009 if (!DECL_IS_BUILTIN (current_function_decl))
4011 /* Eventually, all FEs should explicitly set function_start_locus. */
4012 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4013 set_curr_insn_source_location
4014 (DECL_SOURCE_LOCATION (current_function_decl));
4016 set_curr_insn_source_location (cfun->function_start_locus);
4019 set_curr_insn_source_location (UNKNOWN_LOCATION);
4020 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4021 prologue_locator = curr_insn_locator ();
4023 #ifdef INSN_SCHEDULING
4024 init_sched_attrs ();
4027 /* Make sure the first insn is a note, even if we don't want line numbers.
4028 This ensures the first insn will never be deleted.
4029 Also, final expects a note to appear there. */
4030 emit_note (NOTE_INSN_DELETED);
4032 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4033 discover_nonconstant_array_refs ();
4035 targetm.expand_to_rtl_hook ();
4036 crtl->stack_alignment_needed = STACK_BOUNDARY;
4037 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4038 crtl->stack_alignment_estimated = 0;
4039 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4040 cfun->cfg->max_jumptable_ents = 0;
4042 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
4043 of the function section at expansion time to predict the distance of calls.  */
4044 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4046 /* Expand the variables recorded during gimple lowering. */
4047 timevar_push (TV_VAR_EXPAND);
4050 expand_used_vars ();
4052 var_seq = get_insns ();
4054 timevar_pop (TV_VAR_EXPAND);
4056 /* Honor stack protection warnings. */
4057 if (warn_stack_protect)
4059 if (cfun->calls_alloca)
4060 warning (OPT_Wstack_protector,
4061 "stack protector not protecting local variables: "
4062 "variable length buffer");
4063 if (has_short_buffer && !crtl->stack_protect_guard)
4064 warning (OPT_Wstack_protector,
4065 "stack protector not protecting function: "
4066 "all local arrays are less than %d bytes long",
4067 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4070 /* Set up parameters and prepare for return, for the function. */
4071 expand_function_start (current_function_decl);
4073 /* If we emitted any instructions for setting up the variables,
4074 emit them before the FUNCTION_START note. */
4077 emit_insn_before (var_seq, parm_birth_insn);
4079 /* In expand_function_end we'll insert the alloca save/restore
4080 before parm_birth_insn.  We've just inserted an alloca call.
4081 Adjust the pointer to match. */
4082 parm_birth_insn = var_seq;
4085 /* Now that we also have the parameter RTXs, copy them over to our partition representation.  */
4087 for (i = 0; i < SA.map->num_partitions; i++)
4089 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4091 if (TREE_CODE (var) != VAR_DECL
4092 && !SA.partition_to_pseudo[i])
4093 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4094 gcc_assert (SA.partition_to_pseudo[i]);
4096 /* If this decl was marked as living in multiple places, reset
4097 this now to NULL. */
4098 if (DECL_RTL_IF_SET (var) == pc_rtx)
4099 SET_DECL_RTL (var, NULL);
4101 /* Some RTL parts really want to look at DECL_RTL(x) when x
4102 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4103 SET_DECL_RTL here, making it available, but that would mean
4104 selecting one of the potentially many RTLs for one DECL.  Instead
4105 of doing that we simply reset the MEM_EXPR of the RTL in question;
4106 then nobody can get at it, and hence nobody can call DECL_RTL on it.  */
4107 if (!DECL_RTL_SET_P (var))
4109 if (MEM_P (SA.partition_to_pseudo[i]))
4110 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4114 /* If this function is `main', emit a call to `__main'
4115 to run global initializers, etc. */
4116 if (DECL_NAME (current_function_decl)
4117 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4118 && DECL_FILE_SCOPE_P (current_function_decl))
4119 expand_main_function ();
4121 /* Initialize the stack_protect_guard field. This must happen after the
4122 call to __main (if any) so that the external decl is initialized. */
4123 if (crtl->stack_protect_guard)
4124 stack_protect_prologue ();
4126 expand_phi_nodes (&SA);
4128 /* Register rtl specific functions for cfg. */
4129 rtl_register_cfg_hooks ();
4131 init_block = construct_init_block ();
4133 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4134 remaining edges later. */
4135 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4136 e->flags &= ~EDGE_EXECUTABLE;
4138 lab_rtx_for_bb = pointer_map_create ();
4139 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4140 bb = expand_gimple_basic_block (bb);
4142 if (MAY_HAVE_DEBUG_INSNS)
4143 expand_debug_locations ();
4145 execute_free_datastructures ();
4146 timevar_push (TV_OUT_OF_SSA);
4147 finish_out_of_ssa (&SA);
4148 timevar_pop (TV_OUT_OF_SSA);
4150 timevar_push (TV_POST_EXPAND);
4151 /* We are no longer in SSA form. */
4152 cfun->gimple_df->in_ssa_p = false;
4154 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4155 conservatively to true until they are all profile-aware.  */
4156 pointer_map_destroy (lab_rtx_for_bb);
4159 construct_exit_block ();
4160 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4161 insn_locators_finalize ();
4163 /* Zap the tree EH table. */
4164 set_eh_throw_stmt_table (cfun, NULL);
4166 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4167 to split edges, which edge insertion might do.  */
4168 rebuild_jump_labels (get_insns ());
4170 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4174 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4178 rebuild_jump_labels_chain (e->insns.r);
4179 /* Avoid putting insns before parm_birth_insn. */
4180 if (e->src == ENTRY_BLOCK_PTR
4181 && single_succ_p (ENTRY_BLOCK_PTR)
4184 rtx insns = e->insns.r;
4185 e->insns.r = NULL_RTX;
4186 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4189 commit_one_edge_insertion (e);
4196 /* We're done expanding trees to RTL. */
4197 currently_expanding_to_rtl = 0;
4199 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4203 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4205 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4206 e->flags &= ~EDGE_EXECUTABLE;
4208 /* At the moment not all abnormal edges match the RTL
4209 representation. It is safe to remove them here as
4210 find_many_sub_basic_blocks will rediscover them.
4211 In the future we should get this fixed properly. */
4212 if ((e->flags & EDGE_ABNORMAL)
4213 && !(e->flags & EDGE_SIBCALL))
4220 blocks = sbitmap_alloc (last_basic_block);
4221 sbitmap_ones (blocks);
4222 find_many_sub_basic_blocks (blocks);
4223 sbitmap_free (blocks);
4224 purge_all_dead_edges ();
4228 expand_stack_alignment ();
4230 #ifdef ENABLE_CHECKING
4231 verify_flow_info ();
4234 /* There's no need to defer outputting this function any more; we
4235 know we want to output it. */
4236 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4238 /* Now that we're done expanding trees to RTL, we shouldn't have any
4239 more CONCATs anywhere. */
4240 generating_concat_p = 0;
4245 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4246 /* And the pass manager will dump RTL for us. */
4249 /* If we're emitting a nested function, make sure its parent gets
4250 emitted as well. Doing otherwise confuses debug info. */
4253 for (parent = DECL_CONTEXT (current_function_decl);
4254 parent != NULL_TREE;
4255 parent = get_containing_scope (parent))
4256 if (TREE_CODE (parent) == FUNCTION_DECL)
4257 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4260 /* We are now committed to emitting code for this function. Do any
4261 preparation, such as emitting abstract debug info for the inline function
4262 before it gets mangled by optimization. */
4263 if (cgraph_function_possibly_inlined_p (current_function_decl))
4264 (*debug_hooks->outlining_inline_function) (current_function_decl);
4266 TREE_ASM_WRITTEN (current_function_decl) = 1;
4268 /* After expanding, the return labels are no longer needed. */
4269 return_label = NULL;
4270 naked_return_label = NULL;
4271 /* Tag the blocks with a depth number so that change_scope can find
4272 the common parent easily. */
4273 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4274 default_rtl_profile ();
4275 timevar_pop (TV_POST_EXPAND);
4279 struct rtl_opt_pass pass_expand =
4283 "expand", /* name */
4285 gimple_expand_cfg, /* execute */
4288 0, /* static_pass_number */
4289 TV_EXPAND, /* tv_id */
4290 PROP_ssa | PROP_gimple_leh | PROP_cfg
4291 | PROP_gimple_lcx, /* properties_required */
4292 PROP_rtl, /* properties_provided */
4293 PROP_ssa | PROP_trees, /* properties_destroyed */
4294 TODO_verify_ssa | TODO_verify_flow
4295 | TODO_verify_stmts, /* todo_flags_start */
4297 | TODO_ggc_collect /* todo_flags_finish */