1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
47 #include "ssaexpand.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
53 to rtl. */
54 struct ssaexpand SA;
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 /* Return an expression tree corresponding to the RHS of GIMPLE
61 statement STMT. */
64 gimple_assign_rhs_to_tree (gimple stmt)
67 enum gimple_rhs_class grhs_class;
69 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71 if (grhs_class == GIMPLE_TERNARY_RHS)
72 t = build3 (gimple_assign_rhs_code (stmt),
73 TREE_TYPE (gimple_assign_lhs (stmt)),
74 gimple_assign_rhs1 (stmt),
75 gimple_assign_rhs2 (stmt),
76 gimple_assign_rhs3 (stmt));
77 else if (grhs_class == GIMPLE_BINARY_RHS)
78 t = build2 (gimple_assign_rhs_code (stmt),
79 TREE_TYPE (gimple_assign_lhs (stmt)),
80 gimple_assign_rhs1 (stmt),
81 gimple_assign_rhs2 (stmt));
82 else if (grhs_class == GIMPLE_UNARY_RHS)
83 t = build1 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt));
86 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 t = gimple_assign_rhs1 (stmt);
89 /* Avoid modifying this tree in place below. */
90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
91 && gimple_location (stmt) != EXPR_LOCATION (t))
92 || (gimple_block (stmt)
93 && currently_expanding_to_rtl
94 && EXPR_P (t)
95 && gimple_block (stmt) != TREE_BLOCK (t)))
96 t = copy_node (t);
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
103 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
104 TREE_BLOCK (t) = gimple_block (stmt);
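/* As a rough illustration (not taken from any particular dump): a GIMPLE
   assignment "a_1 = b_2 + c_3" has a GIMPLE_BINARY_RHS and is rebuilt above
   as the GENERIC tree PLUS_EXPR <b_2, c_3> carrying the type of a_1, while a
   plain copy "a_1 = b_2" is GIMPLE_SINGLE_RHS and rhs1 is handed back
   directly, copied only when its location or block would otherwise have to
   be changed in place.  */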
110 #ifndef STACK_ALIGNMENT_NEEDED
111 #define STACK_ALIGNMENT_NEEDED 1
114 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
116 /* Associate declaration T with storage space X. If T is not an
117 SSA name, this is exactly SET_DECL_RTL; otherwise make the
118 partition of T associated with X. */
120 set_rtl (tree t, rtx x)
122 if (TREE_CODE (t) == SSA_NAME)
124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
127 /* For the benefit of debug information at -O0 (where vartracking
128 doesn't run) record the place also in the base DECL if it's
129 a normal variable (not a parameter). */
130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 tree var = SSA_NAME_VAR (t);
133 /* If we don't yet have something recorded, just record it now. */
134 if (!DECL_RTL_SET_P (var))
135 SET_DECL_RTL (var, x);
136 /* If we have it set already to "multiple places" don't
137 change this. */
138 else if (DECL_RTL (var) == pc_rtx)
140 /* If we have something recorded and it's not the same place
141 as we want to record now, we have multiple partitions for the
142 same base variable, with different places. We can't just
143 randomly choose one, hence we have to say that we don't know.
144 This only happens with optimization, and there var-tracking
145 will figure out the right thing. */
146 else if (DECL_RTL (var) != x)
147 SET_DECL_RTL (var, pc_rtx);
154 /* This structure holds data relevant to one variable that will be
155 placed in a stack slot. */
161 /* The offset of the variable. During partitioning, this is the
162 offset relative to the partition. After partitioning, this
163 is relative to the stack frame. */
164 HOST_WIDE_INT offset;
166 /* Initially, the size of the variable. Later, the size of the partition,
167 if this variable becomes its partition's representative. */
170 /* The *byte* alignment required for this variable. Or, as with the
171 size, the alignment for this partition. */
174 /* The partition representative. */
175 size_t representative;
177 /* The next stack variable in the partition, or EOC. */
180 /* The numbers of conflicting stack variables. */
184 #define EOC ((size_t)-1)
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
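/* For illustration only: if STARTING_FRAME_OFFSET were 8 with a
   PREFERRED_STACK_BOUNDARY of 128 bits (16 bytes), the frame would begin
   8 bytes past a 16-byte boundary, giving frame_phase == 8, and every
   offset handed out below keeps (frame_offset + 8) a multiple of 16.  */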
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
204 /* Used during expand_used_vars. Remember if we saw a character buffer
205 smaller than our cutoff threshold. Used for -Wstack-protector. */
206 static bool has_short_buffer;
208 /* Compute the byte alignment to use for DECL. Ignore any alignment
209 we cannot honor given the expected alignment of the stack boundary. */
212 align_local_variable (tree decl)
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
215 DECL_ALIGN (decl) = align;
216 return align / BITS_PER_UNIT;
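/* Note that DECL_ALIGN is kept in bits while the stack-variable code below
   works in bytes, hence the division by BITS_PER_UNIT above.  */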
219 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
220 Return the frame offset. */
223 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
225 HOST_WIDE_INT offset, new_frame_offset;
227 new_frame_offset = frame_offset;
228 if (FRAME_GROWS_DOWNWARD)
230 new_frame_offset -= size + frame_phase;
231 new_frame_offset &= -align;
232 new_frame_offset += frame_phase;
233 offset = new_frame_offset;
237 new_frame_offset -= frame_phase;
238 new_frame_offset += align - 1;
239 new_frame_offset &= -align;
240 new_frame_offset += frame_phase;
241 offset = new_frame_offset;
242 new_frame_offset += size;
244 frame_offset = new_frame_offset;
246 if (frame_offset_overflow (frame_offset, cfun->decl))
247 frame_offset = offset = 0;
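/* A worked example, assuming a downward-growing frame with frame_phase == 0
   and frame_offset == -32: a request for SIZE 12 at ALIGN 16 computes
   -32 - 12 = -44, rounds down to -48 via the &= -align step, records -48 as
   the new slot's offset and leaves frame_offset at -48 for the next
   allocation.  */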
252 /* Accumulate DECL into STACK_VARS. */
255 add_stack_var (tree decl)
259 if (stack_vars_num >= stack_vars_alloc)
261 if (stack_vars_alloc)
262 stack_vars_alloc = stack_vars_alloc * 3 / 2;
264 stack_vars_alloc = 32;
266 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
268 v = &stack_vars[stack_vars_num];
272 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
273 /* Ensure that all variables have size, so that &a != &b for any two
274 variables that are simultaneously live. */
275 if (v->size == 0)
276 v->size = 1;
277 v->alignb = align_local_variable (SSAVAR (decl));
279 /* All variables are initially in their own partition. */
280 v->representative = stack_vars_num;
281 v->next = EOC;
283 /* All variables initially conflict with no other. */
284 v->conflicts = NULL;
286 /* Ensure that this decl doesn't get put onto the list twice. */
287 set_rtl (decl, pc_rtx);
292 /* Make the decls associated with luid's X and Y conflict. */
295 add_stack_var_conflict (size_t x, size_t y)
297 struct stack_var *a = &stack_vars[x];
298 struct stack_var *b = &stack_vars[y];
299 if (!a->conflicts)
300 a->conflicts = BITMAP_ALLOC (NULL);
301 if (!b->conflicts)
302 b->conflicts = BITMAP_ALLOC (NULL);
303 bitmap_set_bit (a->conflicts, y);
304 bitmap_set_bit (b->conflicts, x);
307 /* Check whether the decls associated with luid's X and Y conflict. */
310 stack_var_conflict_p (size_t x, size_t y)
312 struct stack_var *a = &stack_vars[x];
313 struct stack_var *b = &stack_vars[y];
314 if (!a->conflicts || !b->conflicts)
315 return false;
316 return bitmap_bit_p (a->conflicts, y);
319 /* Returns true if TYPE is or contains a union type. */
322 aggregate_contains_union_type (tree type)
326 if (TREE_CODE (type) == UNION_TYPE
327 || TREE_CODE (type) == QUAL_UNION_TYPE)
328 return true;
329 if (TREE_CODE (type) == ARRAY_TYPE)
330 return aggregate_contains_union_type (TREE_TYPE (type));
331 if (TREE_CODE (type) != RECORD_TYPE)
332 return false;
334 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
335 if (TREE_CODE (field) == FIELD_DECL)
336 if (aggregate_contains_union_type (TREE_TYPE (field)))
337 return true;
339 return false;
342 /* A subroutine of expand_used_vars. If two variables X and Y have alias
343 sets that do not conflict, then do add a conflict for these variables
344 in the interference graph. We also need to make sure to add conflicts
345 for union containing structures. Else RTL alias analysis comes along
346 and due to type based aliasing rules decides that for two overlapping
347 union temporaries { short s; int i; } accesses to the same mem through
348 different types may not alias and happily reorders stores across
349 life-time boundaries of the temporaries (See PR25654).
350 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
353 add_alias_set_conflicts (void)
355 size_t i, j, n = stack_vars_num;
357 for (i = 0; i < n; ++i)
359 tree type_i = TREE_TYPE (stack_vars[i].decl);
360 bool aggr_i = AGGREGATE_TYPE_P (type_i);
363 contains_union = aggregate_contains_union_type (type_i);
364 for (j = 0; j < i; ++j)
366 tree type_j = TREE_TYPE (stack_vars[j].decl);
367 bool aggr_j = AGGREGATE_TYPE_P (type_j);
369 /* Either the objects conflict by means of type based
370 aliasing rules, or we need to add a conflict. */
371 || !objects_must_conflict_p (type_i, type_j)
372 /* In case the types do not conflict ensure that access
373 to elements will conflict. In case of unions we have
374 to be careful as type based aliasing rules may say
375 access to the same memory does not conflict. So play
376 safe and add a conflict in this case. */
378 add_stack_var_conflict (i, j);
383 /* A subroutine of partition_stack_vars. A comparison function for qsort,
384 sorting an array of indices by the properties of the object. */
387 stack_var_cmp (const void *a, const void *b)
389 size_t ia = *(const size_t *)a;
390 size_t ib = *(const size_t *)b;
391 unsigned int aligna = stack_vars[ia].alignb;
392 unsigned int alignb = stack_vars[ib].alignb;
393 HOST_WIDE_INT sizea = stack_vars[ia].size;
394 HOST_WIDE_INT sizeb = stack_vars[ib].size;
395 tree decla = stack_vars[ia].decl;
396 tree declb = stack_vars[ib].decl;
398 unsigned int uida, uidb;
400 /* Primary compare on "large" alignment. Large comes first. */
401 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
402 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
403 if (largea != largeb)
404 return (int)largeb - (int)largea;
406 /* Secondary compare on size, decreasing */
412 /* Tertiary compare on true alignment, decreasing. */
418 /* Final compare on ID for sort stability, increasing.
419 Two SSA names are compared by their version, SSA names come before
420 non-SSA names, and two normal decls are compared by their DECL_UID. */
421 if (TREE_CODE (decla) == SSA_NAME)
423 if (TREE_CODE (declb) == SSA_NAME)
424 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
428 else if (TREE_CODE (declb) == SSA_NAME)
431 uida = DECL_UID (decla), uidb = DECL_UID (declb);
440 /* If the points-to solution *PT points to variables that are in a partition
441 together with other variables, add all partition members to the pointed-to
442 variables bitmap. */
445 add_partitioned_vars_to_ptset (struct pt_solution *pt,
446 struct pointer_map_t *decls_to_partitions,
447 struct pointer_set_t *visited, bitmap temp)
455 /* The pointed-to vars bitmap is shared, it is enough to
456 visit it once. */
457 || pointer_set_insert(visited, pt->vars))
462 /* By using a temporary bitmap to store all members of the partitions
463 we have to add, we make sure to visit each of the partitions only
464 once. */
465 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
467 || !bitmap_bit_p (temp, i))
468 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
469 (void *)(size_t) i)))
470 bitmap_ior_into (temp, *part);
471 if (!bitmap_empty_p (temp))
472 bitmap_ior_into (pt->vars, temp);
475 /* Update points-to sets based on partition info, so we can use them on RTL.
476 The bitmaps representing stack partitions will be saved until expand,
477 where partitioned decls used as bases in memory expressions will be
478 rewritten. */
481 update_alias_info_with_stack_vars (void)
483 struct pointer_map_t *decls_to_partitions = NULL;
485 tree var = NULL_TREE;
487 for (i = 0; i < stack_vars_num; i++)
491 struct ptr_info_def *pi;
493 /* Not interested in partitions with a single variable. */
494 if (stack_vars[i].representative != i
495 || stack_vars[i].next == EOC)
498 if (!decls_to_partitions)
500 decls_to_partitions = pointer_map_create ();
501 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
504 /* Create an SSA_NAME that points to the partition for use
505 as base during alias-oracle queries on RTL for bases that
506 have been partitioned. */
507 if (var == NULL_TREE)
508 var = create_tmp_var (ptr_type_node, NULL);
509 name = make_ssa_name (var, NULL);
511 /* Create bitmaps representing partitions. They will be used for
512 points-to sets later, so use GGC alloc. */
513 part = BITMAP_GGC_ALLOC ();
514 for (j = i; j != EOC; j = stack_vars[j].next)
516 tree decl = stack_vars[j].decl;
517 unsigned int uid = DECL_PT_UID (decl);
518 /* We should never end up partitioning SSA names (though they
519 may end up on the stack). Neither should we allocate stack
520 space to something that is unused and thus unreferenced, except
521 for -O0 where we are preserving even unreferenced variables. */
522 gcc_assert (DECL_P (decl)
524 || referenced_var_lookup (cfun, DECL_UID (decl))));
525 bitmap_set_bit (part, uid);
526 *((bitmap *) pointer_map_insert (decls_to_partitions,
527 (void *)(size_t) uid)) = part;
528 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
532 /* Make the SSA name point to all partition members. */
533 pi = get_ptr_info (name);
534 pt_solution_set (&pi->pt, part, false, false);
537 /* Make all points-to sets that contain one member of a partition
538 contain all members of the partition. */
539 if (decls_to_partitions)
542 struct pointer_set_t *visited = pointer_set_create ();
543 bitmap temp = BITMAP_ALLOC (NULL);
545 for (i = 1; i < num_ssa_names; i++)
547 tree name = ssa_name (i);
548 struct ptr_info_def *pi;
551 && POINTER_TYPE_P (TREE_TYPE (name))
552 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
553 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
557 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
558 decls_to_partitions, visited, temp);
560 pointer_set_destroy (visited);
561 pointer_map_destroy (decls_to_partitions);
566 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
567 partitioning algorithm. Partitions A and B are known to be non-conflicting.
568 Merge them into a single partition A.
570 At the same time, add OFFSET to all variables in partition B. At the end
571 of the partitioning process we'll have a nice block easy to lay out within
572 the stack frame. */
575 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
578 struct stack_var *vb = &stack_vars[b];
582 /* Update each element of partition B with the given offset,
583 and merge them into partition A. */
584 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
586 stack_vars[i].offset += offset;
587 stack_vars[i].representative = a;
589 stack_vars[last].next = stack_vars[a].next;
590 stack_vars[a].next = b;
592 /* Update the required alignment of partition A to account for B. */
593 if (stack_vars[a].alignb < stack_vars[b].alignb)
594 stack_vars[a].alignb = stack_vars[b].alignb;
596 /* Update the interference graph and merge the conflicts. */
599 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
600 add_stack_var_conflict (a, stack_vars[u].representative);
601 BITMAP_FREE (vb->conflicts);
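/* For example, merging a partition B with (hypothetical) members y and z and
   byte alignment 8 into partition A at OFFSET 32 relinks y and z onto A's
   member chain with their offsets shifted by 32, raises A's alignment to at
   least 8, and makes A conflict with the representatives of everything B
   conflicted with.  */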
605 /* A subroutine of expand_used_vars. Binpack the variables into
606 partitions constrained by the interference graph. The overall
607 algorithm used is as follows:
609 Sort the objects by size.
610 For each object A {
611 S = size(A)
612 O = 0
613 loop {
614 Look for the largest non-conflicting object B with size <= S.
615 UNION (A, B)
616 offset(B) = O
617 O += size(B)
618 S -= size(B)
619 }
620 }
621 */
624 partition_stack_vars (void)
626 size_t si, sj, n = stack_vars_num;
628 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
629 for (si = 0; si < n; ++si)
630 stack_vars_sorted[si] = si;
635 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
637 for (si = 0; si < n; ++si)
639 size_t i = stack_vars_sorted[si];
640 HOST_WIDE_INT isize = stack_vars[i].size;
641 unsigned int ialign = stack_vars[i].alignb;
642 HOST_WIDE_INT offset = 0;
644 for (sj = si; sj-- > 0; )
646 size_t j = stack_vars_sorted[sj];
647 HOST_WIDE_INT jsize = stack_vars[j].size;
648 unsigned int jalign = stack_vars[j].alignb;
650 /* Ignore objects that aren't partition representatives. */
651 if (stack_vars[j].representative != j)
654 /* Ignore objects too large for the remaining space. */
658 /* Ignore conflicting objects. */
659 if (stack_var_conflict_p (i, j))
662 /* Do not mix objects of "small" (supported) alignment
663 and "large" (unsupported) alignment. */
664 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
665 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
668 /* Refine the remaining space check to include alignment. */
669 if (offset & (jalign - 1))
671 HOST_WIDE_INT toff = offset;
673 toff &= -(HOST_WIDE_INT)jalign;
674 if (isize - (toff - offset) < jsize)
677 isize -= toff - offset;
681 /* UNION the objects, placing J at OFFSET. */
682 union_stack_vars (i, j, offset);
690 update_alias_info_with_stack_vars ();
693 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
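/* When TDF_DETAILS dumping is enabled for this pass, the output of the
   fprintf calls below looks roughly like

     Partition 3: size 64 align 16
             buf, offset 0
             tmp, offset 32

   with one header line per representative and one line per member (the
   names and numbers here are only illustrative).  */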
696 dump_stack_var_partition (void)
698 size_t si, i, j, n = stack_vars_num;
700 for (si = 0; si < n; ++si)
702 i = stack_vars_sorted[si];
704 /* Skip variables that aren't partition representatives, for now. */
705 if (stack_vars[i].representative != i)
708 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
709 " align %u\n", (unsigned long) i, stack_vars[i].size,
710 stack_vars[i].alignb);
712 for (j = i; j != EOC; j = stack_vars[j].next)
714 fputc ('\t', dump_file);
715 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
716 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
717 stack_vars[j].offset);
722 /* Assign rtl to DECL at BASE + OFFSET. */
725 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
726 HOST_WIDE_INT offset)
731 /* If this fails, we've overflowed the stack frame. Error nicely? */
732 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
734 x = plus_constant (base, offset);
735 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
737 if (TREE_CODE (decl) != SSA_NAME)
739 /* Set the alignment we actually gave this decl if it isn't an SSA name.
740 If it is, we generate stack slots only accidentally so it isn't as
741 important; we'll simply use the alignment that is already set. */
742 if (base == virtual_stack_vars_rtx)
743 offset -= frame_phase;
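/* OFFSET & -OFFSET below isolates the lowest set bit of the offset, i.e. the
   largest power of two dividing it, which is the strongest alignment this
   particular frame offset can actually guarantee.  */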
744 align = offset & -offset;
745 align *= BITS_PER_UNIT;
746 if (align == 0 || align > base_align)
749 /* One would think that we could assert that we're not decreasing
750 alignment here, but (at least) the i386 port does exactly this
751 via the MINIMUM_ALIGNMENT hook. */
753 DECL_ALIGN (decl) = align;
754 DECL_USER_ALIGN (decl) = 0;
757 set_mem_attributes (x, SSAVAR (decl), true);
761 /* A subroutine of expand_used_vars. Give each partition representative
762 a unique location within the stack frame. Update each partition member
763 with that location. */
766 expand_stack_vars (bool (*pred) (tree))
768 size_t si, i, j, n = stack_vars_num;
769 HOST_WIDE_INT large_size = 0, large_alloc = 0;
770 rtx large_base = NULL;
771 unsigned large_align = 0;
774 /* Determine if there are any variables requiring "large" alignment.
775 Since these are dynamically allocated, we only process these if
776 no predicate is involved. */
777 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
778 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
780 /* Find the total size of these variables. */
781 for (si = 0; si < n; ++si)
785 i = stack_vars_sorted[si];
786 alignb = stack_vars[i].alignb;
788 /* Stop when we get to the first decl with "small" alignment. */
789 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
792 /* Skip variables that aren't partition representatives. */
793 if (stack_vars[i].representative != i)
796 /* Skip variables that have already had rtl assigned. See also
797 add_stack_var where we perpetrate this pc_rtx hack. */
798 decl = stack_vars[i].decl;
799 if ((TREE_CODE (decl) == SSA_NAME
800 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
801 : DECL_RTL (decl)) != pc_rtx)
804 large_size += alignb - 1;
805 large_size &= -(HOST_WIDE_INT)alignb;
806 large_size += stack_vars[i].size;
809 /* If there were any, allocate space. */
811 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
815 for (si = 0; si < n; ++si)
818 unsigned base_align, alignb;
819 HOST_WIDE_INT offset;
821 i = stack_vars_sorted[si];
823 /* Skip variables that aren't partition representatives, for now. */
824 if (stack_vars[i].representative != i)
827 /* Skip variables that have already had rtl assigned. See also
828 add_stack_var where we perpetrate this pc_rtx hack. */
829 decl = stack_vars[i].decl;
830 if ((TREE_CODE (decl) == SSA_NAME
831 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
832 : DECL_RTL (decl)) != pc_rtx)
835 /* Check the predicate to see whether this variable should be
836 allocated in this pass. */
837 if (pred && !pred (decl))
840 alignb = stack_vars[i].alignb;
841 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
843 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
844 base = virtual_stack_vars_rtx;
845 base_align = crtl->max_used_stack_slot_alignment;
849 /* Large alignment is only processed in the last pass. */
852 gcc_assert (large_base != NULL);
854 large_alloc += alignb - 1;
855 large_alloc &= -(HOST_WIDE_INT)alignb;
856 offset = large_alloc;
857 large_alloc += stack_vars[i].size;
860 base_align = large_align;
863 /* Create rtl for each variable based on its location within the
864 partition. */
865 for (j = i; j != EOC; j = stack_vars[j].next)
867 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
868 expand_one_stack_var_at (stack_vars[j].decl,
870 stack_vars[j].offset + offset);
874 gcc_assert (large_alloc == large_size);
877 /* Take into account all sizes of partitions and reset DECL_RTLs. */
879 account_stack_vars (void)
881 size_t si, j, i, n = stack_vars_num;
882 HOST_WIDE_INT size = 0;
884 for (si = 0; si < n; ++si)
886 i = stack_vars_sorted[si];
888 /* Skip variables that aren't partition representatives, for now. */
889 if (stack_vars[i].representative != i)
892 size += stack_vars[i].size;
893 for (j = i; j != EOC; j = stack_vars[j].next)
894 set_rtl (stack_vars[j].decl, NULL);
899 /* A subroutine of expand_one_var. Called to immediately assign rtl
900 to a variable to be allocated in the stack frame. */
903 expand_one_stack_var (tree var)
905 HOST_WIDE_INT size, offset;
908 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
909 byte_align = align_local_variable (SSAVAR (var));
911 /* We handle highly aligned variables in expand_stack_vars. */
912 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
914 offset = alloc_stack_frame_space (size, byte_align);
916 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
917 crtl->max_used_stack_slot_alignment, offset);
920 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
921 that will reside in a hard register. */
924 expand_one_hard_reg_var (tree var)
926 rest_of_decl_compilation (var, 0, 0);
929 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
930 that will reside in a pseudo register. */
933 expand_one_register_var (tree var)
935 tree decl = SSAVAR (var);
936 tree type = TREE_TYPE (decl);
937 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
938 rtx x = gen_reg_rtx (reg_mode);
942 /* Note if the object is a user variable. */
943 if (!DECL_ARTIFICIAL (decl))
946 if (POINTER_TYPE_P (type))
947 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
950 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
951 has some associated error, e.g. its type is error-mark. We just need
952 to pick something that won't crash the rest of the compiler. */
955 expand_one_error_var (tree var)
957 enum machine_mode mode = DECL_MODE (var);
961 x = gen_rtx_MEM (BLKmode, const0_rtx);
962 else if (mode == VOIDmode)
965 x = gen_reg_rtx (mode);
967 SET_DECL_RTL (var, x);
970 /* A subroutine of expand_one_var. VAR is a variable that will be
971 allocated to the local stack frame. Return true if we wish to
972 add VAR to STACK_VARS so that it will be coalesced with other
973 variables. Return false to allocate VAR immediately.
975 This function is used to reduce the number of variables considered
976 for coalescing, which reduces the size of the quadratic problem. */
979 defer_stack_allocation (tree var, bool toplevel)
981 /* If stack protection is enabled, *all* stack variables must be deferred,
982 so that we can re-order the strings to the top of the frame. */
983 if (flag_stack_protect)
986 /* We handle "large" alignment via dynamic allocation. We want to handle
987 this extra complication in only one place, so defer them. */
988 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
991 /* Variables in the outermost scope automatically conflict with
992 every other variable. The only reason to want to defer them
993 at all is that, after sorting, we can more efficiently pack
994 small variables in the stack frame. Continue to defer at -O2. */
995 if (toplevel && optimize < 2)
998 /* Without optimization, *most* variables are allocated from the
999 stack, which makes the quadratic problem large exactly when we
1000 want compilation to proceed as quickly as possible. On the
1001 other hand, we don't want the function's stack frame size to
1002 get completely out of hand. So we avoid adding scalars and
1003 "small" aggregates to the list at all. */
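/* So, as an illustration, at -O0 a plain "int i" or other small local is
   laid out immediately, while something like "char buf[64]" declared in an
   inner scope is still deferred so it can be packed with the other deferred
   variables (and with -fstack-protector everything is deferred regardless). */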
1004 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1010 /* A subroutine of expand_used_vars. Expand one variable according to
1011 its flavor. Variables to be placed on the stack are not actually
1012 expanded yet, merely recorded.
1013 When REALLY_EXPAND is false, only add stack values to be allocated.
1014 Return the stack usage this variable is supposed to take.
1017 static HOST_WIDE_INT
1018 expand_one_var (tree var, bool toplevel, bool really_expand)
1020 unsigned int align = BITS_PER_UNIT;
1025 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1027 /* Because we don't know if VAR will be in a register or on the stack,
1028 we conservatively assume it will be on the stack even if VAR is
1029 eventually put into a register after the RA pass. For non-automatic
1030 variables, which won't be on the stack, we collect the alignment of
1031 the type and ignore user-specified alignment. */
1032 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1033 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1034 TYPE_MODE (TREE_TYPE (var)),
1035 TYPE_ALIGN (TREE_TYPE (var)));
1036 else if (DECL_HAS_VALUE_EXPR_P (var)
1037 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1038 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1039 or variables which were assigned a stack slot already by
1040 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1041 changed from the offset chosen to it. */
1042 align = crtl->stack_alignment_estimated;
1044 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1046 /* If the variable alignment is very large we'll dynamically allocate
1047 it, which means that the in-frame portion is just a pointer. */
1048 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1049 align = POINTER_SIZE;
1052 if (SUPPORTS_STACK_ALIGNMENT
1053 && crtl->stack_alignment_estimated < align)
1055 /* stack_alignment_estimated shouldn't change after stack
1056 realign decision is made. */
1057 gcc_assert(!crtl->stack_realign_processed);
1058 crtl->stack_alignment_estimated = align;
1061 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1062 So here we only make sure stack_alignment_needed >= align. */
1063 if (crtl->stack_alignment_needed < align)
1064 crtl->stack_alignment_needed = align;
1065 if (crtl->max_used_stack_slot_alignment < align)
1066 crtl->max_used_stack_slot_alignment = align;
1068 if (TREE_CODE (origvar) == SSA_NAME)
1070 gcc_assert (TREE_CODE (var) != VAR_DECL
1071 || (!DECL_EXTERNAL (var)
1072 && !DECL_HAS_VALUE_EXPR_P (var)
1073 && !TREE_STATIC (var)
1074 && TREE_TYPE (var) != error_mark_node
1075 && !DECL_HARD_REGISTER (var)
1078 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1080 else if (DECL_EXTERNAL (var))
1082 else if (DECL_HAS_VALUE_EXPR_P (var))
1084 else if (TREE_STATIC (var))
1086 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1088 else if (TREE_TYPE (var) == error_mark_node)
1091 expand_one_error_var (var);
1093 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1096 expand_one_hard_reg_var (var);
1098 else if (use_register_for_decl (var))
1101 expand_one_register_var (origvar);
1103 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1107 error ("size of variable %q+D is too large", var);
1108 expand_one_error_var (var);
1111 else if (defer_stack_allocation (var, toplevel))
1112 add_stack_var (origvar);
1116 expand_one_stack_var (origvar);
1117 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1122 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1123 expanding variables. Those variables that can be put into registers
1124 are allocated pseudos; those that can't are put on the stack.
1126 TOPLEVEL is true if this is the outermost BLOCK. */
1129 expand_used_vars_for_block (tree block, bool toplevel)
1131 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1134 old_sv_num = toplevel ? 0 : stack_vars_num;
1136 /* Expand all variables at this level. */
1137 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1139 expand_one_var (t, toplevel, true);
1141 this_sv_num = stack_vars_num;
1143 /* Expand all variables at containing levels. */
1144 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1145 expand_used_vars_for_block (t, false);
1147 /* Since we do not track exact variable lifetimes (which is not even
1148 possible for variables whose address escapes), we mirror the block
1149 tree in the interference graph. Here we cause all variables at this
1150 level, and all sublevels, to conflict. */
1151 if (old_sv_num < this_sv_num)
1153 new_sv_num = stack_vars_num;
1155 for (i = old_sv_num; i < new_sv_num; ++i)
1156 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1157 add_stack_var_conflict (i, j);
1161 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1162 and clear TREE_USED on all local variables. */
1165 clear_tree_used (tree block)
1169 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1170 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1171 TREE_USED (t) = 0;
1173 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1174 clear_tree_used (t);
1177 /* Examine TYPE and determine a bit mask of the following features. */
1179 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1180 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1181 #define SPCT_HAS_ARRAY 4
1182 #define SPCT_HAS_AGGREGATE 8
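/* For illustration, with the default --param ssp-buffer-size=8:
   "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "int v[16]" as just SPCT_HAS_ARRAY, and a struct containing a char array
   additionally sets SPCT_HAS_AGGREGATE.  */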
1185 stack_protect_classify_type (tree type)
1187 unsigned int ret = 0;
1190 switch (TREE_CODE (type))
1193 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1194 if (t == char_type_node
1195 || t == signed_char_type_node
1196 || t == unsigned_char_type_node)
1198 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1199 unsigned HOST_WIDE_INT len;
1201 if (!TYPE_SIZE_UNIT (type)
1202 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1205 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1208 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1210 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1213 ret = SPCT_HAS_ARRAY;
1217 case QUAL_UNION_TYPE:
1219 ret = SPCT_HAS_AGGREGATE;
1220 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1221 if (TREE_CODE (t) == FIELD_DECL)
1222 ret |= stack_protect_classify_type (TREE_TYPE (t));
1232 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1233 part of the local stack frame. Remember if we ever return nonzero for
1234 any variable in this function. The return value is the phase number in
1235 which the variable should be allocated. */
1238 stack_protect_decl_phase (tree decl)
1240 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1243 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1244 has_short_buffer = true;
1246 if (flag_stack_protect == 2)
1248 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1249 && !(bits & SPCT_HAS_AGGREGATE))
1251 else if (bits & SPCT_HAS_ARRAY)
1255 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1258 has_protected_decls = true;
1263 /* Two helper routines that check for phase 1 and phase 2. These are used
1264 as callbacks for expand_stack_vars. */
1267 stack_protect_decl_phase_1 (tree decl)
1269 return stack_protect_decl_phase (decl) == 1;
1273 stack_protect_decl_phase_2 (tree decl)
1275 return stack_protect_decl_phase (decl) == 2;
1278 /* Ensure that variables in different stack protection phases conflict
1279 so that they are not merged and share the same stack slot. */
1282 add_stack_protection_conflicts (void)
1284 size_t i, j, n = stack_vars_num;
1285 unsigned char *phase;
1287 phase = XNEWVEC (unsigned char, n);
1288 for (i = 0; i < n; ++i)
1289 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1291 for (i = 0; i < n; ++i)
1293 unsigned char ph_i = phase[i];
1294 for (j = 0; j < i; ++j)
1295 if (ph_i != phase[j])
1296 add_stack_var_conflict (i, j);
1302 /* Create a decl for the guard at the top of the stack frame. */
1305 create_stack_guard (void)
1307 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1308 VAR_DECL, NULL, ptr_type_node);
1309 TREE_THIS_VOLATILE (guard) = 1;
1310 TREE_USED (guard) = 1;
1311 expand_one_stack_var (guard);
1312 crtl->stack_protect_guard = guard;
1315 /* Prepare for expanding variables. */
1317 init_vars_expansion (void)
1321 /* Set TREE_USED on all variables in the local_decls. */
1322 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1325 /* Clear TREE_USED on all variables associated with a block scope. */
1326 clear_tree_used (DECL_INITIAL (current_function_decl));
1328 /* Initialize local stack smashing state. */
1329 has_protected_decls = false;
1330 has_short_buffer = false;
1333 /* Free up stack variable graph data. */
1335 fini_vars_expansion (void)
1337 size_t i, n = stack_vars_num;
1338 for (i = 0; i < n; i++)
1339 BITMAP_FREE (stack_vars[i].conflicts);
1340 XDELETEVEC (stack_vars);
1341 XDELETEVEC (stack_vars_sorted);
1343 stack_vars_alloc = stack_vars_num = 0;
1346 /* Make a fair guess for the size of the stack frame of the function
1347 in NODE. This doesn't have to be exact, the result is only used in
1348 the inline heuristics. So we don't want to run the full stack var
1349 packing algorithm (which is quadratic in the number of stack vars).
1350 Instead, we calculate the total size of all stack vars. This turns
1351 out to be a pretty fair estimate -- packing of stack vars doesn't
1352 happen very often. */
1355 estimated_stack_frame_size (struct cgraph_node *node)
1357 HOST_WIDE_INT size = 0;
1360 tree old_cur_fun_decl = current_function_decl;
1361 referenced_var_iterator rvi;
1362 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1364 current_function_decl = node->decl;
1367 gcc_checking_assert (gimple_referenced_vars (fn));
1368 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1369 size += expand_one_var (var, true, false);
1371 if (stack_vars_num > 0)
1373 /* Fake sorting the stack vars for account_stack_vars (). */
1374 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1375 for (i = 0; i < stack_vars_num; ++i)
1376 stack_vars_sorted[i] = i;
1377 size += account_stack_vars ();
1378 fini_vars_expansion ();
1381 current_function_decl = old_cur_fun_decl;
1385 /* Expand all variables used in the function. */
1388 expand_used_vars (void)
1390 tree var, outer_block = DECL_INITIAL (current_function_decl);
1391 VEC(tree,heap) *maybe_local_decls = NULL;
1395 /* Compute the phase of the stack frame for this function. */
1397 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1398 int off = STARTING_FRAME_OFFSET % align;
1399 frame_phase = off ? align - off : 0;
1402 init_vars_expansion ();
1404 for (i = 0; i < SA.map->num_partitions; i++)
1406 tree var = partition_to_var (SA.map, i);
1408 gcc_assert (is_gimple_reg (var));
1409 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1410 expand_one_var (var, true, true);
1413 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1414 contain the default def (representing the parm or result itself)
1415 we don't do anything here. But those which don't contain the
1416 default def (representing a temporary based on the parm/result)
1417 we need to allocate space just like for normal VAR_DECLs. */
1418 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1420 expand_one_var (var, true, true);
1421 gcc_assert (SA.partition_to_pseudo[i]);
1426 /* At this point all variables on the local_decls with TREE_USED
1427 set are not associated with any block scope. Lay them out. */
1429 len = VEC_length (tree, cfun->local_decls);
1430 FOR_EACH_LOCAL_DECL (cfun, i, var)
1432 bool expand_now = false;
1434 /* Expanded above already. */
1435 if (is_gimple_reg (var))
1437 TREE_USED (var) = 0;
1440 /* We didn't set a block for static or extern because it's hard
1441 to tell the difference between a global variable (re)declared
1442 in a local scope, and one that's really declared there to
1443 begin with. And it doesn't really matter much, since we're
1444 not giving them stack space. Expand them now. */
1445 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1448 /* If the variable is not associated with any block, then it
1449 was created by the optimizers, and could be live anywhere
1450 in the function. */
1451 else if (TREE_USED (var))
1454 /* Finally, mark all variables on the list as used. We'll use
1455 this in a moment when we expand those associated with scopes. */
1456 TREE_USED (var) = 1;
1459 expand_one_var (var, true, true);
1462 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1464 rtx rtl = DECL_RTL_IF_SET (var);
1466 /* Keep artificial non-ignored vars in cfun->local_decls
1467 chain until instantiate_decls. */
1468 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1469 add_local_decl (cfun, var);
1470 else if (rtl == NULL_RTX)
1471 /* If rtl isn't set yet, which can happen e.g. with
1472 -fstack-protector, retry before returning from this
1473 function. */
1474 VEC_safe_push (tree, heap, maybe_local_decls, var);
1478 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1480 +-----------------+-----------------+
1481 | ...processed... | ...duplicates...|
1482 +-----------------+-----------------+
1483 ^
1484 +-- LEN points here.
1486 We just want the duplicates, as those are the artificial
1487 non-ignored vars that we want to keep until instantiate_decls.
1488 Move them down and truncate the array. */
1489 if (!VEC_empty (tree, cfun->local_decls))
1490 VEC_block_remove (tree, cfun->local_decls, 0, len);
1492 /* At this point, all variables within the block tree with TREE_USED
1493 set are actually used by the optimized function. Lay them out. */
1494 expand_used_vars_for_block (outer_block, true);
1496 if (stack_vars_num > 0)
1498 /* Due to the way alias sets work, no variables with non-conflicting
1499 alias sets may be assigned the same address. Add conflicts to
1500 reflect this. */
1501 add_alias_set_conflicts ();
1503 /* If stack protection is enabled, we don't share space between
1504 vulnerable data and non-vulnerable data. */
1505 if (flag_stack_protect)
1506 add_stack_protection_conflicts ();
1508 /* Now that we have collected all stack variables, and have computed a
1509 minimal interference graph, attempt to save some stack space. */
1510 partition_stack_vars ();
1512 dump_stack_var_partition ();
1515 /* There are several conditions under which we should create a
1516 stack guard: protect-all, alloca used, protected decls present. */
1517 if (flag_stack_protect == 2
1518 || (flag_stack_protect
1519 && (cfun->calls_alloca || has_protected_decls)))
1520 create_stack_guard ();
1522 /* Assign rtl to each variable based on these partitions. */
1523 if (stack_vars_num > 0)
1525 /* Reorder decls to be protected by iterating over the variables
1526 array multiple times, and allocating out of each phase in turn. */
1527 /* ??? We could probably integrate this into the qsort we did
1528 earlier, such that we naturally see these variables first,
1529 and thus naturally allocate things in the right order. */
1530 if (has_protected_decls)
1532 /* Phase 1 contains only character arrays. */
1533 expand_stack_vars (stack_protect_decl_phase_1);
1535 /* Phase 2 contains other kinds of arrays. */
1536 if (flag_stack_protect == 2)
1537 expand_stack_vars (stack_protect_decl_phase_2);
1540 expand_stack_vars (NULL);
1542 fini_vars_expansion ();
1545 /* If there were any artificial non-ignored vars without rtl
1546 found earlier, see if deferred stack allocation hasn't assigned
1547 rtl to them. */
1548 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1550 rtx rtl = DECL_RTL_IF_SET (var);
1552 /* Keep artificial non-ignored vars in cfun->local_decls
1553 chain until instantiate_decls. */
1554 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1555 add_local_decl (cfun, var);
1557 VEC_free (tree, heap, maybe_local_decls);
1559 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1560 if (STACK_ALIGNMENT_NEEDED)
1562 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1563 if (!FRAME_GROWS_DOWNWARD)
1564 frame_offset += align - 1;
1565 frame_offset &= -align;
1570 /* If we need to produce a detailed dump, print the tree representation
1571 for STMT to the dump file. SINCE is the last RTX after which the RTL
1572 generated for STMT should have been appended. */
1575 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1577 if (dump_file && (dump_flags & TDF_DETAILS))
1579 fprintf (dump_file, "\n;; ");
1580 print_gimple_stmt (dump_file, stmt, 0,
1581 TDF_SLIM | (dump_flags & TDF_LINENO));
1582 fprintf (dump_file, "\n");
1584 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1588 /* Maps the blocks that do not contain tree labels to rtx labels. */
1590 static struct pointer_map_t *lab_rtx_for_bb;
1592 /* Returns the label_rtx expression for a label starting basic block BB. */
1595 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1597 gimple_stmt_iterator gsi;
1602 if (bb->flags & BB_RTL)
1603 return block_label (bb);
1605 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1609 /* Find the tree label if it is present. */
1611 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1613 lab_stmt = gsi_stmt (gsi);
1614 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1617 lab = gimple_label_label (lab_stmt);
1618 if (DECL_NONLOCAL (lab))
1621 return label_rtx (lab);
1624 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1625 *elt = gen_label_rtx ();
1630 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1631 of a basic block where we just expanded the conditional at the end,
1632 possibly clean up the CFG and instruction sequence. LAST is the
1633 last instruction before the just emitted jump sequence. */
1636 maybe_cleanup_end_of_block (edge e, rtx last)
1638 /* Special case: when jumpif decides that the condition is
1639 trivial it emits an unconditional jump (and the necessary
1640 barrier). But we still have two edges, the fallthru one is
1641 wrong. purge_dead_edges would clean this up later. Unfortunately
1642 we have to insert insns (and split edges) before
1643 find_many_sub_basic_blocks and hence before purge_dead_edges.
1644 But splitting edges might create new blocks which depend on the
1645 fact that if there are two edges there's no barrier. So the
1646 barrier would get lost and verify_flow_info would ICE. Instead
1647 of auditing all edge splitters to care for the barrier (which
1648 normally isn't there in a cleaned CFG), fix it here. */
1649 if (BARRIER_P (get_last_insn ()))
1653 /* Now, we have a single successor block, if we have insns to
1654 insert on the remaining edge we potentially will insert
1655 it at the end of this block (if the dest block isn't feasible)
1656 in order to avoid splitting the edge. This insertion will take
1657 place in front of the last jump. But we might have emitted
1658 multiple jumps (conditional and one unconditional) to the
1659 same destination. Inserting in front of the last one then
1660 is a problem. See PR 40021. We fix this by deleting all
1661 jumps except the last unconditional one. */
1662 insn = PREV_INSN (get_last_insn ());
1663 /* Make sure we have an unconditional jump. Otherwise we're
1664 confused. */
1665 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1666 for (insn = PREV_INSN (insn); insn != last;)
1668 insn = PREV_INSN (insn);
1669 if (JUMP_P (NEXT_INSN (insn)))
1671 if (!any_condjump_p (NEXT_INSN (insn)))
1673 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1674 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1676 delete_insn (NEXT_INSN (insn));
1682 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1683 Returns a new basic block if we've terminated the current basic
1684 block and created a new one. */
1687 expand_gimple_cond (basic_block bb, gimple stmt)
1689 basic_block new_bb, dest;
1694 enum tree_code code;
1697 code = gimple_cond_code (stmt);
1698 op0 = gimple_cond_lhs (stmt);
1699 op1 = gimple_cond_rhs (stmt);
1700 /* We're sometimes presented with such code:
1701 D.123_1 = x < y;
1702 if (D.123_1 != 0)
1703 goto <bb 3>;
1704 This would expand to two comparisons which then later might
1705 be cleaned up by combine. But some pattern matchers like if-conversion
1706 work better when there's only one compare, so make up for this
1707 here as a special exception if TER would have made the same change. */
1708 if (gimple_cond_single_var_p (stmt)
1710 && TREE_CODE (op0) == SSA_NAME
1711 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1713 gimple second = SSA_NAME_DEF_STMT (op0);
1714 if (gimple_code (second) == GIMPLE_ASSIGN)
1716 enum tree_code code2 = gimple_assign_rhs_code (second);
1717 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1720 op0 = gimple_assign_rhs1 (second);
1721 op1 = gimple_assign_rhs2 (second);
1723 /* If jumps are cheap turn some more codes into
1724 jumpy sequences. */
1725 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1727 if ((code2 == BIT_AND_EXPR
1728 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1729 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1730 || code2 == TRUTH_AND_EXPR)
1732 code = TRUTH_ANDIF_EXPR;
1733 op0 = gimple_assign_rhs1 (second);
1734 op1 = gimple_assign_rhs2 (second);
1736 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1738 code = TRUTH_ORIF_EXPR;
1739 op0 = gimple_assign_rhs1 (second);
1740 op1 = gimple_assign_rhs2 (second);
1746 last2 = last = get_last_insn ();
1748 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1749 set_curr_insn_source_location (gimple_location (stmt));
1750 set_curr_insn_block (gimple_block (stmt));
1752 /* These flags have no purpose in RTL land. */
1753 true_edge->flags &= ~EDGE_TRUE_VALUE;
1754 false_edge->flags &= ~EDGE_FALSE_VALUE;
1756 /* We can either have a pure conditional jump with one fallthru edge or
1757 two-way jump that needs to be decomposed into two basic blocks. */
1758 if (false_edge->dest == bb->next_bb)
1760 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1761 true_edge->probability);
1762 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1763 if (true_edge->goto_locus)
1765 set_curr_insn_source_location (true_edge->goto_locus);
1766 set_curr_insn_block (true_edge->goto_block);
1767 true_edge->goto_locus = curr_insn_locator ();
1769 true_edge->goto_block = NULL;
1770 false_edge->flags |= EDGE_FALLTHRU;
1771 maybe_cleanup_end_of_block (false_edge, last);
1774 if (true_edge->dest == bb->next_bb)
1776 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1777 false_edge->probability);
1778 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1779 if (false_edge->goto_locus)
1781 set_curr_insn_source_location (false_edge->goto_locus);
1782 set_curr_insn_block (false_edge->goto_block);
1783 false_edge->goto_locus = curr_insn_locator ();
1785 false_edge->goto_block = NULL;
1786 true_edge->flags |= EDGE_FALLTHRU;
1787 maybe_cleanup_end_of_block (true_edge, last);
1791 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1792 true_edge->probability);
1793 last = get_last_insn ();
1794 if (false_edge->goto_locus)
1796 set_curr_insn_source_location (false_edge->goto_locus);
1797 set_curr_insn_block (false_edge->goto_block);
1798 false_edge->goto_locus = curr_insn_locator ();
1800 false_edge->goto_block = NULL;
1801 emit_jump (label_rtx_for_bb (false_edge->dest));
1804 if (BARRIER_P (BB_END (bb)))
1805 BB_END (bb) = PREV_INSN (BB_END (bb));
1806 update_bb_for_insn (bb);
1808 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1809 dest = false_edge->dest;
1810 redirect_edge_succ (false_edge, new_bb);
1811 false_edge->flags |= EDGE_FALLTHRU;
1812 new_bb->count = false_edge->count;
1813 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1814 new_edge = make_edge (new_bb, dest, 0);
1815 new_edge->probability = REG_BR_PROB_BASE;
1816 new_edge->count = new_bb->count;
1817 if (BARRIER_P (BB_END (new_bb)))
1818 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1819 update_bb_for_insn (new_bb);
1821 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1823 if (true_edge->goto_locus)
1825 set_curr_insn_source_location (true_edge->goto_locus);
1826 set_curr_insn_block (true_edge->goto_block);
1827 true_edge->goto_locus = curr_insn_locator ();
1829 true_edge->goto_block = NULL;
1834 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1835 statement STMT. */
1838 expand_call_stmt (gimple stmt)
1840 tree exp, decl, lhs = gimple_call_lhs (stmt);
1844 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1846 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1847 decl = gimple_call_fndecl (stmt);
1848 builtin_p = decl && DECL_BUILT_IN (decl);
1850 /* If this is not a builtin function, the function type through which the
1851 call is made may be different from the type of the function. */
1852 if (!builtin_p)
1853 CALL_EXPR_FN (exp)
1854 = fold_build1 (NOP_EXPR, build_pointer_type (gimple_call_fntype (stmt)),
1855 CALL_EXPR_FN (exp));
1857 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1858 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1860 for (i = 0; i < gimple_call_num_args (stmt); i++)
1862 tree arg = gimple_call_arg (stmt, i);
1864 /* TER addresses into arguments of builtin functions so we have a
1865 chance to infer more correct alignment information. See PR39954. */
1866 if (builtin_p
1867 && TREE_CODE (arg) == SSA_NAME
1868 && (def = get_gimple_for_ssa_name (arg))
1869 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1870 arg = gimple_assign_rhs1 (def);
1871 CALL_EXPR_ARG (exp, i) = arg;
1874 if (gimple_has_side_effects (stmt))
1875 TREE_SIDE_EFFECTS (exp) = 1;
1877 if (gimple_call_nothrow_p (stmt))
1878 TREE_NOTHROW (exp) = 1;
1880 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1881 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1882 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1883 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1884 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1885 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1886 TREE_BLOCK (exp) = gimple_block (stmt);
1888 if (lhs)
1889 expand_assignment (lhs, exp, false);
1890 else
1891 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1894 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1895 STMT that doesn't require special handling for outgoing edges. That
1896 is, no tailcalls and no GIMPLE_COND. */
1899 expand_gimple_stmt_1 (gimple stmt)
1903 set_curr_insn_source_location (gimple_location (stmt));
1904 set_curr_insn_block (gimple_block (stmt));
1906 switch (gimple_code (stmt))
1909 op0 = gimple_goto_dest (stmt);
1910 if (TREE_CODE (op0) == LABEL_DECL)
1911 expand_goto (op0);
1912 else
1913 expand_computed_goto (op0);
1916 expand_label (gimple_label_label (stmt));
1919 case GIMPLE_PREDICT:
1925 expand_asm_stmt (stmt);
1928 expand_call_stmt (stmt);
1932 op0 = gimple_return_retval (stmt);
1934 if (op0 && op0 != error_mark_node)
1936 tree result = DECL_RESULT (current_function_decl);
1938 /* If we are not returning the current function's RESULT_DECL,
1939 build an assignment to it. */
1942 /* I believe that a function's RESULT_DECL is unique. */
1943 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1945 /* ??? We'd like to use simply expand_assignment here,
1946 but this fails if the value is of BLKmode but the return
1947 decl is a register. expand_return has special handling
1948 for this combination, which eventually should move
1949 to common code. See comments there. Until then, let's
1950 build a modify expression :-/ */
1951 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1956 expand_null_return ();
1958 expand_return (op0);
1963 tree lhs = gimple_assign_lhs (stmt);
1965 /* Tree expand used to fiddle with |= and &= of two bitfield
1966 COMPONENT_REFs here. This can't happen with gimple, the LHS
1967 of binary assigns must be a gimple reg. */
1969 if (TREE_CODE (lhs) != SSA_NAME
1970 || get_gimple_rhs_class (gimple_expr_code (stmt))
1971 == GIMPLE_SINGLE_RHS)
1973 tree rhs = gimple_assign_rhs1 (stmt);
1974 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1975 == GIMPLE_SINGLE_RHS);
1976 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1977 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1978 expand_assignment (lhs, rhs,
1979 gimple_assign_nontemporal_move_p (stmt));
1984 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1985 struct separate_ops ops;
1986 bool promoted = false;
1988 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1989 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1992 ops.code = gimple_assign_rhs_code (stmt);
1993 ops.type = TREE_TYPE (lhs);
1994 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1996 case GIMPLE_TERNARY_RHS:
1997 ops.op2 = gimple_assign_rhs3 (stmt);
1999 case GIMPLE_BINARY_RHS:
2000 ops.op1 = gimple_assign_rhs2 (stmt);
2002 case GIMPLE_UNARY_RHS:
2003 ops.op0 = gimple_assign_rhs1 (stmt);
2008 ops.location = gimple_location (stmt);
2010 /* If we want to use a nontemporal store, force the value to
2011 register first. If we store into a promoted register,
2012 don't directly expand to target. */
2013 temp = nontemporal || promoted ? NULL_RTX : target;
2014 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2021 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2022 /* If TEMP is a VOIDmode constant, use convert_modes to make
2023 sure that we properly convert it. */
2024 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2026 temp = convert_modes (GET_MODE (target),
2027 TYPE_MODE (ops.type),
2029 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2030 GET_MODE (target), temp, unsignedp);
2033 convert_move (SUBREG_REG (target), temp, unsignedp);
2035 else if (nontemporal && emit_storent_insn (target, temp))
2039 temp = force_operand (temp, target);
2041 emit_move_insn (target, temp);
2052 /* Expand one gimple statement STMT and return the last RTL instruction
2053 before any of the newly generated ones.
2055 In addition to generating the necessary RTL instructions this also
2056 sets REG_EH_REGION notes if necessary and sets the current source
2057 location for diagnostics. */
2060 expand_gimple_stmt (gimple stmt)
2062 location_t saved_location = input_location;
2063 rtx last = get_last_insn ();
2068 /* We need to save and restore the current source location so that errors
2069 discovered during expansion are emitted with the right location. But
2070 it would be better if the diagnostic routines used the source location
2071 embedded in the tree nodes rather than globals. */
2072 if (gimple_has_location (stmt))
2073 input_location = gimple_location (stmt);
2075 expand_gimple_stmt_1 (stmt);
2077 /* Free any temporaries used to evaluate this statement. */
2080 input_location = saved_location;
2082 /* Mark all insns that may trap. */
2083 lp_nr = lookup_stmt_eh_lp (stmt);
2087 for (insn = next_real_insn (last); insn;
2088 insn = next_real_insn (insn))
2090 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2091 /* If we want exceptions for non-call insns, any
2092 may_trap_p instruction may throw. */
2093 && GET_CODE (PATTERN (insn)) != CLOBBER
2094 && GET_CODE (PATTERN (insn)) != USE
2095 && insn_could_throw_p (insn))
2096 make_reg_eh_region_note (insn, 0, lp_nr);
2103 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2104 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2105 generated a tail call (something that might be denied by the ABI
2106 rules governing the call; see calls.c).
2108 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2109 can still reach the rest of BB. The case here is __builtin_sqrt,
2110 where the NaN result goes through the external function (with a
2111 tailcall) and the normal result happens via a sqrt instruction. */
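/* Illustration (not from the original sources): for __builtin_sqrt the
   expansion can roughly take the shape

       if (argument is negative)
         sibcall sqrt;          -- NaN/errno handling via the library
       else
         result = sqrt insn;    -- falls through to the rest of BB

   so one arm ends in a sibcall while the other still reaches the rest
   of BB, which is what *CAN_FALLTHRU reports.  */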
2114 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2122 last2 = last = expand_gimple_stmt (stmt);
2124 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2125 if (CALL_P (last) && SIBLING_CALL_P (last))
2128 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2130 *can_fallthru = true;
2134 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2135 Any instructions emitted here are about to be deleted. */
2136 do_pending_stack_adjust ();
2138 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2139 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2140 EH or abnormal edges, we shouldn't have created a tail call in
2141 the first place. So it seems to me we should just be removing
2142 all edges here, or redirecting the existing fallthru edge to the exit block. */
2148 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2150 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2152 if (e->dest != EXIT_BLOCK_PTR)
2154 e->dest->count -= e->count;
2155 e->dest->frequency -= EDGE_FREQUENCY (e);
2156 if (e->dest->count < 0)
2158 if (e->dest->frequency < 0)
2159 e->dest->frequency = 0;
2162 probability += e->probability;
2169 /* This is somewhat ugly: the call_expr expander often emits instructions
2170 after the sibcall (to perform the function return). These confuse the
2171 find_many_sub_basic_blocks code, so we need to get rid of these. */
2172 last = NEXT_INSN (last);
2173 gcc_assert (BARRIER_P (last));
2175 *can_fallthru = false;
2176 while (NEXT_INSN (last))
2178 /* For instance, the sqrt builtin expander expands an if with a
2179 sibcall in the then-arm and a label for the else-arm. */
2180 if (LABEL_P (NEXT_INSN (last)))
2182 *can_fallthru = true;
2185 delete_insn (NEXT_INSN (last));
2188 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2189 e->probability += probability;
2192 update_bb_for_insn (bb);
2194 if (NEXT_INSN (last))
2196 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2199 if (BARRIER_P (last))
2200 BB_END (bb) = PREV_INSN (last);
2203 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2208 /* Return the difference between the floor and the truncated result of
2209 a signed division by OP1 with remainder MOD. */
2211 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2213 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
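/* Worked example: for -7 / 5 truncation gives -1 with MOD = -2; OP1 / MOD
   is 5 / -2 = -2 < 0, so the adjustment is -1 and the floor result is
   -1 + -1 = -2, i.e. floor (-1.4).  When MOD is 0 or the signs agree, the
   adjustment is 0.  */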
2214 return gen_rtx_IF_THEN_ELSE
2215 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2216 gen_rtx_IF_THEN_ELSE
2217 (mode, gen_rtx_LT (BImode,
2218 gen_rtx_DIV (mode, op1, mod),
2220 constm1_rtx, const0_rtx),
2224 /* Return the difference between the ceil and the truncated result of
2225 a signed division by OP1 with remainder MOD. */
2227 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2229 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
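/* Worked example: for 7 / 5 truncation gives 1 with MOD = 2; OP1 / MOD
   is 5 / 2 = 2 > 0, so the adjustment is +1 and the ceiling result is
   1 + 1 = 2, i.e. ceil (1.4).  */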
2230 return gen_rtx_IF_THEN_ELSE
2231 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2232 gen_rtx_IF_THEN_ELSE
2233 (mode, gen_rtx_GT (BImode,
2234 gen_rtx_DIV (mode, op1, mod),
2236 const1_rtx, const0_rtx),
2240 /* Return the difference between the ceil and the truncated result of
2241 an unsigned division by OP1 with remainder MOD. */
2243 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2245 /* (mod != 0 ? 1 : 0) */
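/* Worked example: for 7u / 5u truncation gives 1 with MOD = 2; MOD is
   nonzero, so the adjustment is +1 and the ceiling result is 2.  */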
2246 return gen_rtx_IF_THEN_ELSE
2247 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2248 const1_rtx, const0_rtx);
2251 /* Return the difference between the rounded and the truncated result
2252 of a signed division by OP1 with remainder MOD. Halfway cases are
2253 rounded away from zero, rather than to the nearest even number. */
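/* Worked example: for 8 / 5 truncation gives 1 with MOD = 3; since
   abs (MOD) = 3 >= abs (OP1) - abs (MOD) = 2 and OP1 / MOD > 0, the
   adjustment is +1, giving round (1.6) = 2.  For 7 / 5, MOD = 2 < 3,
   so the adjustment is 0 and the result stays 1.  */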
2255 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2257 /* (abs (mod) >= abs (op1) - abs (mod)
2258 ? (op1 / mod > 0 ? 1 : -1)
2260 return gen_rtx_IF_THEN_ELSE
2261 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2262 gen_rtx_MINUS (mode,
2263 gen_rtx_ABS (mode, op1),
2264 gen_rtx_ABS (mode, mod))),
2265 gen_rtx_IF_THEN_ELSE
2266 (mode, gen_rtx_GT (BImode,
2267 gen_rtx_DIV (mode, op1, mod),
2269 const1_rtx, constm1_rtx),
2273 /* Return the difference between the rounded and the truncated result
2274 of an unsigned division by OP1 with remainder MOD. Halfway cases
2275 are rounded away from zero, rather than to the nearest even number. */
2278 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2280 /* (mod >= op1 - mod ? 1 : 0) */
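/* Worked example: for 8u / 5u, MOD = 3 >= OP1 - MOD = 2, so the
   adjustment is +1 and the rounded result is 2; for 7u / 5u, MOD = 2 < 3
   and the truncated result 1 is kept.  */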
2281 return gen_rtx_IF_THEN_ELSE
2282 (mode, gen_rtx_GE (BImode, mod,
2283 gen_rtx_MINUS (mode, op1, mod)),
2284 const1_rtx, const0_rtx);
2287 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2291 convert_debug_memory_address (enum machine_mode mode, rtx x,
2294 enum machine_mode xmode = GET_MODE (x);
2296 #ifndef POINTERS_EXTEND_UNSIGNED
2297 gcc_assert (mode == Pmode
2298 || mode == targetm.addr_space.address_mode (as));
2299 gcc_assert (xmode == mode || xmode == VOIDmode);
2302 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2303 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2305 gcc_assert (mode == address_mode || mode == pointer_mode);
2307 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2310 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2311 x = simplify_gen_subreg (mode, x, xmode,
2312 subreg_lowpart_offset
2314 else if (POINTERS_EXTEND_UNSIGNED > 0)
2315 x = gen_rtx_ZERO_EXTEND (mode, x);
2316 else if (!POINTERS_EXTEND_UNSIGNED)
2317 x = gen_rtx_SIGN_EXTEND (mode, x);
2320 switch (GET_CODE (x))
2323 if ((SUBREG_PROMOTED_VAR_P (x)
2324 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2325 || (GET_CODE (SUBREG_REG (x)) == PLUS
2326 && REG_P (XEXP (SUBREG_REG (x), 0))
2327 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2328 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2329 && GET_MODE (SUBREG_REG (x)) == mode)
2330 return SUBREG_REG (x);
2333 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2334 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2337 temp = shallow_copy_rtx (x);
2338 PUT_MODE (temp, mode);
2341 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2343 temp = gen_rtx_CONST (mode, temp);
2347 if (CONST_INT_P (XEXP (x, 1)))
2349 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2351 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2357 /* Don't know how to express ptr_extend as an operation in debug info. */
2360 #endif /* POINTERS_EXTEND_UNSIGNED */
2365 /* Return an RTX equivalent to the value of the tree expression
2369 expand_debug_expr (tree exp)
2371 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2372 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2373 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2376 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2378 case tcc_expression:
2379 switch (TREE_CODE (exp))
2383 case WIDEN_MULT_PLUS_EXPR:
2384 case WIDEN_MULT_MINUS_EXPR:
2388 case TRUTH_ANDIF_EXPR:
2389 case TRUTH_ORIF_EXPR:
2390 case TRUTH_AND_EXPR:
2392 case TRUTH_XOR_EXPR:
2395 case TRUTH_NOT_EXPR:
2404 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2411 case tcc_comparison:
2412 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2419 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2429 case tcc_exceptional:
2430 case tcc_declaration:
2436 switch (TREE_CODE (exp))
2439 if (!lookup_constant_def (exp))
2441 if (strlen (TREE_STRING_POINTER (exp)) + 1
2442 != (size_t) TREE_STRING_LENGTH (exp))
2444 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2445 op0 = gen_rtx_MEM (BLKmode, op0);
2446 set_mem_attributes (op0, exp, 0);
2449 /* Fall through... */
2454 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2458 gcc_assert (COMPLEX_MODE_P (mode));
2459 op0 = expand_debug_expr (TREE_REALPART (exp));
2460 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2461 return gen_rtx_CONCAT (mode, op0, op1);
2463 case DEBUG_EXPR_DECL:
2464 op0 = DECL_RTL_IF_SET (exp);
2469 op0 = gen_rtx_DEBUG_EXPR (mode);
2470 DEBUG_EXPR_TREE_DECL (op0) = exp;
2471 SET_DECL_RTL (exp, op0);
2481 op0 = DECL_RTL_IF_SET (exp);
2483 /* This decl was probably optimized away. */
2486 if (TREE_CODE (exp) != VAR_DECL
2487 || DECL_EXTERNAL (exp)
2488 || !TREE_STATIC (exp)
2490 || DECL_HARD_REGISTER (exp)
2491 || mode == VOIDmode)
2494 op0 = make_decl_rtl_for_debug (exp);
2496 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2497 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2501 op0 = copy_rtx (op0);
2503 if (GET_MODE (op0) == BLKmode
2504 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2505 below would ICE. While it is likely a FE bug,
2506 try to be robust here. See PR43166. */
2508 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2510 gcc_assert (MEM_P (op0));
2511 op0 = adjust_address_nv (op0, mode, 0);
2522 enum machine_mode inner_mode = GET_MODE (op0);
2524 if (mode == inner_mode)
2527 if (inner_mode == VOIDmode)
2529 if (TREE_CODE (exp) == SSA_NAME)
2530 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2532 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2533 if (mode == inner_mode)
2537 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2539 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2540 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2541 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2542 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2544 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2546 else if (FLOAT_MODE_P (mode))
2548 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2549 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2550 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2552 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2554 else if (FLOAT_MODE_P (inner_mode))
2557 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2559 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2561 else if (CONSTANT_P (op0)
2562 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2563 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2564 subreg_lowpart_offset (mode,
2566 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2567 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2569 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2571 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2577 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2579 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2580 TREE_OPERAND (exp, 0),
2581 TREE_OPERAND (exp, 1));
2583 return expand_debug_expr (newexp);
2587 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2591 if (TREE_CODE (exp) == MEM_REF)
2593 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2594 || (GET_CODE (op0) == PLUS
2595 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2596 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2597 Instead just use get_inner_reference. */
2600 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2601 if (!op1 || !CONST_INT_P (op1))
2604 op0 = plus_constant (op0, INTVAL (op1));
2607 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2608 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2610 as = ADDR_SPACE_GENERIC;
2612 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2614 if (op0 == NULL_RTX)
2617 op0 = gen_rtx_MEM (mode, op0);
2618 set_mem_attributes (op0, exp, 0);
2619 if (TREE_CODE (exp) == MEM_REF
2620 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2621 set_mem_expr (op0, NULL_TREE);
2622 set_mem_addr_space (op0, as);
2626 case TARGET_MEM_REF:
2627 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2628 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2631 op0 = expand_debug_expr
2632 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2636 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2637 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2639 as = ADDR_SPACE_GENERIC;
2641 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2643 if (op0 == NULL_RTX)
2646 op0 = gen_rtx_MEM (mode, op0);
2648 set_mem_attributes (op0, exp, 0);
2649 set_mem_addr_space (op0, as);
2655 case ARRAY_RANGE_REF:
2660 case VIEW_CONVERT_EXPR:
2662 enum machine_mode mode1;
2663 HOST_WIDE_INT bitsize, bitpos;
2666 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2667 &mode1, &unsignedp, &volatilep, false);
2673 orig_op0 = op0 = expand_debug_expr (tem);
2680 enum machine_mode addrmode, offmode;
2685 op0 = XEXP (op0, 0);
2686 addrmode = GET_MODE (op0);
2687 if (addrmode == VOIDmode)
2690 op1 = expand_debug_expr (offset);
2694 offmode = GET_MODE (op1);
2695 if (offmode == VOIDmode)
2696 offmode = TYPE_MODE (TREE_TYPE (offset));
2698 if (addrmode != offmode)
2699 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2700 subreg_lowpart_offset (addrmode,
2703 /* Don't use offset_address here, we don't need a
2704 recognizable address, and we don't want to generate
2706 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2711 if (mode1 == VOIDmode)
2713 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2714 if (bitpos >= BITS_PER_UNIT)
2716 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2717 bitpos %= BITS_PER_UNIT;
2719 else if (bitpos < 0)
2722 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2723 op0 = adjust_address_nv (op0, mode1, units);
2724 bitpos += units * BITS_PER_UNIT;
2726 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2727 op0 = adjust_address_nv (op0, mode, 0);
2728 else if (GET_MODE (op0) != mode1)
2729 op0 = adjust_address_nv (op0, mode1, 0);
2731 op0 = copy_rtx (op0);
2732 if (op0 == orig_op0)
2733 op0 = shallow_copy_rtx (op0);
2734 set_mem_attributes (op0, exp, 0);
2737 if (bitpos == 0 && mode == GET_MODE (op0))
2743 if (GET_MODE (op0) == BLKmode)
2746 if ((bitpos % BITS_PER_UNIT) == 0
2747 && bitsize == GET_MODE_BITSIZE (mode1))
2749 enum machine_mode opmode = GET_MODE (op0);
2751 if (opmode == VOIDmode)
2752 opmode = TYPE_MODE (TREE_TYPE (tem));
2754 /* This condition may hold if we're expanding the address
2755 right past the end of an array that turned out not to
2756 be addressable (i.e., the address was only computed in
2757 debug stmts). The gen_subreg below would rightfully
2758 crash, and the address doesn't really exist, so just
2760 if (bitpos >= GET_MODE_BITSIZE (opmode))
2763 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2764 return simplify_gen_subreg (mode, op0, opmode,
2765 bitpos / BITS_PER_UNIT);
2768 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2769 && TYPE_UNSIGNED (TREE_TYPE (exp))
2771 : ZERO_EXTRACT, mode,
2772 GET_MODE (op0) != VOIDmode
2774 : TYPE_MODE (TREE_TYPE (tem)),
2775 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2779 return gen_rtx_ABS (mode, op0);
2782 return gen_rtx_NEG (mode, op0);
2785 return gen_rtx_NOT (mode, op0);
2789 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2791 return gen_rtx_FLOAT (mode, op0);
2793 case FIX_TRUNC_EXPR:
2795 return gen_rtx_UNSIGNED_FIX (mode, op0);
2797 return gen_rtx_FIX (mode, op0);
2799 case POINTER_PLUS_EXPR:
2800 /* For the rare target where pointers are not the same size as
2801 size_t, we need to check for mis-matched modes and correct the addend. */
2804 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2805 && GET_MODE (op0) != GET_MODE (op1))
2807 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2808 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2810 /* We always sign-extend, regardless of the signedness of
2811 the operand, because the operand is always unsigned
2812 here even if the original C expression is signed. */
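/* For instance (purely illustrative), on a target with 64-bit pointers
   and 32-bit sizetype, 'p - 1' is represented as p + (sizetype) -1;
   sign-extending the 32-bit 0xffffffff offset to the pointer mode
   recovers the intended -1, whereas zero extension would not.  */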
2813 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2817 return gen_rtx_PLUS (mode, op0, op1);
2820 return gen_rtx_MINUS (mode, op0, op1);
2823 return gen_rtx_MULT (mode, op0, op1);
2826 case TRUNC_DIV_EXPR:
2827 case EXACT_DIV_EXPR:
2829 return gen_rtx_UDIV (mode, op0, op1);
2831 return gen_rtx_DIV (mode, op0, op1);
2833 case TRUNC_MOD_EXPR:
2835 return gen_rtx_UMOD (mode, op0, op1);
2837 return gen_rtx_MOD (mode, op0, op1);
2839 case FLOOR_DIV_EXPR:
2841 return gen_rtx_UDIV (mode, op0, op1);
2844 rtx div = gen_rtx_DIV (mode, op0, op1);
2845 rtx mod = gen_rtx_MOD (mode, op0, op1);
2846 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2847 return gen_rtx_PLUS (mode, div, adj);
2850 case FLOOR_MOD_EXPR:
2852 return gen_rtx_UMOD (mode, op0, op1);
2855 rtx mod = gen_rtx_MOD (mode, op0, op1);
2856 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2857 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2858 return gen_rtx_PLUS (mode, mod, adj);
2864 rtx div = gen_rtx_UDIV (mode, op0, op1);
2865 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2866 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2867 return gen_rtx_PLUS (mode, div, adj);
2871 rtx div = gen_rtx_DIV (mode, op0, op1);
2872 rtx mod = gen_rtx_MOD (mode, op0, op1);
2873 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2874 return gen_rtx_PLUS (mode, div, adj);
2880 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2881 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2882 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2883 return gen_rtx_PLUS (mode, mod, adj);
2887 rtx mod = gen_rtx_MOD (mode, op0, op1);
2888 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2889 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2890 return gen_rtx_PLUS (mode, mod, adj);
2893 case ROUND_DIV_EXPR:
2896 rtx div = gen_rtx_UDIV (mode, op0, op1);
2897 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2898 rtx adj = round_udiv_adjust (mode, mod, op1);
2899 return gen_rtx_PLUS (mode, div, adj);
2903 rtx div = gen_rtx_DIV (mode, op0, op1);
2904 rtx mod = gen_rtx_MOD (mode, op0, op1);
2905 rtx adj = round_sdiv_adjust (mode, mod, op1);
2906 return gen_rtx_PLUS (mode, div, adj);
2909 case ROUND_MOD_EXPR:
2912 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2913 rtx adj = round_udiv_adjust (mode, mod, op1);
2914 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2915 return gen_rtx_PLUS (mode, mod, adj);
2919 rtx mod = gen_rtx_MOD (mode, op0, op1);
2920 rtx adj = round_sdiv_adjust (mode, mod, op1);
2921 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2922 return gen_rtx_PLUS (mode, mod, adj);
2926 return gen_rtx_ASHIFT (mode, op0, op1);
2930 return gen_rtx_LSHIFTRT (mode, op0, op1);
2932 return gen_rtx_ASHIFTRT (mode, op0, op1);
2935 return gen_rtx_ROTATE (mode, op0, op1);
2938 return gen_rtx_ROTATERT (mode, op0, op1);
2942 return gen_rtx_UMIN (mode, op0, op1);
2944 return gen_rtx_SMIN (mode, op0, op1);
2948 return gen_rtx_UMAX (mode, op0, op1);
2950 return gen_rtx_SMAX (mode, op0, op1);
2953 case TRUTH_AND_EXPR:
2954 return gen_rtx_AND (mode, op0, op1);
2958 return gen_rtx_IOR (mode, op0, op1);
2961 case TRUTH_XOR_EXPR:
2962 return gen_rtx_XOR (mode, op0, op1);
2964 case TRUTH_ANDIF_EXPR:
2965 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2967 case TRUTH_ORIF_EXPR:
2968 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2970 case TRUTH_NOT_EXPR:
2971 return gen_rtx_EQ (mode, op0, const0_rtx);
2975 return gen_rtx_LTU (mode, op0, op1);
2977 return gen_rtx_LT (mode, op0, op1);
2981 return gen_rtx_LEU (mode, op0, op1);
2983 return gen_rtx_LE (mode, op0, op1);
2987 return gen_rtx_GTU (mode, op0, op1);
2989 return gen_rtx_GT (mode, op0, op1);
2993 return gen_rtx_GEU (mode, op0, op1);
2995 return gen_rtx_GE (mode, op0, op1);
2998 return gen_rtx_EQ (mode, op0, op1);
3001 return gen_rtx_NE (mode, op0, op1);
3003 case UNORDERED_EXPR:
3004 return gen_rtx_UNORDERED (mode, op0, op1);
3007 return gen_rtx_ORDERED (mode, op0, op1);
3010 return gen_rtx_UNLT (mode, op0, op1);
3013 return gen_rtx_UNLE (mode, op0, op1);
3016 return gen_rtx_UNGT (mode, op0, op1);
3019 return gen_rtx_UNGE (mode, op0, op1);
3022 return gen_rtx_UNEQ (mode, op0, op1);
3025 return gen_rtx_LTGT (mode, op0, op1);
3028 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3031 gcc_assert (COMPLEX_MODE_P (mode));
3032 if (GET_MODE (op0) == VOIDmode)
3033 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3034 if (GET_MODE (op1) == VOIDmode)
3035 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3036 return gen_rtx_CONCAT (mode, op0, op1);
3039 if (GET_CODE (op0) == CONCAT)
3040 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3041 gen_rtx_NEG (GET_MODE_INNER (mode),
3045 enum machine_mode imode = GET_MODE_INNER (mode);
3050 re = adjust_address_nv (op0, imode, 0);
3051 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3055 enum machine_mode ifmode = int_mode_for_mode (mode);
3056 enum machine_mode ihmode = int_mode_for_mode (imode);
3058 if (ifmode == BLKmode || ihmode == BLKmode)
3060 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3063 re = gen_rtx_SUBREG (ifmode, re, 0);
3064 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3065 if (imode != ihmode)
3066 re = gen_rtx_SUBREG (imode, re, 0);
3067 im = copy_rtx (op0);
3069 im = gen_rtx_SUBREG (ifmode, im, 0);
3070 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3071 if (imode != ihmode)
3072 im = gen_rtx_SUBREG (imode, im, 0);
3074 im = gen_rtx_NEG (imode, im);
3075 return gen_rtx_CONCAT (mode, re, im);
3079 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3080 if (!op0 || !MEM_P (op0))
3082 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3083 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3084 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3085 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3086 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3088 if (handled_component_p (TREE_OPERAND (exp, 0)))
3090 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3092 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3093 &bitoffset, &bitsize, &maxsize);
3094 if ((TREE_CODE (decl) == VAR_DECL
3095 || TREE_CODE (decl) == PARM_DECL
3096 || TREE_CODE (decl) == RESULT_DECL)
3097 && !TREE_ADDRESSABLE (decl)
3098 && (bitoffset % BITS_PER_UNIT) == 0
3100 && bitsize == maxsize)
3101 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3102 bitoffset / BITS_PER_UNIT);
3108 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3109 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3114 exp = build_constructor_from_list (TREE_TYPE (exp),
3115 TREE_VECTOR_CST_ELTS (exp));
3119 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3124 op0 = gen_rtx_CONCATN
3125 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3127 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3129 op1 = expand_debug_expr (val);
3132 XVECEXP (op0, 0, i) = op1;
3135 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3137 op1 = expand_debug_expr
3138 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3143 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3144 XVECEXP (op0, 0, i) = op1;
3150 goto flag_unsupported;
3153 /* ??? Maybe handle some builtins? */
3158 gimple g = get_gimple_for_ssa_name (exp);
3161 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3167 int part = var_to_partition (SA.map, exp);
3169 if (part == NO_PARTITION)
3171 /* If this is a reference to an incoming value of a parameter
3172 that is never used in the code, or where the incoming
3173 value is never used in the code, use the PARM_DECL's DECL_RTL if set. */
3175 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3176 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3178 rtx incoming = DECL_INCOMING_RTL (SSA_NAME_VAR (exp));
3180 && GET_MODE (incoming) != BLKmode
3181 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3182 || (MEM_P (incoming)
3183 && REG_P (XEXP (incoming, 0))
3184 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3186 op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3187 ENTRY_VALUE_EXP (op0) = incoming;
3190 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3198 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3200 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3208 /* Vector stuff. For most of the codes we don't have rtl codes. */
3209 case REALIGN_LOAD_EXPR:
3210 case REDUC_MAX_EXPR:
3211 case REDUC_MIN_EXPR:
3212 case REDUC_PLUS_EXPR:
3214 case VEC_EXTRACT_EVEN_EXPR:
3215 case VEC_EXTRACT_ODD_EXPR:
3216 case VEC_INTERLEAVE_HIGH_EXPR:
3217 case VEC_INTERLEAVE_LOW_EXPR:
3218 case VEC_LSHIFT_EXPR:
3219 case VEC_PACK_FIX_TRUNC_EXPR:
3220 case VEC_PACK_SAT_EXPR:
3221 case VEC_PACK_TRUNC_EXPR:
3222 case VEC_RSHIFT_EXPR:
3223 case VEC_UNPACK_FLOAT_HI_EXPR:
3224 case VEC_UNPACK_FLOAT_LO_EXPR:
3225 case VEC_UNPACK_HI_EXPR:
3226 case VEC_UNPACK_LO_EXPR:
3227 case VEC_WIDEN_MULT_HI_EXPR:
3228 case VEC_WIDEN_MULT_LO_EXPR:
3232 case ADDR_SPACE_CONVERT_EXPR:
3233 case FIXED_CONVERT_EXPR:
3235 case WITH_SIZE_EXPR:
3239 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3240 && SCALAR_INT_MODE_P (mode))
3242 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3243 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3245 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3246 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3247 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3249 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3250 op0 = gen_rtx_MULT (mode, op0, op1);
3251 return gen_rtx_PLUS (mode, op0, op2);
3255 case WIDEN_MULT_EXPR:
3256 case WIDEN_MULT_PLUS_EXPR:
3257 case WIDEN_MULT_MINUS_EXPR:
3258 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3259 && SCALAR_INT_MODE_P (mode))
3261 enum machine_mode inner_mode = GET_MODE (op0);
3262 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3263 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3265 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3266 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3267 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3269 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3270 op0 = gen_rtx_MULT (mode, op0, op1);
3271 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3273 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3274 return gen_rtx_PLUS (mode, op0, op2);
3276 return gen_rtx_MINUS (mode, op2, op0);
3280 case WIDEN_SUM_EXPR:
3281 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3282 && SCALAR_INT_MODE_P (mode))
3284 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3285 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3287 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3288 return gen_rtx_PLUS (mode, op0, op1);
3293 return gen_rtx_FMA (mode, op0, op1, op2);
3297 #ifdef ENABLE_CHECKING
3306 /* Expand the _LOCs in debug insns. We run this after expanding all
3307 regular insns, so that any variables referenced in the function
3308 will have their DECL_RTLs set. */
3311 expand_debug_locations (void)
3314 rtx last = get_last_insn ();
3315 int save_strict_alias = flag_strict_aliasing;
3317 /* New alias sets while setting up memory attributes cause
3318 -fcompare-debug failures, even though it doesn't bring about any codegen changes. */
3320 flag_strict_aliasing = 0;
3322 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3323 if (DEBUG_INSN_P (insn))
3325 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3327 enum machine_mode mode;
3329 if (value == NULL_TREE)
3333 val = expand_debug_expr (value);
3334 gcc_assert (last == get_last_insn ());
3338 val = gen_rtx_UNKNOWN_VAR_LOC ();
3341 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3343 gcc_assert (mode == GET_MODE (val)
3344 || (GET_MODE (val) == VOIDmode
3345 && (CONST_INT_P (val)
3346 || GET_CODE (val) == CONST_FIXED
3347 || GET_CODE (val) == CONST_DOUBLE
3348 || GET_CODE (val) == LABEL_REF)));
3351 INSN_VAR_LOCATION_LOC (insn) = val;
3354 flag_strict_aliasing = save_strict_alias;
3357 /* Expand basic block BB from GIMPLE trees to RTL. */
3360 expand_gimple_basic_block (basic_block bb)
3362 gimple_stmt_iterator gsi;
3371 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3374 /* Note that since we are now transitioning from GIMPLE to RTL, we
3375 cannot use the gsi_*_bb() routines because they expect the basic
3376 block to be in GIMPLE, instead of RTL. Therefore, we need to
3377 access the BB sequence directly. */
3378 stmts = bb_seq (bb);
3379 bb->il.gimple = NULL;
3380 rtl_profile_for_bb (bb);
3381 init_rtl_bb_info (bb);
3382 bb->flags |= BB_RTL;
3384 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
3386 gsi = gsi_last (stmts);
3387 if (!gsi_end_p (gsi)
3388 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3390 gimple ret_stmt = gsi_stmt (gsi);
3392 gcc_assert (single_succ_p (bb));
3393 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3395 if (bb->next_bb == EXIT_BLOCK_PTR
3396 && !gimple_return_retval (ret_stmt))
3398 gsi_remove (&gsi, false);
3399 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3403 gsi = gsi_start (stmts);
3404 if (!gsi_end_p (gsi))
3406 stmt = gsi_stmt (gsi);
3407 if (gimple_code (stmt) != GIMPLE_LABEL)
3411 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3415 last = get_last_insn ();
3419 expand_gimple_stmt (stmt);
3424 emit_label ((rtx) *elt);
3426 /* Java emits line number notes at the top of labels.
3427 ??? Make this go away once line number notes are obsoleted. */
3428 BB_HEAD (bb) = NEXT_INSN (last);
3429 if (NOTE_P (BB_HEAD (bb)))
3430 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3431 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3433 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3436 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3438 NOTE_BASIC_BLOCK (note) = bb;
3440 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3444 stmt = gsi_stmt (gsi);
3446 /* If this statement is a non-debug one, and we generate debug
3447 insns, then this one might be the last real use of a TERed
3448 SSA_NAME, but where there are still some debug uses further
3449 down. Expanding the current SSA name in such further debug
3450 uses by their RHS might lead to wrong debug info, as coalescing
3451 might make the operands of such RHS be placed into the same
3452 pseudo as something else. Like so:
3453 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3454 use(a_1);
3455 a_2 = ...
3456 #DEBUG ... => a_1
3457 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3458 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
3459 the write to a_2 would actually have clobbered the place which
3460 would be remembered by the DEBUG insn for a_1, so we would really
3461 see a garbled value in that DEBUG insn.
3462 So, instead of that, we recognize the situation, and generate
3463 debug temporaries at the last real use of TERed SSA names:
3464 a_1 = a_0 + 1;
3465 #DEBUG #D1 => a_1
3466 use(a_1);
3467 a_2 = ...
3468 #DEBUG ... => #D1
3469 */
3470 if (MAY_HAVE_DEBUG_INSNS
3472 && !is_gimple_debug (stmt))
3478 location_t sloc = get_curr_insn_source_location ();
3479 tree sblock = get_curr_insn_block ();
3481 /* Look for SSA names that have their last use here (TERed
3482 names always have only one real use). */
3483 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3484 if ((def = get_gimple_for_ssa_name (op)))
3486 imm_use_iterator imm_iter;
3487 use_operand_p use_p;
3488 bool have_debug_uses = false;
3490 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3492 if (gimple_debug_bind_p (USE_STMT (use_p)))
3494 have_debug_uses = true;
3499 if (have_debug_uses)
3501 /* OP is a TERed SSA name, with DEF its defining
3502 statement, and where OP is used in further debug
3503 instructions. Generate a debug temporary, and
3504 replace all uses of OP in debug insns with that
3507 tree value = gimple_assign_rhs_to_tree (def);
3508 tree vexpr = make_node (DEBUG_EXPR_DECL);
3510 enum machine_mode mode;
3512 set_curr_insn_source_location (gimple_location (def));
3513 set_curr_insn_block (gimple_block (def));
3515 DECL_ARTIFICIAL (vexpr) = 1;
3516 TREE_TYPE (vexpr) = TREE_TYPE (value);
3518 mode = DECL_MODE (value);
3520 mode = TYPE_MODE (TREE_TYPE (value));
3521 DECL_MODE (vexpr) = mode;
3523 val = gen_rtx_VAR_LOCATION
3524 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3526 emit_debug_insn (val);
3528 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3530 if (!gimple_debug_bind_p (debugstmt))
3533 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3534 SET_USE (use_p, vexpr);
3536 update_stmt (debugstmt);
3540 set_curr_insn_source_location (sloc);
3541 set_curr_insn_block (sblock);
3544 currently_expanding_gimple_stmt = stmt;
3546 /* Expand this statement, then evaluate the resulting RTL and
3547 fixup the CFG accordingly. */
3548 if (gimple_code (stmt) == GIMPLE_COND)
3550 new_bb = expand_gimple_cond (bb, stmt);
3554 else if (gimple_debug_bind_p (stmt))
3556 location_t sloc = get_curr_insn_source_location ();
3557 tree sblock = get_curr_insn_block ();
3558 gimple_stmt_iterator nsi = gsi;
3562 tree var = gimple_debug_bind_get_var (stmt);
3565 enum machine_mode mode;
3567 if (gimple_debug_bind_has_value_p (stmt))
3568 value = gimple_debug_bind_get_value (stmt);
3572 last = get_last_insn ();
3574 set_curr_insn_source_location (gimple_location (stmt));
3575 set_curr_insn_block (gimple_block (stmt));
3578 mode = DECL_MODE (var);
3580 mode = TYPE_MODE (TREE_TYPE (var));
3582 val = gen_rtx_VAR_LOCATION
3583 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3585 emit_debug_insn (val);
3587 if (dump_file && (dump_flags & TDF_DETAILS))
3589 /* We can't dump the insn with a TREE where an RTX is expected. */
3591 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3592 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3593 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3596 /* In order not to generate too many debug temporaries,
3597 we delink all uses of debug statements we already expanded.
3598 Therefore debug statements between definition and real
3599 use of TERed SSA names will continue to use the SSA name,
3600 and not be replaced with debug temps. */
3601 delink_stmt_imm_use (stmt);
3605 if (gsi_end_p (nsi))
3607 stmt = gsi_stmt (nsi);
3608 if (!gimple_debug_bind_p (stmt))
3612 set_curr_insn_source_location (sloc);
3613 set_curr_insn_block (sblock);
3617 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3620 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3631 def_operand_p def_p;
3632 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3636 /* Ignore this stmt if it is in the list of
3637 replaceable expressions. */
3639 && bitmap_bit_p (SA.values,
3640 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3643 last = expand_gimple_stmt (stmt);
3644 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3649 currently_expanding_gimple_stmt = NULL;
3651 /* Expand implicit goto and convert goto_locus. */
3652 FOR_EACH_EDGE (e, ei, bb->succs)
3654 if (e->goto_locus && e->goto_block)
3656 set_curr_insn_source_location (e->goto_locus);
3657 set_curr_insn_block (e->goto_block);
3658 e->goto_locus = curr_insn_locator ();
3660 e->goto_block = NULL;
3661 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3663 emit_jump (label_rtx_for_bb (e->dest));
3664 e->flags &= ~EDGE_FALLTHRU;
3668 /* Expanded RTL can create a jump in the last instruction of a block.
3669 This might later be assumed to be a jump to the successor and break edge insertion.
3670 We need to insert a dummy move to prevent this. PR41440. */
3671 if (single_succ_p (bb)
3672 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3673 && (last = get_last_insn ())
3676 rtx dummy = gen_reg_rtx (SImode);
3677 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3680 do_pending_stack_adjust ();
3682 /* Find the block tail. The last insn in the block is the insn
3683 before a barrier and/or table jump insn. */
3684 last = get_last_insn ();
3685 if (BARRIER_P (last))
3686 last = PREV_INSN (last);
3687 if (JUMP_TABLE_DATA_P (last))
3688 last = PREV_INSN (PREV_INSN (last));
3691 update_bb_for_insn (bb);
3697 /* Create a basic block for initialization code. */
3700 construct_init_block (void)
3702 basic_block init_block, first_block;
3706 /* Multiple entry points not supported yet. */
3707 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3708 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3709 init_rtl_bb_info (EXIT_BLOCK_PTR);
3710 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3711 EXIT_BLOCK_PTR->flags |= BB_RTL;
3713 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3715 /* When the entry edge points to the first basic block, we don't need a jump;
3716 otherwise we have to jump to the proper target.
3717 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3719 tree label = gimple_block_label (e->dest);
3721 emit_jump (label_rtx (label));
3725 flags = EDGE_FALLTHRU;
3727 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3730 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3731 init_block->count = ENTRY_BLOCK_PTR->count;
3734 first_block = e->dest;
3735 redirect_edge_succ (e, init_block);
3736 e = make_edge (init_block, first_block, flags);
3739 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3740 e->probability = REG_BR_PROB_BASE;
3741 e->count = ENTRY_BLOCK_PTR->count;
3743 update_bb_for_insn (init_block);
3747 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3748 found in the block tree. */
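/* For example, the block passed in at the outermost call (DECL_INITIAL of
   the function, see gimple_expand_cfg) gets BLOCK_NUMBER 0, its subblocks
   get 1, their subblocks get 2, and so on.  */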
3751 set_block_levels (tree block, int level)
3755 BLOCK_NUMBER (block) = level;
3756 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3757 block = BLOCK_CHAIN (block);
3761 /* Create a block containing landing pads and similar stuff. */
3764 construct_exit_block (void)
3766 rtx head = get_last_insn ();
3768 basic_block exit_block;
3772 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3774 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3776 /* Make sure the locus is set to the end of the function, so that
3777 epilogue line numbers and warnings are set properly. */
3778 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3779 input_location = cfun->function_end_locus;
3781 /* The following insns belong to the top scope. */
3782 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3784 /* Generate rtl for function exit. */
3785 expand_function_end ();
3787 end = get_last_insn ();
3790 /* While emitting the function end we could move the end of the last basic block. */
3792 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3793 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3794 head = NEXT_INSN (head);
3795 exit_block = create_basic_block (NEXT_INSN (head), end,
3796 EXIT_BLOCK_PTR->prev_bb);
3797 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3798 exit_block->count = EXIT_BLOCK_PTR->count;
3801 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3803 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3804 if (!(e->flags & EDGE_ABNORMAL))
3805 redirect_edge_succ (e, exit_block);
3810 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3811 e->probability = REG_BR_PROB_BASE;
3812 e->count = EXIT_BLOCK_PTR->count;
3813 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3816 e->count -= e2->count;
3817 exit_block->count -= e2->count;
3818 exit_block->frequency -= EDGE_FREQUENCY (e2);
3822 if (exit_block->count < 0)
3823 exit_block->count = 0;
3824 if (exit_block->frequency < 0)
3825 exit_block->frequency = 0;
3826 update_bb_for_insn (exit_block);
3829 /* Helper function for discover_nonconstant_array_refs.
3830 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
3834 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3835 void *data ATTRIBUTE_UNUSED)
3839 if (IS_TYPE_OR_DECL_P (t))
3841 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3843 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3844 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3845 && (!TREE_OPERAND (t, 2)
3846 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3847 || (TREE_CODE (t) == COMPONENT_REF
3848 && (!TREE_OPERAND (t,2)
3849 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3850 || TREE_CODE (t) == BIT_FIELD_REF
3851 || TREE_CODE (t) == REALPART_EXPR
3852 || TREE_CODE (t) == IMAGPART_EXPR
3853 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3854 || CONVERT_EXPR_P (t))
3855 t = TREE_OPERAND (t, 0);
3857 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3859 t = get_base_address (t);
3861 && DECL_MODE (t) != BLKmode)
3862 TREE_ADDRESSABLE (t) = 1;
3871 /* RTL expansion is not able to compile array references with variable
3872 offsets for arrays stored in a single register. Discover such
3873 expressions and mark variables as addressable to avoid this scenario. */
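/* For instance (illustrative only), a small array such as 'int v[2]'
   accessed as 'v[i]' with a non-constant I may have a non-BLK mode and
   would otherwise live in a register; marking V addressable forces it
   into memory so the variable index can be expanded.  */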
3877 discover_nonconstant_array_refs (void)
3880 gimple_stmt_iterator gsi;
3883 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3885 gimple stmt = gsi_stmt (gsi);
3886 if (!is_gimple_debug (stmt))
3887 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3891 /* This function sets crtl->args.internal_arg_pointer to a virtual
3892 register if DRAP is needed. The local register allocator will replace
3893 virtual_incoming_args_rtx with the virtual register. */
3896 expand_stack_alignment (void)
3899 unsigned int preferred_stack_boundary;
3901 if (! SUPPORTS_STACK_ALIGNMENT)
3904 if (cfun->calls_alloca
3905 || cfun->has_nonlocal_label
3906 || crtl->has_nonlocal_goto)
3907 crtl->need_drap = true;
3909 /* Call update_stack_boundary here again to update incoming stack
3910 boundary. It may set incoming stack alignment to a different
3911 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3912 use the minimum incoming stack alignment to check if it is OK
3913 to perform sibcall optimization since sibcall optimization will
3914 only align the outgoing stack to incoming stack boundary. */
3915 if (targetm.calls.update_stack_boundary)
3916 targetm.calls.update_stack_boundary ();
3918 /* The incoming stack frame has to be aligned at least at
3919 parm_stack_boundary. */
3920 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3922 /* Update crtl->stack_alignment_estimated and use it later to align
3923 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3924 exceptions since callgraph doesn't collect incoming stack alignment in this case. */
3926 if (cfun->can_throw_non_call_exceptions
3927 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3928 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3930 preferred_stack_boundary = crtl->preferred_stack_boundary;
3931 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3932 crtl->stack_alignment_estimated = preferred_stack_boundary;
3933 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3934 crtl->stack_alignment_needed = preferred_stack_boundary;
3936 gcc_assert (crtl->stack_alignment_needed
3937 <= crtl->stack_alignment_estimated);
3939 crtl->stack_realign_needed
3940 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3941 crtl->stack_realign_tried = crtl->stack_realign_needed;
3943 crtl->stack_realign_processed = true;
3945 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3947 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3948 drap_rtx = targetm.calls.get_drap_rtx ();
3950 /* stack_realign_drap and drap_rtx must match. */
3951 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3953 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3954 if (NULL != drap_rtx)
3956 crtl->args.internal_arg_pointer = drap_rtx;
3958 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3960 fixup_tail_calls ();
3964 /* Translate the intermediate representation contained in the CFG
3965 from GIMPLE trees to RTL.
3967 We do conversion per basic block and preserve/update the tree CFG.
3968 This implies we have to do some magic as the CFG can simultaneously
3969 consist of basic blocks containing RTL and GIMPLE trees. This can
3970 confuse the CFG hooks, so be careful to not manipulate CFG during the expansion. */
3974 gimple_expand_cfg (void)
3976 basic_block bb, init_block;
3983 timevar_push (TV_OUT_OF_SSA);
3984 rewrite_out_of_ssa (&SA);
3985 timevar_pop (TV_OUT_OF_SSA);
3986 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3989 /* Some backends want to know that we are expanding to RTL. */
3990 currently_expanding_to_rtl = 1;
3992 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3994 insn_locators_alloc ();
3995 if (!DECL_IS_BUILTIN (current_function_decl))
3997 /* Eventually, all FEs should explicitly set function_start_locus. */
3998 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3999 set_curr_insn_source_location
4000 (DECL_SOURCE_LOCATION (current_function_decl));
4002 set_curr_insn_source_location (cfun->function_start_locus);
4005 set_curr_insn_source_location (UNKNOWN_LOCATION);
4006 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4007 prologue_locator = curr_insn_locator ();
4009 #ifdef INSN_SCHEDULING
4010 init_sched_attrs ();
4013 /* Make sure first insn is a note even if we don't want linenums.
4014 This makes sure the first insn will never be deleted.
4015 Also, final expects a note to appear there. */
4016 emit_note (NOTE_INSN_DELETED);
4018 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4019 discover_nonconstant_array_refs ();
4021 targetm.expand_to_rtl_hook ();
4022 crtl->stack_alignment_needed = STACK_BOUNDARY;
4023 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4024 crtl->stack_alignment_estimated = 0;
4025 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4026 cfun->cfg->max_jumptable_ents = 0;
4028 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4029 of the function section at expansion time to predict the distance of calls. */
4030 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4032 /* Expand the variables recorded during gimple lowering. */
4033 timevar_push (TV_VAR_EXPAND);
4036 expand_used_vars ();
4038 var_seq = get_insns ();
4040 timevar_pop (TV_VAR_EXPAND);
4042 /* Honor stack protection warnings. */
4043 if (warn_stack_protect)
4045 if (cfun->calls_alloca)
4046 warning (OPT_Wstack_protector,
4047 "stack protector not protecting local variables: "
4048 "variable length buffer");
4049 if (has_short_buffer && !crtl->stack_protect_guard)
4050 warning (OPT_Wstack_protector,
4051 "stack protector not protecting function: "
4052 "all local arrays are less than %d bytes long",
4053 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4056 /* Set up parameters and prepare for return, for the function. */
4057 expand_function_start (current_function_decl);
4059 /* If we emitted any instructions for setting up the variables,
4060 emit them before the FUNCTION_START note. */
4063 emit_insn_before (var_seq, parm_birth_insn);
4065 /* In expand_function_end we'll insert the alloca save/restore
4066 before parm_birth_insn. We've just inserted an alloca call.
4067 Adjust the pointer to match. */
4068 parm_birth_insn = var_seq;
4071 /* Now that we also have the parameter RTXs, copy them over to our
4073 for (i = 0; i < SA.map->num_partitions; i++)
4075 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4077 if (TREE_CODE (var) != VAR_DECL
4078 && !SA.partition_to_pseudo[i])
4079 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4080 gcc_assert (SA.partition_to_pseudo[i]);
4082 /* If this decl was marked as living in multiple places, reset
4083 this now to NULL. */
4084 if (DECL_RTL_IF_SET (var) == pc_rtx)
4085 SET_DECL_RTL (var, NULL);
4087 /* Some RTL parts really want to look at DECL_RTL(x) when x
4088 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4089 SET_DECL_RTL here, making this available, but that would mean
4090 selecting one of the potentially many RTLs for one DECL. Instead
4091 of doing that we simply reset the MEM_EXPR of the RTL in question,
4092 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4093 if (!DECL_RTL_SET_P (var))
4095 if (MEM_P (SA.partition_to_pseudo[i]))
4096 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4100 /* If this function is `main', emit a call to `__main'
4101 to run global initializers, etc. */
4102 if (DECL_NAME (current_function_decl)
4103 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4104 && DECL_FILE_SCOPE_P (current_function_decl))
4105 expand_main_function ();
4107 /* Initialize the stack_protect_guard field. This must happen after the
4108 call to __main (if any) so that the external decl is initialized. */
4109 if (crtl->stack_protect_guard)
4110 stack_protect_prologue ();
4112 expand_phi_nodes (&SA);
4114 /* Register rtl specific functions for cfg. */
4115 rtl_register_cfg_hooks ();
4117 init_block = construct_init_block ();
4119 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4120 remaining edges later. */
4121 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4122 e->flags &= ~EDGE_EXECUTABLE;
4124 lab_rtx_for_bb = pointer_map_create ();
4125 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4126 bb = expand_gimple_basic_block (bb);
4128 if (MAY_HAVE_DEBUG_INSNS)
4129 expand_debug_locations ();
4131 execute_free_datastructures ();
4132 timevar_push (TV_OUT_OF_SSA);
4133 finish_out_of_ssa (&SA);
4134 timevar_pop (TV_OUT_OF_SSA);
4136 timevar_push (TV_POST_EXPAND);
4137 /* We are no longer in SSA form. */
4138 cfun->gimple_df->in_ssa_p = false;
4140 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4141 conservatively to true until they are all profile aware. */
4142 pointer_map_destroy (lab_rtx_for_bb);
4145 construct_exit_block ();
4146 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4147 insn_locators_finalize ();
4149 /* Zap the tree EH table. */
4150 set_eh_throw_stmt_table (cfun, NULL);
4152 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4153 split edges which edge insertions might do. */
4154 rebuild_jump_labels (get_insns ());
4156 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4160 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4164 rebuild_jump_labels_chain (e->insns.r);
4165 /* Avoid putting insns before parm_birth_insn. */
4166 if (e->src == ENTRY_BLOCK_PTR
4167 && single_succ_p (ENTRY_BLOCK_PTR)
4170 rtx insns = e->insns.r;
4171 e->insns.r = NULL_RTX;
4172 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4175 commit_one_edge_insertion (e);
4182 /* We're done expanding trees to RTL. */
4183 currently_expanding_to_rtl = 0;
4185 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4189 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4191 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4192 e->flags &= ~EDGE_EXECUTABLE;
4194 /* At the moment not all abnormal edges match the RTL
4195 representation. It is safe to remove them here as
4196 find_many_sub_basic_blocks will rediscover them.
4197 In the future we should get this fixed properly. */
4198 if ((e->flags & EDGE_ABNORMAL)
4199 && !(e->flags & EDGE_SIBCALL))
4206 blocks = sbitmap_alloc (last_basic_block);
4207 sbitmap_ones (blocks);
4208 find_many_sub_basic_blocks (blocks);
4209 sbitmap_free (blocks);
4210 purge_all_dead_edges ();
4214 expand_stack_alignment ();
4216 #ifdef ENABLE_CHECKING
4217 verify_flow_info ();
4220 /* There's no need to defer outputting this function any more; we
4221 know we want to output it. */
4222 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4224 /* Now that we're done expanding trees to RTL, we shouldn't have any
4225 more CONCATs anywhere. */
4226 generating_concat_p = 0;
4231 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4232 /* And the pass manager will dump RTL for us. */
4235 /* If we're emitting a nested function, make sure its parent gets
4236 emitted as well. Doing otherwise confuses debug info. */
4239 for (parent = DECL_CONTEXT (current_function_decl);
4240 parent != NULL_TREE;
4241 parent = get_containing_scope (parent))
4242 if (TREE_CODE (parent) == FUNCTION_DECL)
4243 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4246 /* We are now committed to emitting code for this function. Do any
4247 preparation, such as emitting abstract debug info for the inline
4248 before it gets mangled by optimization. */
4249 if (cgraph_function_possibly_inlined_p (current_function_decl))
4250 (*debug_hooks->outlining_inline_function) (current_function_decl);
4252 TREE_ASM_WRITTEN (current_function_decl) = 1;
4254 /* After expanding, the return labels are no longer needed. */
4255 return_label = NULL;
4256 naked_return_label = NULL;
4257 /* Tag the blocks with a depth number so that change_scope can find
4258 the common parent easily. */
4259 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4260 default_rtl_profile ();
4261 timevar_pop (TV_POST_EXPAND);
4265 struct rtl_opt_pass pass_expand =
4269 "expand", /* name */
4271 gimple_expand_cfg, /* execute */
4274 0, /* static_pass_number */
4275 TV_EXPAND, /* tv_id */
4276 PROP_ssa | PROP_gimple_leh | PROP_cfg
4277 | PROP_gimple_lcx, /* properties_required */
4278 PROP_rtl, /* properties_provided */
4279 PROP_ssa | PROP_trees, /* properties_destroyed */
4280 TODO_verify_ssa | TODO_verify_flow
4281 | TODO_verify_stmts, /* todo_flags_start */
4283 | TODO_ggc_collect /* todo_flags_finish */