1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
47 #include "ssaexpand.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 /* Return an expression tree corresponding to the RHS of GIMPLE
64 gimple_assign_rhs_to_tree (gimple stmt)
67 enum gimple_rhs_class grhs_class;
69 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71 if (grhs_class == GIMPLE_TERNARY_RHS)
72 t = build3 (gimple_assign_rhs_code (stmt),
73 TREE_TYPE (gimple_assign_lhs (stmt)),
74 gimple_assign_rhs1 (stmt),
75 gimple_assign_rhs2 (stmt),
76 gimple_assign_rhs3 (stmt));
77 else if (grhs_class == GIMPLE_BINARY_RHS)
78 t = build2 (gimple_assign_rhs_code (stmt),
79 TREE_TYPE (gimple_assign_lhs (stmt)),
80 gimple_assign_rhs1 (stmt),
81 gimple_assign_rhs2 (stmt));
82 else if (grhs_class == GIMPLE_UNARY_RHS)
83 t = build1 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt));
86 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 t = gimple_assign_rhs1 (stmt);
89 /* Avoid modifying this tree in place below. */
90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
91 && gimple_location (stmt) != EXPR_LOCATION (t))
92 || (gimple_block (stmt)
93 && currently_expanding_to_rtl
95 && gimple_block (stmt) != TREE_BLOCK (t)))
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
103 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
104 TREE_BLOCK (t) = gimple_block (stmt);
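/* As an illustration (names are hypothetical): for a statement
   "x = a + b" the RHS classifies as GIMPLE_BINARY_RHS, so
   gimple_assign_rhs_to_tree builds roughly
   build2 (PLUS_EXPR, TREE_TYPE (x), a, b), while a plain copy
   "x = a" is GIMPLE_SINGLE_RHS and returns the RHS tree itself,
   with its location and block possibly re-set as above.  */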
110 #ifndef STACK_ALIGNMENT_NEEDED
111 #define STACK_ALIGNMENT_NEEDED 1
114 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
116 /* Associate declaration T with storage space X. If T is not an
117 SSA name this is exactly SET_DECL_RTL, otherwise make the
118 partition of T associated with X. */
120 set_rtl (tree t, rtx x)
122 if (TREE_CODE (t) == SSA_NAME)
124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
127 /* For the benefit of debug information at -O0 (where vartracking
128 doesn't run) record the place also in the base DECL if it's
129 a normal variable (not a parameter). */
130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 tree var = SSA_NAME_VAR (t);
133 /* If we don't yet have something recorded, just record it now. */
134 if (!DECL_RTL_SET_P (var))
135 SET_DECL_RTL (var, x);
136 /* If we have it set already to "multiple places" don't
138 else if (DECL_RTL (var) == pc_rtx)
140 /* If we have something recorded and it's not the same place
141 as we want to record now, we have multiple partitions for the
142 same base variable, with different places. We can't just
143 randomly choose one, hence we have to say that we don't know.
144 This only happens with optimization, and there var-tracking
145 will figure out the right thing. */
146 else if (DECL_RTL (var) != x)
147 SET_DECL_RTL (var, pc_rtx);
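/* Illustration: at -O0, if the only partition for variable i is given
   stack slot X, DECL_RTL (i) records X so the debugger can find i
   without var-tracking.  With optimization, if two partitions based on
   i end up in different places, DECL_RTL (i) is set to pc_rtx, the
   "multiple places" sentinel, and var-tracking resolves it later.  */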
154 /* This structure holds data relevant to one variable that will be
155 placed in a stack slot. */
161 /* The offset of the variable. During partitioning, this is the
162 offset relative to the partition. After partitioning, this
163 is relative to the stack frame. */
164 HOST_WIDE_INT offset;
166 /* Initially, the size of the variable. Later, the size of the partition,
167 if this variable becomes its partition's representative. */
170 /* The *byte* alignment required for this variable. Or, as with the
171 size, the alignment for this partition. */
174 /* The partition representative. */
175 size_t representative;
177 /* The next stack variable in the partition, or EOC. */
180 /* The numbers of conflicting stack variables. */
184 #define EOC ((size_t)-1)
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
204 /* Used during expand_used_vars. Remember if we saw a character buffer
205 smaller than our cutoff threshold. Used for -Wstack-protector. */
206 static bool has_short_buffer;
208 /* Discover the byte alignment to use for DECL. Ignore alignment
209 we can't honor with the expected alignment of the stack boundary. */
212 get_decl_align_unit (tree decl)
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
215 return align / BITS_PER_UNIT;
218 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
219 Return the frame offset. */
222 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
224 HOST_WIDE_INT offset, new_frame_offset;
226 new_frame_offset = frame_offset;
227 if (FRAME_GROWS_DOWNWARD)
229 new_frame_offset -= size + frame_phase;
230 new_frame_offset &= -align;
231 new_frame_offset += frame_phase;
232 offset = new_frame_offset;
236 new_frame_offset -= frame_phase;
237 new_frame_offset += align - 1;
238 new_frame_offset &= -align;
239 new_frame_offset += frame_phase;
240 offset = new_frame_offset;
241 new_frame_offset += size;
243 frame_offset = new_frame_offset;
245 if (frame_offset_overflow (frame_offset, cfun->decl))
246 frame_offset = offset = 0;
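/* Worked example for alloc_stack_frame_space with hypothetical values,
   on a downward-growing frame with frame_phase == 0: starting from
   frame_offset == -20, a request for size 8 at alignment 16 computes
   -20 - 8 == -28, rounds down with "& -16" to -32, and returns -32;
   frame_offset becomes -32, so the new slot occupies bytes [-32, -24)
   of the frame at the requested alignment.  */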
251 /* Accumulate DECL into STACK_VARS. */
254 add_stack_var (tree decl)
258 if (stack_vars_num >= stack_vars_alloc)
260 if (stack_vars_alloc)
261 stack_vars_alloc = stack_vars_alloc * 3 / 2;
263 stack_vars_alloc = 32;
265 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
267 v = &stack_vars[stack_vars_num];
271 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
272 /* Ensure that all variables have size, so that &a != &b for any two
273 variables that are simultaneously live. */
276 v->alignb = get_decl_align_unit (SSAVAR (decl));
278 /* All variables are initially in their own partition. */
279 v->representative = stack_vars_num;
282 /* All variables initially conflict with no other. */
285 /* Ensure that this decl doesn't get put onto the list twice. */
286 set_rtl (decl, pc_rtx);
291 /* Make the decls associated with luid's X and Y conflict. */
294 add_stack_var_conflict (size_t x, size_t y)
296 struct stack_var *a = &stack_vars[x];
297 struct stack_var *b = &stack_vars[y];
299 a->conflicts = BITMAP_ALLOC (NULL);
301 b->conflicts = BITMAP_ALLOC (NULL);
302 bitmap_set_bit (a->conflicts, y);
303 bitmap_set_bit (b->conflicts, x);
306 /* Check whether the decls associated with luid's X and Y conflict. */
309 stack_var_conflict_p (size_t x, size_t y)
311 struct stack_var *a = &stack_vars[x];
312 struct stack_var *b = &stack_vars[y];
313 if (!a->conflicts || !b->conflicts)
315 return bitmap_bit_p (a->conflicts, y);
318 /* Returns true if TYPE is or contains a union type. */
321 aggregate_contains_union_type (tree type)
325 if (TREE_CODE (type) == UNION_TYPE
326 || TREE_CODE (type) == QUAL_UNION_TYPE)
328 if (TREE_CODE (type) == ARRAY_TYPE)
329 return aggregate_contains_union_type (TREE_TYPE (type));
330 if (TREE_CODE (type) != RECORD_TYPE)
333 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
334 if (TREE_CODE (field) == FIELD_DECL)
335 if (aggregate_contains_union_type (TREE_TYPE (field)))
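/* For example, "struct S { union { short s; int i; } u; }" and an array
   of such structs both make aggregate_contains_union_type return true,
   while "struct T { int a[4]; }" does not, since it contains no union
   anywhere in its fields.  */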
341 /* A subroutine of expand_used_vars. If two variables X and Y have alias
342 sets that do not conflict, then do add a conflict for these variables
343 in the interference graph. We also need to make sure to add conflicts
344 for union containing structures. Else RTL alias analysis comes along
345 and due to type based aliasing rules decides that for two overlapping
346 union temporaries { short s; int i; } accesses to the same mem through
347 different types may not alias and happily reorders stores across
348 life-time boundaries of the temporaries (See PR25654).
349 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
352 add_alias_set_conflicts (void)
354 size_t i, j, n = stack_vars_num;
356 for (i = 0; i < n; ++i)
358 tree type_i = TREE_TYPE (stack_vars[i].decl);
359 bool aggr_i = AGGREGATE_TYPE_P (type_i);
362 contains_union = aggregate_contains_union_type (type_i);
363 for (j = 0; j < i; ++j)
365 tree type_j = TREE_TYPE (stack_vars[j].decl);
366 bool aggr_j = AGGREGATE_TYPE_P (type_j);
368 /* Either the objects conflict by means of type based
369 aliasing rules, or we need to add a conflict. */
370 || !objects_must_conflict_p (type_i, type_j)
371 /* In case the types do not conflict ensure that access
372 to elements will conflict. In case of unions we have
373 to be careful as type based aliasing rules may say
374 access to the same memory does not conflict. So play
375 safe and add a conflict in this case. */
377 add_stack_var_conflict (i, j);
382 /* A subroutine of partition_stack_vars. A comparison function for qsort,
383 sorting an array of indices by the properties of the object. */
386 stack_var_cmp (const void *a, const void *b)
388 size_t ia = *(const size_t *)a;
389 size_t ib = *(const size_t *)b;
390 unsigned int aligna = stack_vars[ia].alignb;
391 unsigned int alignb = stack_vars[ib].alignb;
392 HOST_WIDE_INT sizea = stack_vars[ia].size;
393 HOST_WIDE_INT sizeb = stack_vars[ib].size;
394 tree decla = stack_vars[ia].decl;
395 tree declb = stack_vars[ib].decl;
397 unsigned int uida, uidb;
399 /* Primary compare on "large" alignment. Large comes first. */
400 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
401 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
402 if (largea != largeb)
403 return (int)largeb - (int)largea;
405 /* Secondary compare on size, decreasing */
411 /* Tertiary compare on true alignment, decreasing. */
417 /* Final compare on ID for sort stability, increasing.
418 Two SSA names are compared by their version, SSA names come before
419 non-SSA names, and two normal decls are compared by their DECL_UID. */
420 if (TREE_CODE (decla) == SSA_NAME)
422 if (TREE_CODE (declb) == SSA_NAME)
423 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
427 else if (TREE_CODE (declb) == SSA_NAME)
430 uida = DECL_UID (decla), uidb = DECL_UID (declb);
439 /* If the points-to solution *PT points to variables that are in a partition
440 together with other variables add all partition members to the pointed-to
444 add_partitioned_vars_to_ptset (struct pt_solution *pt,
445 struct pointer_map_t *decls_to_partitions,
446 struct pointer_set_t *visited, bitmap temp)
454 /* The pointed-to vars bitmap is shared, it is enough to
456 || pointer_set_insert(visited, pt->vars))
461 /* By using a temporary bitmap to store all members of the partitions
462 we have to add we make sure to visit each of the partitions only
464 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
466 || !bitmap_bit_p (temp, i))
467 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
468 (void *)(size_t) i)))
469 bitmap_ior_into (temp, *part);
470 if (!bitmap_empty_p (temp))
471 bitmap_ior_into (pt->vars, temp);
474 /* Update points-to sets based on partition info, so we can use them on RTL.
475 The bitmaps representing stack partitions will be saved until expand,
476 where partitioned decls used as bases in memory expressions will be
480 update_alias_info_with_stack_vars (void)
482 struct pointer_map_t *decls_to_partitions = NULL;
484 tree var = NULL_TREE;
486 for (i = 0; i < stack_vars_num; i++)
490 struct ptr_info_def *pi;
492 /* Not interested in partitions with single variable. */
493 if (stack_vars[i].representative != i
494 || stack_vars[i].next == EOC)
497 if (!decls_to_partitions)
499 decls_to_partitions = pointer_map_create ();
500 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
503 /* Create an SSA_NAME that points to the partition for use
504 as base during alias-oracle queries on RTL for bases that
505 have been partitioned. */
506 if (var == NULL_TREE)
507 var = create_tmp_var (ptr_type_node, NULL);
508 name = make_ssa_name (var, NULL);
510 /* Create bitmaps representing partitions. They will be used for
511 points-to sets later, so use GGC alloc. */
512 part = BITMAP_GGC_ALLOC ();
513 for (j = i; j != EOC; j = stack_vars[j].next)
515 tree decl = stack_vars[j].decl;
516 unsigned int uid = DECL_PT_UID (decl);
517 /* We should never end up partitioning SSA names (though they
518 may end up on the stack). Neither should we allocate stack
519 space to something that is unused and thus unreferenced, except
520 for -O0 where we are preserving even unreferenced variables. */
521 gcc_assert (DECL_P (decl)
523 || referenced_var_lookup (cfun, DECL_UID (decl))));
524 bitmap_set_bit (part, uid);
525 *((bitmap *) pointer_map_insert (decls_to_partitions,
526 (void *)(size_t) uid)) = part;
527 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
531 /* Make the SSA name point to all partition members. */
532 pi = get_ptr_info (name);
533 pt_solution_set (&pi->pt, part, false, false);
536 /* Make all points-to sets that contain one member of a partition
537 contain all members of the partition. */
538 if (decls_to_partitions)
541 struct pointer_set_t *visited = pointer_set_create ();
542 bitmap temp = BITMAP_ALLOC (NULL);
544 for (i = 1; i < num_ssa_names; i++)
546 tree name = ssa_name (i);
547 struct ptr_info_def *pi;
550 && POINTER_TYPE_P (TREE_TYPE (name))
551 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
552 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
556 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
557 decls_to_partitions, visited, temp);
559 pointer_set_destroy (visited);
560 pointer_map_destroy (decls_to_partitions);
565 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
566 partitioning algorithm. Partitions A and B are known to be non-conflicting.
567 Merge them into a single partition A.
569 At the same time, add OFFSET to all variables in partition B. At the end
570 of the partitioning process we should have a nice block easy to lay out within
574 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
577 struct stack_var *vb = &stack_vars[b];
581 /* Update each element of partition B with the given offset,
582 and merge them into partition A. */
583 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
585 stack_vars[i].offset += offset;
586 stack_vars[i].representative = a;
588 stack_vars[last].next = stack_vars[a].next;
589 stack_vars[a].next = b;
591 /* Update the required alignment of partition A to account for B. */
592 if (stack_vars[a].alignb < stack_vars[b].alignb)
593 stack_vars[a].alignb = stack_vars[b].alignb;
595 /* Update the interference graph and merge the conflicts. */
598 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
599 add_stack_var_conflict (a, stack_vars[u].representative);
600 BITMAP_FREE (vb->conflicts);
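/* Sketch of the effect of union_stack_vars (names are hypothetical):
   if partition A is the single-element chain {a} and partition B is
   {b -> c}, then union_stack_vars (a, b, 16) produces the chain
   a -> b -> c, adds 16 to the offsets of b and c, points their
   representative fields at a, and raises a's alignment and conflict
   set to cover everything B required.  */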
604 /* A subroutine of expand_used_vars. Binpack the variables into
605 partitions constrained by the interference graph. The overall
606 algorithm used is as follows:
608 Sort the objects by size.
613 Look for the largest non-conflicting object B with size <= S.
623 partition_stack_vars (void)
625 size_t si, sj, n = stack_vars_num;
627 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
628 for (si = 0; si < n; ++si)
629 stack_vars_sorted[si] = si;
634 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
636 for (si = 0; si < n; ++si)
638 size_t i = stack_vars_sorted[si];
639 HOST_WIDE_INT isize = stack_vars[i].size;
640 unsigned int ialign = stack_vars[i].alignb;
641 HOST_WIDE_INT offset = 0;
643 for (sj = si; sj-- > 0; )
645 size_t j = stack_vars_sorted[sj];
646 HOST_WIDE_INT jsize = stack_vars[j].size;
647 unsigned int jalign = stack_vars[j].alignb;
649 /* Ignore objects that aren't partition representatives. */
650 if (stack_vars[j].representative != j)
653 /* Ignore objects too large for the remaining space. */
657 /* Ignore conflicting objects. */
658 if (stack_var_conflict_p (i, j))
661 /* Do not mix objects of "small" (supported) alignment
662 and "large" (unsupported) alignment. */
663 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
664 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
667 /* Refine the remaining space check to include alignment. */
668 if (offset & (jalign - 1))
670 HOST_WIDE_INT toff = offset;
672 toff &= -(HOST_WIDE_INT)jalign;
673 if (isize - (toff - offset) < jsize)
676 isize -= toff - offset;
680 /* UNION the objects, placing J at OFFSET. */
681 union_stack_vars (i, j, offset);
689 update_alias_info_with_stack_vars ();
692 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
695 dump_stack_var_partition (void)
697 size_t si, i, j, n = stack_vars_num;
699 for (si = 0; si < n; ++si)
701 i = stack_vars_sorted[si];
703 /* Skip variables that aren't partition representatives, for now. */
704 if (stack_vars[i].representative != i)
707 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
708 " align %u\n", (unsigned long) i, stack_vars[i].size,
709 stack_vars[i].alignb);
711 for (j = i; j != EOC; j = stack_vars[j].next)
713 fputc ('\t', dump_file);
714 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
715 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
716 stack_vars[j].offset);
721 /* Assign rtl to DECL at BASE + OFFSET. */
724 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
725 HOST_WIDE_INT offset)
730 /* If this fails, we've overflowed the stack frame. Error nicely? */
731 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
733 x = plus_constant (base, offset);
734 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
736 if (TREE_CODE (decl) != SSA_NAME)
738 /* Set alignment we actually gave this decl if it isn't an SSA name.
739 If it is we generate stack slots only accidentally so it isn't as
740 important, we'll simply use the alignment that is already set. */
741 if (base == virtual_stack_vars_rtx)
742 offset -= frame_phase;
743 align = offset & -offset;
744 align *= BITS_PER_UNIT;
745 if (align == 0 || align > base_align)
748 /* One would think that we could assert that we're not decreasing
749 alignment here, but (at least) the i386 port does exactly this
750 via the MINIMUM_ALIGNMENT hook. */
752 DECL_ALIGN (decl) = align;
753 DECL_USER_ALIGN (decl) = 0;
756 set_mem_attributes (x, SSAVAR (decl), true);
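/* Note on the alignment computation in this function: "offset & -offset"
   extracts the largest power of two dividing OFFSET.  For example, an
   offset of 24 gives 24 & -24 == 8, i.e. an 8-byte (64-bit, after
   scaling by BITS_PER_UNIT) alignment guarantee for the slot, which is
   then reconciled with BASE_ALIGN before being recorded in DECL_ALIGN.  */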
760 /* A subroutine of expand_used_vars. Give each partition representative
761 a unique location within the stack frame. Update each partition member
762 with that location. */
765 expand_stack_vars (bool (*pred) (tree))
767 size_t si, i, j, n = stack_vars_num;
768 HOST_WIDE_INT large_size = 0, large_alloc = 0;
769 rtx large_base = NULL;
770 unsigned large_align = 0;
773 /* Determine if there are any variables requiring "large" alignment.
774 Since these are dynamically allocated, we only process these if
775 no predicate is involved. */
776 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
777 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
779 /* Find the total size of these variables. */
780 for (si = 0; si < n; ++si)
784 i = stack_vars_sorted[si];
785 alignb = stack_vars[i].alignb;
787 /* Stop when we get to the first decl with "small" alignment. */
788 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
791 /* Skip variables that aren't partition representatives. */
792 if (stack_vars[i].representative != i)
795 /* Skip variables that have already had rtl assigned. See also
796 add_stack_var where we perpetrate this pc_rtx hack. */
797 decl = stack_vars[i].decl;
798 if ((TREE_CODE (decl) == SSA_NAME
799 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
800 : DECL_RTL (decl)) != pc_rtx)
803 large_size += alignb - 1;
804 large_size &= -(HOST_WIDE_INT)alignb;
805 large_size += stack_vars[i].size;
808 /* If there were any, allocate space. */
810 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
814 for (si = 0; si < n; ++si)
817 unsigned base_align, alignb;
818 HOST_WIDE_INT offset;
820 i = stack_vars_sorted[si];
822 /* Skip variables that aren't partition representatives, for now. */
823 if (stack_vars[i].representative != i)
826 /* Skip variables that have already had rtl assigned. See also
827 add_stack_var where we perpetrate this pc_rtx hack. */
828 decl = stack_vars[i].decl;
829 if ((TREE_CODE (decl) == SSA_NAME
830 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
831 : DECL_RTL (decl)) != pc_rtx)
834 /* Check the predicate to see whether this variable should be
835 allocated in this pass. */
836 if (pred && !pred (decl))
839 alignb = stack_vars[i].alignb;
840 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
842 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
843 base = virtual_stack_vars_rtx;
844 base_align = crtl->max_used_stack_slot_alignment;
848 /* Large alignment is only processed in the last pass. */
851 gcc_assert (large_base != NULL);
853 large_alloc += alignb - 1;
854 large_alloc &= -(HOST_WIDE_INT)alignb;
855 offset = large_alloc;
856 large_alloc += stack_vars[i].size;
859 base_align = large_align;
862 /* Create rtl for each variable based on their location within the
864 for (j = i; j != EOC; j = stack_vars[j].next)
866 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
867 expand_one_stack_var_at (stack_vars[j].decl,
869 stack_vars[j].offset + offset);
873 gcc_assert (large_alloc == large_size);
876 /* Take into account all sizes of partitions and reset DECL_RTLs. */
878 account_stack_vars (void)
880 size_t si, j, i, n = stack_vars_num;
881 HOST_WIDE_INT size = 0;
883 for (si = 0; si < n; ++si)
885 i = stack_vars_sorted[si];
887 /* Skip variables that aren't partition representatives, for now. */
888 if (stack_vars[i].representative != i)
891 size += stack_vars[i].size;
892 for (j = i; j != EOC; j = stack_vars[j].next)
893 set_rtl (stack_vars[j].decl, NULL);
898 /* A subroutine of expand_one_var. Called to immediately assign rtl
899 to a variable to be allocated in the stack frame. */
902 expand_one_stack_var (tree var)
904 HOST_WIDE_INT size, offset;
907 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
908 byte_align = get_decl_align_unit (SSAVAR (var));
910 /* We handle highly aligned variables in expand_stack_vars. */
911 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
913 offset = alloc_stack_frame_space (size, byte_align);
915 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
916 crtl->max_used_stack_slot_alignment, offset);
919 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
920 that will reside in a hard register. */
923 expand_one_hard_reg_var (tree var)
925 rest_of_decl_compilation (var, 0, 0);
928 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
929 that will reside in a pseudo register. */
932 expand_one_register_var (tree var)
934 tree decl = SSAVAR (var);
935 tree type = TREE_TYPE (decl);
936 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
937 rtx x = gen_reg_rtx (reg_mode);
941 /* Note if the object is a user variable. */
942 if (!DECL_ARTIFICIAL (decl))
945 if (POINTER_TYPE_P (type))
946 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
949 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
950 has some associated error, e.g. its type is error-mark. We just need
951 to pick something that won't crash the rest of the compiler. */
954 expand_one_error_var (tree var)
956 enum machine_mode mode = DECL_MODE (var);
960 x = gen_rtx_MEM (BLKmode, const0_rtx);
961 else if (mode == VOIDmode)
964 x = gen_reg_rtx (mode);
966 SET_DECL_RTL (var, x);
969 /* A subroutine of expand_one_var. VAR is a variable that will be
970 allocated to the local stack frame. Return true if we wish to
971 add VAR to STACK_VARS so that it will be coalesced with other
972 variables. Return false to allocate VAR immediately.
974 This function is used to reduce the number of variables considered
975 for coalescing, which reduces the size of the quadratic problem. */
978 defer_stack_allocation (tree var, bool toplevel)
980 /* If stack protection is enabled, *all* stack variables must be deferred,
981 so that we can re-order the strings to the top of the frame. */
982 if (flag_stack_protect)
985 /* We handle "large" alignment via dynamic allocation. We want to handle
986 this extra complication in only one place, so defer them. */
987 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
990 /* Variables in the outermost scope automatically conflict with
991 every other variable. The only reason to want to defer them
992 at all is that, after sorting, we can more efficiently pack
993 small variables in the stack frame. Continue to defer at -O2. */
994 if (toplevel && optimize < 2)
997 /* Without optimization, *most* variables are allocated from the
998 stack, which makes the quadratic problem large exactly when we
999 want compilation to proceed as quickly as possible. On the
1000 other hand, we don't want the function's stack frame size to
1001 get completely out of hand. So we avoid adding scalars and
1002 "small" aggregates to the list at all. */
1003 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
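/* Rough examples of the policy above (assuming no -fstack-protector and
   no over-aligned decls): at -O0 a block-scope "int i", well under the
   32-byte cutoff, gets a frame slot immediately, whereas a block-scope
   "char buf[128]" is deferred so it can be packed with other deferred
   variables; at -O2 even top-level variables are deferred for better
   packing.  */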
1009 /* A subroutine of expand_used_vars. Expand one variable according to
1010 its flavor. Variables to be placed on the stack are not actually
1011 expanded yet, merely recorded.
1012 When REALLY_EXPAND is false, only add stack values to be allocated.
1013 Return the stack usage this variable is supposed to take.
1016 static HOST_WIDE_INT
1017 expand_one_var (tree var, bool toplevel, bool really_expand)
1019 unsigned int align = BITS_PER_UNIT;
1024 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1026 /* Because we don't know if VAR will be in register or on stack,
1027 we conservatively assume it will be on stack even if VAR is
1028 eventually put into register after RA pass. For non-automatic
1029 variables, which won't be on stack, we collect alignment of
1030 type and ignore user specified alignment. */
1031 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1032 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1033 TYPE_MODE (TREE_TYPE (var)),
1034 TYPE_ALIGN (TREE_TYPE (var)));
1035 else if (DECL_HAS_VALUE_EXPR_P (var)
1036 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1037 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1038 or variables which were assigned a stack slot already by
1039 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1040 changed from the offset chosen to it. */
1041 align = crtl->stack_alignment_estimated;
1043 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1045 /* If the variable alignment is very large we'll dynamically allocate
1046 it, which means that in-frame portion is just a pointer. */
1047 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1048 align = POINTER_SIZE;
1051 if (SUPPORTS_STACK_ALIGNMENT
1052 && crtl->stack_alignment_estimated < align)
1054 /* stack_alignment_estimated shouldn't change after stack
1055 realign decision is made. */
1056 gcc_assert(!crtl->stack_realign_processed);
1057 crtl->stack_alignment_estimated = align;
1060 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1061 So here we only make sure stack_alignment_needed >= align. */
1062 if (crtl->stack_alignment_needed < align)
1063 crtl->stack_alignment_needed = align;
1064 if (crtl->max_used_stack_slot_alignment < align)
1065 crtl->max_used_stack_slot_alignment = align;
1067 if (TREE_CODE (origvar) == SSA_NAME)
1069 gcc_assert (TREE_CODE (var) != VAR_DECL
1070 || (!DECL_EXTERNAL (var)
1071 && !DECL_HAS_VALUE_EXPR_P (var)
1072 && !TREE_STATIC (var)
1073 && TREE_TYPE (var) != error_mark_node
1074 && !DECL_HARD_REGISTER (var)
1077 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1079 else if (DECL_EXTERNAL (var))
1081 else if (DECL_HAS_VALUE_EXPR_P (var))
1083 else if (TREE_STATIC (var))
1085 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1087 else if (TREE_TYPE (var) == error_mark_node)
1090 expand_one_error_var (var);
1092 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1095 expand_one_hard_reg_var (var);
1097 else if (use_register_for_decl (var))
1100 expand_one_register_var (origvar);
1102 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1106 error ("size of variable %q+D is too large", var);
1107 expand_one_error_var (var);
1110 else if (defer_stack_allocation (var, toplevel))
1111 add_stack_var (origvar);
1115 expand_one_stack_var (origvar);
1116 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1121 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1122 expanding variables. Those variables that can be put into registers
1123 are allocated pseudos; those that can't are put on the stack.
1125 TOPLEVEL is true if this is the outermost BLOCK. */
1128 expand_used_vars_for_block (tree block, bool toplevel)
1130 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1133 old_sv_num = toplevel ? 0 : stack_vars_num;
1135 /* Expand all variables at this level. */
1136 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1138 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1139 || !DECL_NONSHAREABLE (t)))
1140 expand_one_var (t, toplevel, true);
1142 this_sv_num = stack_vars_num;
1144 /* Expand all variables at containing levels. */
1145 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1146 expand_used_vars_for_block (t, false);
1148 /* Since we do not track exact variable lifetimes (which is not even
1149 possible for variables whose address escapes), we mirror the block
1150 tree in the interference graph. Here we cause all variables at this
1151 level, and all sublevels, to conflict. */
1152 if (old_sv_num < this_sv_num)
1154 new_sv_num = stack_vars_num;
1156 for (i = old_sv_num; i < new_sv_num; ++i)
1157 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1158 add_stack_var_conflict (i, j);
1162 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1163 and clear TREE_USED on all local variables. */
1166 clear_tree_used (tree block)
1170 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1171 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1172 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1173 || !DECL_NONSHAREABLE (t))
1176 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1177 clear_tree_used (t);
1180 /* Examine TYPE and determine a bit mask of the following features. */
1182 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1183 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1184 #define SPCT_HAS_ARRAY 4
1185 #define SPCT_HAS_AGGREGATE 8
1188 stack_protect_classify_type (tree type)
1190 unsigned int ret = 0;
1193 switch (TREE_CODE (type))
1196 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1197 if (t == char_type_node
1198 || t == signed_char_type_node
1199 || t == unsigned_char_type_node)
1201 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1202 unsigned HOST_WIDE_INT len;
1204 if (!TYPE_SIZE_UNIT (type)
1205 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1208 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1211 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1213 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1216 ret = SPCT_HAS_ARRAY;
1220 case QUAL_UNION_TYPE:
1222 ret = SPCT_HAS_AGGREGATE;
1223 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1224 if (TREE_CODE (t) == FIELD_DECL)
1225 ret |= stack_protect_classify_type (TREE_TYPE (t));
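/* For example (assuming the default --param ssp-buffer-size of 8):
   "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and
   "struct { char name[64]; int n; }" additionally sets
   SPCT_HAS_AGGREGATE via the field walk above.  */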
1235 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1236 part of the local stack frame. Remember if we ever return nonzero for
1237 any variable in this function. The return value is the phase number in
1238 which the variable should be allocated. */
1241 stack_protect_decl_phase (tree decl)
1243 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1246 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1247 has_short_buffer = true;
1249 if (flag_stack_protect == 2)
1251 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1252 && !(bits & SPCT_HAS_AGGREGATE))
1254 else if (bits & SPCT_HAS_ARRAY)
1258 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1261 has_protected_decls = true;
1266 /* Two helper routines that check for phase 1 and phase 2. These are used
1267 as callbacks for expand_stack_vars. */
1270 stack_protect_decl_phase_1 (tree decl)
1272 return stack_protect_decl_phase (decl) == 1;
1276 stack_protect_decl_phase_2 (tree decl)
1278 return stack_protect_decl_phase (decl) == 2;
1281 /* Ensure that variables in different stack protection phases conflict
1282 so that they are not merged and share the same stack slot. */
1285 add_stack_protection_conflicts (void)
1287 size_t i, j, n = stack_vars_num;
1288 unsigned char *phase;
1290 phase = XNEWVEC (unsigned char, n);
1291 for (i = 0; i < n; ++i)
1292 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1294 for (i = 0; i < n; ++i)
1296 unsigned char ph_i = phase[i];
1297 for (j = 0; j < i; ++j)
1298 if (ph_i != phase[j])
1299 add_stack_var_conflict (i, j);
1305 /* Create a decl for the guard at the top of the stack frame. */
1308 create_stack_guard (void)
1310 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1311 VAR_DECL, NULL, ptr_type_node);
1312 TREE_THIS_VOLATILE (guard) = 1;
1313 TREE_USED (guard) = 1;
1314 expand_one_stack_var (guard);
1315 crtl->stack_protect_guard = guard;
1318 /* Prepare for expanding variables. */
1320 init_vars_expansion (void)
1324 /* Set TREE_USED on all variables in the local_decls. */
1325 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1328 /* Clear TREE_USED on all variables associated with a block scope. */
1329 clear_tree_used (DECL_INITIAL (current_function_decl));
1331 /* Initialize local stack smashing state. */
1332 has_protected_decls = false;
1333 has_short_buffer = false;
1336 /* Free up stack variable graph data. */
1338 fini_vars_expansion (void)
1340 size_t i, n = stack_vars_num;
1341 for (i = 0; i < n; i++)
1342 BITMAP_FREE (stack_vars[i].conflicts);
1343 XDELETEVEC (stack_vars);
1344 XDELETEVEC (stack_vars_sorted);
1346 stack_vars_alloc = stack_vars_num = 0;
1349 /* Make a fair guess for the size of the stack frame of the function
1350 in NODE. This doesn't have to be exact, the result is only used in
1351 the inline heuristics. So we don't want to run the full stack var
1352 packing algorithm (which is quadratic in the number of stack vars).
1353 Instead, we calculate the total size of all stack vars. This turns
1354 out to be a pretty fair estimate -- packing of stack vars doesn't
1355 happen very often. */
1358 estimated_stack_frame_size (struct cgraph_node *node)
1360 HOST_WIDE_INT size = 0;
1363 tree old_cur_fun_decl = current_function_decl;
1364 referenced_var_iterator rvi;
1365 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1367 current_function_decl = node->decl;
1370 gcc_checking_assert (gimple_referenced_vars (fn));
1371 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1372 size += expand_one_var (var, true, false);
1374 if (stack_vars_num > 0)
1376 /* Fake sorting the stack vars for account_stack_vars (). */
1377 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1378 for (i = 0; i < stack_vars_num; ++i)
1379 stack_vars_sorted[i] = i;
1380 size += account_stack_vars ();
1381 fini_vars_expansion ();
1384 current_function_decl = old_cur_fun_decl;
1388 /* Expand all variables used in the function. */
1391 expand_used_vars (void)
1393 tree var, outer_block = DECL_INITIAL (current_function_decl);
1394 VEC(tree,heap) *maybe_local_decls = NULL;
1398 /* Compute the phase of the stack frame for this function. */
1400 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1401 int off = STARTING_FRAME_OFFSET % align;
1402 frame_phase = off ? align - off : 0;
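/* Worked example with hypothetical target values: if
   PREFERRED_STACK_BOUNDARY is 128 bits (align == 16 bytes) and
   STARTING_FRAME_OFFSET is 4, then off == 4 and frame_phase == 12;
   adding 12 to any offset congruent to 4 mod 16 yields a multiple of
   16, which is the invariant documented at the frame_phase
   declaration.  */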
1405 init_vars_expansion ();
1407 for (i = 0; i < SA.map->num_partitions; i++)
1409 tree var = partition_to_var (SA.map, i);
1411 gcc_assert (is_gimple_reg (var));
1412 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1413 expand_one_var (var, true, true);
1416 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1417 contain the default def (representing the parm or result itself)
1418 we don't do anything here. But those which don't contain the
1419 default def (representing a temporary based on the parm/result)
1420 we need to allocate space just like for normal VAR_DECLs. */
1421 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1423 expand_one_var (var, true, true);
1424 gcc_assert (SA.partition_to_pseudo[i]);
1429 /* At this point all variables on the local_decls with TREE_USED
1430 set are not associated with any block scope. Lay them out. */
1432 len = VEC_length (tree, cfun->local_decls);
1433 FOR_EACH_LOCAL_DECL (cfun, i, var)
1435 bool expand_now = false;
1437 /* Expanded above already. */
1438 if (is_gimple_reg (var))
1440 TREE_USED (var) = 0;
1443 /* We didn't set a block for static or extern because it's hard
1444 to tell the difference between a global variable (re)declared
1445 in a local scope, and one that's really declared there to
1446 begin with. And it doesn't really matter much, since we're
1447 not giving them stack space. Expand them now. */
1448 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1451 /* If the variable is not associated with any block, then it
1452 was created by the optimizers, and could be live anywhere
1454 else if (TREE_USED (var))
1457 /* Finally, mark all variables on the list as used. We'll use
1458 this in a moment when we expand those associated with scopes. */
1459 TREE_USED (var) = 1;
1462 expand_one_var (var, true, true);
1465 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1467 rtx rtl = DECL_RTL_IF_SET (var);
1469 /* Keep artificial non-ignored vars in cfun->local_decls
1470 chain until instantiate_decls. */
1471 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1472 add_local_decl (cfun, var);
1473 else if (rtl == NULL_RTX)
1474 /* If rtl isn't set yet, which can happen e.g. with
1475 -fstack-protector, retry before returning from this
1477 VEC_safe_push (tree, heap, maybe_local_decls, var);
1481 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1483 +-----------------+-----------------+
1484 | ...processed... | ...duplicates...|
1485 +-----------------+-----------------+
1487 +-- LEN points here.
1489 We just want the duplicates, as those are the artificial
1490 non-ignored vars that we want to keep until instantiate_decls.
1491 Move them down and truncate the array. */
1492 if (!VEC_empty (tree, cfun->local_decls))
1493 VEC_block_remove (tree, cfun->local_decls, 0, len);
1495 /* At this point, all variables within the block tree with TREE_USED
1496 set are actually used by the optimized function. Lay them out. */
1497 expand_used_vars_for_block (outer_block, true);
1499 if (stack_vars_num > 0)
1501 /* Due to the way alias sets work, no variables with non-conflicting
1502 alias sets may be assigned the same address. Add conflicts to
1504 add_alias_set_conflicts ();
1506 /* If stack protection is enabled, we don't share space between
1507 vulnerable data and non-vulnerable data. */
1508 if (flag_stack_protect)
1509 add_stack_protection_conflicts ();
1511 /* Now that we have collected all stack variables, and have computed a
1512 minimal interference graph, attempt to save some stack space. */
1513 partition_stack_vars ();
1515 dump_stack_var_partition ();
1518 /* There are several conditions under which we should create a
1519 stack guard: protect-all, alloca used, protected decls present. */
1520 if (flag_stack_protect == 2
1521 || (flag_stack_protect
1522 && (cfun->calls_alloca || has_protected_decls)))
1523 create_stack_guard ();
1525 /* Assign rtl to each variable based on these partitions. */
1526 if (stack_vars_num > 0)
1528 /* Reorder decls to be protected by iterating over the variables
1529 array multiple times, and allocating out of each phase in turn. */
1530 /* ??? We could probably integrate this into the qsort we did
1531 earlier, such that we naturally see these variables first,
1532 and thus naturally allocate things in the right order. */
1533 if (has_protected_decls)
1535 /* Phase 1 contains only character arrays. */
1536 expand_stack_vars (stack_protect_decl_phase_1);
1538 /* Phase 2 contains other kinds of arrays. */
1539 if (flag_stack_protect == 2)
1540 expand_stack_vars (stack_protect_decl_phase_2);
1543 expand_stack_vars (NULL);
1545 fini_vars_expansion ();
1548 /* If there were any artificial non-ignored vars without rtl
1549 found earlier, see if deferred stack allocation hasn't assigned
1551 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1553 rtx rtl = DECL_RTL_IF_SET (var);
1555 /* Keep artificial non-ignored vars in cfun->local_decls
1556 chain until instantiate_decls. */
1557 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1558 add_local_decl (cfun, var);
1560 VEC_free (tree, heap, maybe_local_decls);
1562 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1563 if (STACK_ALIGNMENT_NEEDED)
1565 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1566 if (!FRAME_GROWS_DOWNWARD)
1567 frame_offset += align - 1;
1568 frame_offset &= -align;
1573 /* If we need to produce a detailed dump, print the tree representation
1574 for STMT to the dump file. SINCE is the last RTX after which the RTL
1575 generated for STMT should have been appended. */
1578 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1580 if (dump_file && (dump_flags & TDF_DETAILS))
1582 fprintf (dump_file, "\n;; ");
1583 print_gimple_stmt (dump_file, stmt, 0,
1584 TDF_SLIM | (dump_flags & TDF_LINENO));
1585 fprintf (dump_file, "\n");
1587 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1591 /* Maps the blocks that do not contain tree labels to rtx labels. */
1593 static struct pointer_map_t *lab_rtx_for_bb;
1595 /* Returns the label_rtx expression for a label starting basic block BB. */
1598 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1600 gimple_stmt_iterator gsi;
1605 if (bb->flags & BB_RTL)
1606 return block_label (bb);
1608 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1612 /* Find the tree label if it is present. */
1614 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1616 lab_stmt = gsi_stmt (gsi);
1617 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1620 lab = gimple_label_label (lab_stmt);
1621 if (DECL_NONLOCAL (lab))
1624 return label_rtx (lab);
1627 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1628 *elt = gen_label_rtx ();
1633 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1634 of a basic block where we just expanded the conditional at the end,
1635 possibly clean up the CFG and instruction sequence. LAST is the
1636 last instruction before the just emitted jump sequence. */
1639 maybe_cleanup_end_of_block (edge e, rtx last)
1641 /* Special case: when jumpif decides that the condition is
1642 trivial it emits an unconditional jump (and the necessary
1643 barrier). But we still have two edges, the fallthru one is
1644 wrong. purge_dead_edges would clean this up later. Unfortunately
1645 we have to insert insns (and split edges) before
1646 find_many_sub_basic_blocks and hence before purge_dead_edges.
1647 But splitting edges might create new blocks which depend on the
1648 fact that if there are two edges there's no barrier. So the
1649 barrier would get lost and verify_flow_info would ICE. Instead
1650 of auditing all edge splitters to care for the barrier (which
1651 normally isn't there in a cleaned CFG), fix it here. */
1652 if (BARRIER_P (get_last_insn ()))
1656 /* Now, we have a single successor block, if we have insns to
1657 insert on the remaining edge we potentially will insert
1658 it at the end of this block (if the dest block isn't feasible)
1659 in order to avoid splitting the edge. This insertion will take
1660 place in front of the last jump. But we might have emitted
1661 multiple jumps (conditional and one unconditional) to the
1662 same destination. Inserting in front of the last one then
1663 is a problem. See PR 40021. We fix this by deleting all
1664 jumps except the last unconditional one. */
1665 insn = PREV_INSN (get_last_insn ());
1666 /* Make sure we have an unconditional jump. Otherwise we're
1668 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1669 for (insn = PREV_INSN (insn); insn != last;)
1671 insn = PREV_INSN (insn);
1672 if (JUMP_P (NEXT_INSN (insn)))
1674 if (!any_condjump_p (NEXT_INSN (insn)))
1676 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1677 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1679 delete_insn (NEXT_INSN (insn));
1685 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1686 Returns a new basic block if we've terminated the current basic
1687 block and created a new one. */
1690 expand_gimple_cond (basic_block bb, gimple stmt)
1692 basic_block new_bb, dest;
1697 enum tree_code code;
1700 code = gimple_cond_code (stmt);
1701 op0 = gimple_cond_lhs (stmt);
1702 op1 = gimple_cond_rhs (stmt);
1703 /* We're sometimes presented with such code:
1707 This would expand to two comparisons which then later might
1708 be cleaned up by combine. But some pattern matchers like if-conversion
1709 work better when there's only one compare, so make up for this
1710 here as special exception if TER would have made the same change. */
1711 if (gimple_cond_single_var_p (stmt)
1713 && TREE_CODE (op0) == SSA_NAME
1714 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1716 gimple second = SSA_NAME_DEF_STMT (op0);
1717 if (gimple_code (second) == GIMPLE_ASSIGN)
1719 enum tree_code code2 = gimple_assign_rhs_code (second);
1720 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1723 op0 = gimple_assign_rhs1 (second);
1724 op1 = gimple_assign_rhs2 (second);
1726 /* If jumps are cheap turn some more codes into
1728 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1730 if ((code2 == BIT_AND_EXPR
1731 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1732 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1733 || code2 == TRUTH_AND_EXPR)
1735 code = TRUTH_ANDIF_EXPR;
1736 op0 = gimple_assign_rhs1 (second);
1737 op1 = gimple_assign_rhs2 (second);
1739 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1741 code = TRUTH_ORIF_EXPR;
1742 op0 = gimple_assign_rhs1 (second);
1743 op1 = gimple_assign_rhs2 (second);
1749 last2 = last = get_last_insn ();
1751 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1752 if (gimple_has_location (stmt))
1754 set_curr_insn_source_location (gimple_location (stmt));
1755 set_curr_insn_block (gimple_block (stmt));
1758 /* These flags have no purpose in RTL land. */
1759 true_edge->flags &= ~EDGE_TRUE_VALUE;
1760 false_edge->flags &= ~EDGE_FALSE_VALUE;
1762 /* We can either have a pure conditional jump with one fallthru edge or
1763 two-way jump that needs to be decomposed into two basic blocks. */
1764 if (false_edge->dest == bb->next_bb)
1766 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1767 true_edge->probability);
1768 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1769 if (true_edge->goto_locus)
1771 set_curr_insn_source_location (true_edge->goto_locus);
1772 set_curr_insn_block (true_edge->goto_block);
1773 true_edge->goto_locus = curr_insn_locator ();
1775 true_edge->goto_block = NULL;
1776 false_edge->flags |= EDGE_FALLTHRU;
1777 maybe_cleanup_end_of_block (false_edge, last);
1780 if (true_edge->dest == bb->next_bb)
1782 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1783 false_edge->probability);
1784 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1785 if (false_edge->goto_locus)
1787 set_curr_insn_source_location (false_edge->goto_locus);
1788 set_curr_insn_block (false_edge->goto_block);
1789 false_edge->goto_locus = curr_insn_locator ();
1791 false_edge->goto_block = NULL;
1792 true_edge->flags |= EDGE_FALLTHRU;
1793 maybe_cleanup_end_of_block (true_edge, last);
1797 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1798 true_edge->probability);
1799 last = get_last_insn ();
1800 if (false_edge->goto_locus)
1802 set_curr_insn_source_location (false_edge->goto_locus);
1803 set_curr_insn_block (false_edge->goto_block);
1804 false_edge->goto_locus = curr_insn_locator ();
1806 false_edge->goto_block = NULL;
1807 emit_jump (label_rtx_for_bb (false_edge->dest));
1810 if (BARRIER_P (BB_END (bb)))
1811 BB_END (bb) = PREV_INSN (BB_END (bb));
1812 update_bb_for_insn (bb);
1814 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1815 dest = false_edge->dest;
1816 redirect_edge_succ (false_edge, new_bb);
1817 false_edge->flags |= EDGE_FALLTHRU;
1818 new_bb->count = false_edge->count;
1819 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1820 new_edge = make_edge (new_bb, dest, 0);
1821 new_edge->probability = REG_BR_PROB_BASE;
1822 new_edge->count = new_bb->count;
1823 if (BARRIER_P (BB_END (new_bb)))
1824 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1825 update_bb_for_insn (new_bb);
1827 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1829 if (true_edge->goto_locus)
1831 set_curr_insn_source_location (true_edge->goto_locus);
1832 set_curr_insn_block (true_edge->goto_block);
1833 true_edge->goto_locus = curr_insn_locator ();
1835 true_edge->goto_block = NULL;
1840 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1844 expand_call_stmt (gimple stmt)
1847 tree lhs = gimple_call_lhs (stmt);
1852 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1854 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1855 decl = gimple_call_fndecl (stmt);
1856 builtin_p = decl && DECL_BUILT_IN (decl);
1858 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1859 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1861 for (i = 0; i < gimple_call_num_args (stmt); i++)
1863 tree arg = gimple_call_arg (stmt, i);
1865 /* TER addresses into arguments of builtin functions so we have a
1866 chance to infer more correct alignment information. See PR39954. */
1868 && TREE_CODE (arg) == SSA_NAME
1869 && (def = get_gimple_for_ssa_name (arg))
1870 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1871 arg = gimple_assign_rhs1 (def);
1872 CALL_EXPR_ARG (exp, i) = arg;
1875 if (gimple_has_side_effects (stmt))
1876 TREE_SIDE_EFFECTS (exp) = 1;
1878 if (gimple_call_nothrow_p (stmt))
1879 TREE_NOTHROW (exp) = 1;
1881 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1882 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1883 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1884 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1885 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1886 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1887 TREE_BLOCK (exp) = gimple_block (stmt);
1890 expand_assignment (lhs, exp, false);
1892 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1895 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1896 STMT that doesn't require special handling for outgoing edges. That
1897 is, no tailcalls and no GIMPLE_COND. */
1900 expand_gimple_stmt_1 (gimple stmt)
1903 switch (gimple_code (stmt))
1906 op0 = gimple_goto_dest (stmt);
1907 if (TREE_CODE (op0) == LABEL_DECL)
1910 expand_computed_goto (op0);
1913 expand_label (gimple_label_label (stmt));
1916 case GIMPLE_PREDICT:
1922 expand_asm_stmt (stmt);
1925 expand_call_stmt (stmt);
1929 op0 = gimple_return_retval (stmt);
1931 if (op0 && op0 != error_mark_node)
1933 tree result = DECL_RESULT (current_function_decl);
1935 /* If we are not returning the current function's RESULT_DECL,
1936 build an assignment to it. */
1939 /* I believe that a function's RESULT_DECL is unique. */
1940 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1942 /* ??? We'd like to use simply expand_assignment here,
1943 but this fails if the value is of BLKmode but the return
1944 decl is a register. expand_return has special handling
1945 for this combination, which eventually should move
1946 to common code. See comments there. Until then, let's
1947 build a modify expression :-/ */
1948 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1953 expand_null_return ();
1955 expand_return (op0);
1960 tree lhs = gimple_assign_lhs (stmt);
1962 /* Tree expand used to fiddle with |= and &= of two bitfield
1963 COMPONENT_REFs here. This can't happen with gimple, the LHS
1964 of binary assigns must be a gimple reg. */
1966 if (TREE_CODE (lhs) != SSA_NAME
1967 || get_gimple_rhs_class (gimple_expr_code (stmt))
1968 == GIMPLE_SINGLE_RHS)
1970 tree rhs = gimple_assign_rhs1 (stmt);
1971 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1972 == GIMPLE_SINGLE_RHS);
1973 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1974 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1975 expand_assignment (lhs, rhs,
1976 gimple_assign_nontemporal_move_p (stmt));
1981 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1982 struct separate_ops ops;
1983 bool promoted = false;
1985 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1986 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1989 ops.code = gimple_assign_rhs_code (stmt);
1990 ops.type = TREE_TYPE (lhs);
1991 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1993 case GIMPLE_TERNARY_RHS:
1994 ops.op2 = gimple_assign_rhs3 (stmt);
1996 case GIMPLE_BINARY_RHS:
1997 ops.op1 = gimple_assign_rhs2 (stmt);
1999 case GIMPLE_UNARY_RHS:
2000 ops.op0 = gimple_assign_rhs1 (stmt);
2005 ops.location = gimple_location (stmt);
2007 /* If we want to use a nontemporal store, force the value to
2008 register first. If we store into a promoted register,
2009 don't directly expand to target. */
2010 temp = nontemporal || promoted ? NULL_RTX : target;
2011 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2018 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2019 /* If TEMP is a VOIDmode constant, use convert_modes to make
2020 sure that we properly convert it. */
2021 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2023 temp = convert_modes (GET_MODE (target),
2024 TYPE_MODE (ops.type),
2026 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2027 GET_MODE (target), temp, unsignedp);
2030 convert_move (SUBREG_REG (target), temp, unsignedp);
2032 else if (nontemporal && emit_storent_insn (target, temp))
2036 temp = force_operand (temp, target);
2038 emit_move_insn (target, temp);
2049 /* Expand one gimple statement STMT and return the last RTL instruction
2050 before any of the newly generated ones.
2052 In addition to generating the necessary RTL instructions this also
2053 sets REG_EH_REGION notes if necessary and sets the current source
2054 location for diagnostics. */
2057 expand_gimple_stmt (gimple stmt)
2061 location_t saved_location = input_location;
2063 last = get_last_insn ();
2065 /* If this is an expression of some kind and it has an associated line
2066 number, then emit the line number before expanding the expression.
2068 We need to save and restore the file and line information so that
2069 errors discovered during expansion are emitted with the right
2070 information. It would be better if the diagnostic routines
2071 used the file/line information embedded in the tree nodes rather
2075 if (gimple_has_location (stmt))
2077 input_location = gimple_location (stmt);
2078 set_curr_insn_source_location (input_location);
2080 /* Record where the insns produced belong. */
2081 set_curr_insn_block (gimple_block (stmt));
2084 expand_gimple_stmt_1 (stmt);
2085 /* Free any temporaries used to evaluate this statement. */
2088 input_location = saved_location;
2090 /* Mark all insns that may trap. */
2091 lp_nr = lookup_stmt_eh_lp (stmt);
2095 for (insn = next_real_insn (last); insn;
2096 insn = next_real_insn (insn))
2098 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2099 /* If we want exceptions for non-call insns, any
2100 may_trap_p instruction may throw. */
2101 && GET_CODE (PATTERN (insn)) != CLOBBER
2102 && GET_CODE (PATTERN (insn)) != USE
2103 && insn_could_throw_p (insn))
2104 make_reg_eh_region_note (insn, 0, lp_nr);
2111 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2112 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2113 generated a tail call (something that might be denied by the ABI
2114 rules governing the call; see calls.c).
2116 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2117 can still reach the rest of BB. The case here is __builtin_sqrt,
2118 where the NaN result goes through the external function (with a
2119 tailcall) and the normal result happens via a sqrt instruction. */
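/* As an illustrative sketch (an assumption about a typical expansion,
   not taken from the original sources): for

     double f (double x) { return __builtin_sqrt (x); }

   the expander may emit an inline sqrt instruction for the common case
   and branch to an out-of-line call to sqrt for inputs that need the
   library behaviour; since that call's result is immediately returned,
   it can be emitted as a conditional tail call, leaving a fallthrough
   path into the rest of BB.  */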
2122 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2130 last2 = last = expand_gimple_stmt (stmt);
2132 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2133 if (CALL_P (last) && SIBLING_CALL_P (last))
2136 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2138 *can_fallthru = true;
2142 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2143 Any instructions emitted here are about to be deleted. */
2144 do_pending_stack_adjust ();
2146 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2147 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2148 EH or abnormal edges, we shouldn't have created a tail call in
2149 the first place. So it seems to me we should just be removing
2150 all edges here, or redirecting the existing fallthru edge to
2156 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2158 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2160 if (e->dest != EXIT_BLOCK_PTR)
2162 e->dest->count -= e->count;
2163 e->dest->frequency -= EDGE_FREQUENCY (e);
2164 if (e->dest->count < 0)
2166 if (e->dest->frequency < 0)
2167 e->dest->frequency = 0;
2170 probability += e->probability;
2177 /* This is somewhat ugly: the call_expr expander often emits instructions
2178 after the sibcall (to perform the function return). These confuse the
2179 find_many_sub_basic_blocks code, so we need to get rid of these. */
2180 last = NEXT_INSN (last);
2181 gcc_assert (BARRIER_P (last));
2183 *can_fallthru = false;
2184 while (NEXT_INSN (last))
2186 /* For instance, an sqrt builtin expander may expand an `if' with the
2187 sibcall in the `then' arm and a label for the `else' arm. */
2188 if (LABEL_P (NEXT_INSN (last)))
2190 *can_fallthru = true;
2193 delete_insn (NEXT_INSN (last));
2196 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2197 e->probability += probability;
2200 update_bb_for_insn (bb);
2202 if (NEXT_INSN (last))
2204 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2207 if (BARRIER_P (last))
2208 BB_END (bb) = PREV_INSN (last);
2211 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2216 /* Return the difference between the floor and the truncated result of
2217 a signed division by OP1 with remainder MOD. */
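/* Worked example (illustrative only, not from the original sources):
   for -7 / 2 the truncated quotient is -3 with MOD = -1 and OP1 = 2;
   MOD != 0 and OP1 / MOD is negative, so the adjustment computed below
   is -1, giving the floor result -3 + -1 = -4.  */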
2219 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2221 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2222 return gen_rtx_IF_THEN_ELSE
2223 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2224 gen_rtx_IF_THEN_ELSE
2225 (mode, gen_rtx_LT (BImode,
2226 gen_rtx_DIV (mode, op1, mod),
2228 constm1_rtx, const0_rtx),
2232 /* Return the difference between the ceil and the truncated result of
2233 a signed division by OP1 with remainder MOD. */
2235 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2237 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2238 return gen_rtx_IF_THEN_ELSE
2239 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2240 gen_rtx_IF_THEN_ELSE
2241 (mode, gen_rtx_GT (BImode,
2242 gen_rtx_DIV (mode, op1, mod),
2244 const1_rtx, const0_rtx),
2248 /* Return the difference between the ceil and the truncated result of
2249 an unsigned division by OP1 with remainder MOD. */
2251 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2253 /* (mod != 0 ? 1 : 0) */
2254 return gen_rtx_IF_THEN_ELSE
2255 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2256 const1_rtx, const0_rtx);
2259 /* Return the difference between the rounded and the truncated result
2260 of a signed division by OP1 with remainder MOD. Halfway cases are
2261 rounded away from zero, rather than to the nearest even number. */
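/* Worked example (illustrative only, not from the original sources):
   for 7 / 4 the truncated quotient is 1 with MOD = 3 and OP1 = 4;
   abs (MOD) = 3 >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD > 0, so the
   adjustment computed below is +1, giving the rounded result
   1 + 1 = 2.  */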
2263 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2265 /* (abs (mod) >= abs (op1) - abs (mod)
2266 ? (op1 / mod > 0 ? 1 : -1)
2268 return gen_rtx_IF_THEN_ELSE
2269 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2270 gen_rtx_MINUS (mode,
2271 gen_rtx_ABS (mode, op1),
2272 gen_rtx_ABS (mode, mod))),
2273 gen_rtx_IF_THEN_ELSE
2274 (mode, gen_rtx_GT (BImode,
2275 gen_rtx_DIV (mode, op1, mod),
2277 const1_rtx, constm1_rtx),
2281 /* Return the difference between the rounded and the truncated result
2282 of an unsigned division by OP1 with remainder MOD. Halfway cases
2283 are rounded away from zero, rather than to the nearest even
2286 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2288 /* (mod >= op1 - mod ? 1 : 0) */
2289 return gen_rtx_IF_THEN_ELSE
2290 (mode, gen_rtx_GE (BImode, mod,
2291 gen_rtx_MINUS (mode, op1, mod)),
2292 const1_rtx, const0_rtx);
2295 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2299 convert_debug_memory_address (enum machine_mode mode, rtx x,
2302 enum machine_mode xmode = GET_MODE (x);
2304 #ifndef POINTERS_EXTEND_UNSIGNED
2305 gcc_assert (mode == Pmode
2306 || mode == targetm.addr_space.address_mode (as));
2307 gcc_assert (xmode == mode || xmode == VOIDmode);
2310 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2311 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2313 gcc_assert (mode == address_mode || mode == pointer_mode);
2315 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2318 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2319 x = simplify_gen_subreg (mode, x, xmode,
2320 subreg_lowpart_offset
2322 else if (POINTERS_EXTEND_UNSIGNED > 0)
2323 x = gen_rtx_ZERO_EXTEND (mode, x);
2324 else if (!POINTERS_EXTEND_UNSIGNED)
2325 x = gen_rtx_SIGN_EXTEND (mode, x);
2328 switch (GET_CODE (x))
2331 if ((SUBREG_PROMOTED_VAR_P (x)
2332 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2333 || (GET_CODE (SUBREG_REG (x)) == PLUS
2334 && REG_P (XEXP (SUBREG_REG (x), 0))
2335 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2336 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2337 && GET_MODE (SUBREG_REG (x)) == mode)
2338 return SUBREG_REG (x);
2341 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2342 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2345 temp = shallow_copy_rtx (x);
2346 PUT_MODE (temp, mode);
2349 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2351 temp = gen_rtx_CONST (mode, temp);
2355 if (CONST_INT_P (XEXP (x, 1)))
2357 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2359 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2365 /* Don't know how to express ptr_extend as an operation in debug info. */
2368 #endif /* POINTERS_EXTEND_UNSIGNED */
2373 /* Return an RTX equivalent to the value of the tree expression
2377 expand_debug_expr (tree exp)
2379 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2380 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2381 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2384 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2386 case tcc_expression:
2387 switch (TREE_CODE (exp))
2391 case WIDEN_MULT_PLUS_EXPR:
2392 case WIDEN_MULT_MINUS_EXPR:
2396 case TRUTH_ANDIF_EXPR:
2397 case TRUTH_ORIF_EXPR:
2398 case TRUTH_AND_EXPR:
2400 case TRUTH_XOR_EXPR:
2403 case TRUTH_NOT_EXPR:
2412 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2419 case tcc_comparison:
2420 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2427 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2437 case tcc_exceptional:
2438 case tcc_declaration:
2444 switch (TREE_CODE (exp))
2447 if (!lookup_constant_def (exp))
2449 if (strlen (TREE_STRING_POINTER (exp)) + 1
2450 != (size_t) TREE_STRING_LENGTH (exp))
2452 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2453 op0 = gen_rtx_MEM (BLKmode, op0);
2454 set_mem_attributes (op0, exp, 0);
2457 /* Fall through... */
2462 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2466 gcc_assert (COMPLEX_MODE_P (mode));
2467 op0 = expand_debug_expr (TREE_REALPART (exp));
2468 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2469 return gen_rtx_CONCAT (mode, op0, op1);
2471 case DEBUG_EXPR_DECL:
2472 op0 = DECL_RTL_IF_SET (exp);
2477 op0 = gen_rtx_DEBUG_EXPR (mode);
2478 DEBUG_EXPR_TREE_DECL (op0) = exp;
2479 SET_DECL_RTL (exp, op0);
2489 op0 = DECL_RTL_IF_SET (exp);
2491 /* This decl was probably optimized away. */
2494 if (TREE_CODE (exp) != VAR_DECL
2495 || DECL_EXTERNAL (exp)
2496 || !TREE_STATIC (exp)
2498 || DECL_HARD_REGISTER (exp)
2499 || DECL_IN_CONSTANT_POOL (exp)
2500 || mode == VOIDmode)
2503 op0 = make_decl_rtl_for_debug (exp);
2505 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2506 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2510 op0 = copy_rtx (op0);
2512 if (GET_MODE (op0) == BLKmode
2513 /* If op0 is not BLKmode, but MODE is BLKmode, adjust_mode
2514 below would ICE. While it is likely a FE bug,
2515 try to be robust here. See PR43166. */
2517 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2519 gcc_assert (MEM_P (op0));
2520 op0 = adjust_address_nv (op0, mode, 0);
2531 enum machine_mode inner_mode = GET_MODE (op0);
2533 if (mode == inner_mode)
2536 if (inner_mode == VOIDmode)
2538 if (TREE_CODE (exp) == SSA_NAME)
2539 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2541 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2542 if (mode == inner_mode)
2546 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2548 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2549 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2550 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2551 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2553 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2555 else if (FLOAT_MODE_P (mode))
2557 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2558 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2559 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2561 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2563 else if (FLOAT_MODE_P (inner_mode))
2566 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2568 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2570 else if (CONSTANT_P (op0)
2571 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2572 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2573 subreg_lowpart_offset (mode,
2575 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2576 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2578 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2580 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2586 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2588 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2589 TREE_OPERAND (exp, 0),
2590 TREE_OPERAND (exp, 1));
2592 return expand_debug_expr (newexp);
2596 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2600 if (TREE_CODE (exp) == MEM_REF)
2602 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2603 || (GET_CODE (op0) == PLUS
2604 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2605 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2606 Instead just use get_inner_reference. */
2609 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2610 if (!op1 || !CONST_INT_P (op1))
2613 op0 = plus_constant (op0, INTVAL (op1));
2616 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2617 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2619 as = ADDR_SPACE_GENERIC;
2621 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2623 if (op0 == NULL_RTX)
2626 op0 = gen_rtx_MEM (mode, op0);
2627 set_mem_attributes (op0, exp, 0);
2628 if (TREE_CODE (exp) == MEM_REF
2629 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2630 set_mem_expr (op0, NULL_TREE);
2631 set_mem_addr_space (op0, as);
2635 case TARGET_MEM_REF:
2636 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2637 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2640 op0 = expand_debug_expr
2641 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2645 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2646 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2648 as = ADDR_SPACE_GENERIC;
2650 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2652 if (op0 == NULL_RTX)
2655 op0 = gen_rtx_MEM (mode, op0);
2657 set_mem_attributes (op0, exp, 0);
2658 set_mem_addr_space (op0, as);
2664 case ARRAY_RANGE_REF:
2669 case VIEW_CONVERT_EXPR:
2671 enum machine_mode mode1;
2672 HOST_WIDE_INT bitsize, bitpos;
2675 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2676 &mode1, &unsignedp, &volatilep, false);
2682 orig_op0 = op0 = expand_debug_expr (tem);
2689 enum machine_mode addrmode, offmode;
2694 op0 = XEXP (op0, 0);
2695 addrmode = GET_MODE (op0);
2696 if (addrmode == VOIDmode)
2699 op1 = expand_debug_expr (offset);
2703 offmode = GET_MODE (op1);
2704 if (offmode == VOIDmode)
2705 offmode = TYPE_MODE (TREE_TYPE (offset));
2707 if (addrmode != offmode)
2708 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2709 subreg_lowpart_offset (addrmode,
2712 /* Don't use offset_address here; we don't need a
2713 recognizable address, and we don't want to generate
2715 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2720 if (mode1 == VOIDmode)
2722 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2723 if (bitpos >= BITS_PER_UNIT)
2725 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2726 bitpos %= BITS_PER_UNIT;
2728 else if (bitpos < 0)
2731 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2732 op0 = adjust_address_nv (op0, mode1, units);
2733 bitpos += units * BITS_PER_UNIT;
2735 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2736 op0 = adjust_address_nv (op0, mode, 0);
2737 else if (GET_MODE (op0) != mode1)
2738 op0 = adjust_address_nv (op0, mode1, 0);
2740 op0 = copy_rtx (op0);
2741 if (op0 == orig_op0)
2742 op0 = shallow_copy_rtx (op0);
2743 set_mem_attributes (op0, exp, 0);
2746 if (bitpos == 0 && mode == GET_MODE (op0))
2752 if (GET_MODE (op0) == BLKmode)
2755 if ((bitpos % BITS_PER_UNIT) == 0
2756 && bitsize == GET_MODE_BITSIZE (mode1))
2758 enum machine_mode opmode = GET_MODE (op0);
2760 if (opmode == VOIDmode)
2761 opmode = TYPE_MODE (TREE_TYPE (tem));
2763 /* This condition may hold if we're expanding the address
2764 right past the end of an array that turned out not to
2765 be addressable (i.e., the address was only computed in
2766 debug stmts). The gen_subreg below would rightfully
2767 crash, and the address doesn't really exist, so just
2769 if (bitpos >= GET_MODE_BITSIZE (opmode))
2772 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2773 return simplify_gen_subreg (mode, op0, opmode,
2774 bitpos / BITS_PER_UNIT);
2777 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2778 && TYPE_UNSIGNED (TREE_TYPE (exp))
2780 : ZERO_EXTRACT, mode,
2781 GET_MODE (op0) != VOIDmode
2783 : TYPE_MODE (TREE_TYPE (tem)),
2784 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2788 return gen_rtx_ABS (mode, op0);
2791 return gen_rtx_NEG (mode, op0);
2794 return gen_rtx_NOT (mode, op0);
2798 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2800 return gen_rtx_FLOAT (mode, op0);
2802 case FIX_TRUNC_EXPR:
2804 return gen_rtx_UNSIGNED_FIX (mode, op0);
2806 return gen_rtx_FIX (mode, op0);
2808 case POINTER_PLUS_EXPR:
2809 /* For the rare target where pointers are not the same size as
2810 size_t, we need to check for mis-matched modes and correct
2813 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2814 && GET_MODE (op0) != GET_MODE (op1))
2816 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2817 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2819 /* We always sign-extend, regardless of the signedness of
2820 the operand, because the operand is always unsigned
2821 here even if the original C expression is signed. */
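/* Illustrative example (an assumption, not from the original sources):
   with 64-bit pointers and a 32-bit sizetype, an offset of
   (size_t) -4 is the unsigned value 0xfffffffc; sign-extending it
   yields 0xfffffffffffffffc, which when added to the pointer
   subtracts 4 as intended, whereas zero-extending would add
   0xfffffffc.  */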
2822 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2826 return gen_rtx_PLUS (mode, op0, op1);
2829 return gen_rtx_MINUS (mode, op0, op1);
2832 return gen_rtx_MULT (mode, op0, op1);
2835 case TRUNC_DIV_EXPR:
2836 case EXACT_DIV_EXPR:
2838 return gen_rtx_UDIV (mode, op0, op1);
2840 return gen_rtx_DIV (mode, op0, op1);
2842 case TRUNC_MOD_EXPR:
2844 return gen_rtx_UMOD (mode, op0, op1);
2846 return gen_rtx_MOD (mode, op0, op1);
2848 case FLOOR_DIV_EXPR:
2850 return gen_rtx_UDIV (mode, op0, op1);
2853 rtx div = gen_rtx_DIV (mode, op0, op1);
2854 rtx mod = gen_rtx_MOD (mode, op0, op1);
2855 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2856 return gen_rtx_PLUS (mode, div, adj);
2859 case FLOOR_MOD_EXPR:
2861 return gen_rtx_UMOD (mode, op0, op1);
2864 rtx mod = gen_rtx_MOD (mode, op0, op1);
2865 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2866 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2867 return gen_rtx_PLUS (mode, mod, adj);
2873 rtx div = gen_rtx_UDIV (mode, op0, op1);
2874 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2875 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2876 return gen_rtx_PLUS (mode, div, adj);
2880 rtx div = gen_rtx_DIV (mode, op0, op1);
2881 rtx mod = gen_rtx_MOD (mode, op0, op1);
2882 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2883 return gen_rtx_PLUS (mode, div, adj);
2889 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2890 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2891 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2892 return gen_rtx_PLUS (mode, mod, adj);
2896 rtx mod = gen_rtx_MOD (mode, op0, op1);
2897 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2898 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2899 return gen_rtx_PLUS (mode, mod, adj);
2902 case ROUND_DIV_EXPR:
2905 rtx div = gen_rtx_UDIV (mode, op0, op1);
2906 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2907 rtx adj = round_udiv_adjust (mode, mod, op1);
2908 return gen_rtx_PLUS (mode, div, adj);
2912 rtx div = gen_rtx_DIV (mode, op0, op1);
2913 rtx mod = gen_rtx_MOD (mode, op0, op1);
2914 rtx adj = round_sdiv_adjust (mode, mod, op1);
2915 return gen_rtx_PLUS (mode, div, adj);
2918 case ROUND_MOD_EXPR:
2921 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2922 rtx adj = round_udiv_adjust (mode, mod, op1);
2923 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2924 return gen_rtx_PLUS (mode, mod, adj);
2928 rtx mod = gen_rtx_MOD (mode, op0, op1);
2929 rtx adj = round_sdiv_adjust (mode, mod, op1);
2930 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2931 return gen_rtx_PLUS (mode, mod, adj);
2935 return gen_rtx_ASHIFT (mode, op0, op1);
2939 return gen_rtx_LSHIFTRT (mode, op0, op1);
2941 return gen_rtx_ASHIFTRT (mode, op0, op1);
2944 return gen_rtx_ROTATE (mode, op0, op1);
2947 return gen_rtx_ROTATERT (mode, op0, op1);
2951 return gen_rtx_UMIN (mode, op0, op1);
2953 return gen_rtx_SMIN (mode, op0, op1);
2957 return gen_rtx_UMAX (mode, op0, op1);
2959 return gen_rtx_SMAX (mode, op0, op1);
2962 case TRUTH_AND_EXPR:
2963 return gen_rtx_AND (mode, op0, op1);
2967 return gen_rtx_IOR (mode, op0, op1);
2970 case TRUTH_XOR_EXPR:
2971 return gen_rtx_XOR (mode, op0, op1);
2973 case TRUTH_ANDIF_EXPR:
2974 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2976 case TRUTH_ORIF_EXPR:
2977 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2979 case TRUTH_NOT_EXPR:
2980 return gen_rtx_EQ (mode, op0, const0_rtx);
2984 return gen_rtx_LTU (mode, op0, op1);
2986 return gen_rtx_LT (mode, op0, op1);
2990 return gen_rtx_LEU (mode, op0, op1);
2992 return gen_rtx_LE (mode, op0, op1);
2996 return gen_rtx_GTU (mode, op0, op1);
2998 return gen_rtx_GT (mode, op0, op1);
3002 return gen_rtx_GEU (mode, op0, op1);
3004 return gen_rtx_GE (mode, op0, op1);
3007 return gen_rtx_EQ (mode, op0, op1);
3010 return gen_rtx_NE (mode, op0, op1);
3012 case UNORDERED_EXPR:
3013 return gen_rtx_UNORDERED (mode, op0, op1);
3016 return gen_rtx_ORDERED (mode, op0, op1);
3019 return gen_rtx_UNLT (mode, op0, op1);
3022 return gen_rtx_UNLE (mode, op0, op1);
3025 return gen_rtx_UNGT (mode, op0, op1);
3028 return gen_rtx_UNGE (mode, op0, op1);
3031 return gen_rtx_UNEQ (mode, op0, op1);
3034 return gen_rtx_LTGT (mode, op0, op1);
3037 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3040 gcc_assert (COMPLEX_MODE_P (mode));
3041 if (GET_MODE (op0) == VOIDmode)
3042 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3043 if (GET_MODE (op1) == VOIDmode)
3044 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3045 return gen_rtx_CONCAT (mode, op0, op1);
3048 if (GET_CODE (op0) == CONCAT)
3049 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3050 gen_rtx_NEG (GET_MODE_INNER (mode),
3054 enum machine_mode imode = GET_MODE_INNER (mode);
3059 re = adjust_address_nv (op0, imode, 0);
3060 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3064 enum machine_mode ifmode = int_mode_for_mode (mode);
3065 enum machine_mode ihmode = int_mode_for_mode (imode);
3067 if (ifmode == BLKmode || ihmode == BLKmode)
3069 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3072 re = gen_rtx_SUBREG (ifmode, re, 0);
3073 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3074 if (imode != ihmode)
3075 re = gen_rtx_SUBREG (imode, re, 0);
3076 im = copy_rtx (op0);
3078 im = gen_rtx_SUBREG (ifmode, im, 0);
3079 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3080 if (imode != ihmode)
3081 im = gen_rtx_SUBREG (imode, im, 0);
3083 im = gen_rtx_NEG (imode, im);
3084 return gen_rtx_CONCAT (mode, re, im);
3088 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3089 if (!op0 || !MEM_P (op0))
3091 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3092 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3093 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3094 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3095 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3097 if (handled_component_p (TREE_OPERAND (exp, 0)))
3099 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3101 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3102 &bitoffset, &bitsize, &maxsize);
3103 if ((TREE_CODE (decl) == VAR_DECL
3104 || TREE_CODE (decl) == PARM_DECL
3105 || TREE_CODE (decl) == RESULT_DECL)
3106 && !TREE_ADDRESSABLE (decl)
3107 && (bitoffset % BITS_PER_UNIT) == 0
3109 && bitsize == maxsize)
3110 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3111 bitoffset / BITS_PER_UNIT);
3117 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3118 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3123 exp = build_constructor_from_list (TREE_TYPE (exp),
3124 TREE_VECTOR_CST_ELTS (exp));
3128 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3133 op0 = gen_rtx_CONCATN
3134 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3136 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3138 op1 = expand_debug_expr (val);
3141 XVECEXP (op0, 0, i) = op1;
3144 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3146 op1 = expand_debug_expr
3147 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3152 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3153 XVECEXP (op0, 0, i) = op1;
3159 goto flag_unsupported;
3162 /* ??? Maybe handle some builtins? */
3167 gimple g = get_gimple_for_ssa_name (exp);
3170 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3176 int part = var_to_partition (SA.map, exp);
3178 if (part == NO_PARTITION)
3181 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3183 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3191 /* Vector stuff. For most of these codes we don't have corresponding rtl codes. */
3192 case REALIGN_LOAD_EXPR:
3193 case REDUC_MAX_EXPR:
3194 case REDUC_MIN_EXPR:
3195 case REDUC_PLUS_EXPR:
3197 case VEC_EXTRACT_EVEN_EXPR:
3198 case VEC_EXTRACT_ODD_EXPR:
3199 case VEC_INTERLEAVE_HIGH_EXPR:
3200 case VEC_INTERLEAVE_LOW_EXPR:
3201 case VEC_LSHIFT_EXPR:
3202 case VEC_PACK_FIX_TRUNC_EXPR:
3203 case VEC_PACK_SAT_EXPR:
3204 case VEC_PACK_TRUNC_EXPR:
3205 case VEC_RSHIFT_EXPR:
3206 case VEC_UNPACK_FLOAT_HI_EXPR:
3207 case VEC_UNPACK_FLOAT_LO_EXPR:
3208 case VEC_UNPACK_HI_EXPR:
3209 case VEC_UNPACK_LO_EXPR:
3210 case VEC_WIDEN_MULT_HI_EXPR:
3211 case VEC_WIDEN_MULT_LO_EXPR:
3215 case ADDR_SPACE_CONVERT_EXPR:
3216 case FIXED_CONVERT_EXPR:
3218 case WITH_SIZE_EXPR:
3222 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3223 && SCALAR_INT_MODE_P (mode))
3225 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3226 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3228 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3229 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3230 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3232 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3233 op0 = gen_rtx_MULT (mode, op0, op1);
3234 return gen_rtx_PLUS (mode, op0, op2);
3238 case WIDEN_MULT_EXPR:
3239 case WIDEN_MULT_PLUS_EXPR:
3240 case WIDEN_MULT_MINUS_EXPR:
3241 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3242 && SCALAR_INT_MODE_P (mode))
3244 enum machine_mode inner_mode = GET_MODE (op0);
3245 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3246 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3248 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3249 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3250 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3252 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3253 op0 = gen_rtx_MULT (mode, op0, op1);
3254 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3256 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3257 return gen_rtx_PLUS (mode, op0, op2);
3259 return gen_rtx_MINUS (mode, op2, op0);
3263 case WIDEN_SUM_EXPR:
3264 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3265 && SCALAR_INT_MODE_P (mode))
3267 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3268 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3270 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3271 return gen_rtx_PLUS (mode, op0, op1);
3276 return gen_rtx_FMA (mode, op0, op1, op2);
3280 #ifdef ENABLE_CHECKING
3289 /* Expand the _LOCs in debug insns. We run this after expanding all
3290 regular insns, so that any variables referenced in the function
3291 will have their DECL_RTLs set. */
3294 expand_debug_locations (void)
3297 rtx last = get_last_insn ();
3298 int save_strict_alias = flag_strict_aliasing;
3300 /* New alias sets while setting up memory attributes cause
3301 -fcompare-debug failures, even though it doesn't bring about any
3303 flag_strict_aliasing = 0;
3305 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3306 if (DEBUG_INSN_P (insn))
3308 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3310 enum machine_mode mode;
3312 if (value == NULL_TREE)
3316 val = expand_debug_expr (value);
3317 gcc_assert (last == get_last_insn ());
3321 val = gen_rtx_UNKNOWN_VAR_LOC ();
3324 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3326 gcc_assert (mode == GET_MODE (val)
3327 || (GET_MODE (val) == VOIDmode
3328 && (CONST_INT_P (val)
3329 || GET_CODE (val) == CONST_FIXED
3330 || GET_CODE (val) == CONST_DOUBLE
3331 || GET_CODE (val) == LABEL_REF)));
3334 INSN_VAR_LOCATION_LOC (insn) = val;
3337 flag_strict_aliasing = save_strict_alias;
3340 /* Expand basic block BB from GIMPLE trees to RTL. */
3343 expand_gimple_basic_block (basic_block bb)
3345 gimple_stmt_iterator gsi;
3354 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3357 /* Note that since we are now transitioning from GIMPLE to RTL, we
3358 cannot use the gsi_*_bb() routines because they expect the basic
3359 block to be in GIMPLE, instead of RTL. Therefore, we need to
3360 access the BB sequence directly. */
3361 stmts = bb_seq (bb);
3362 bb->il.gimple = NULL;
3363 rtl_profile_for_bb (bb);
3364 init_rtl_bb_info (bb);
3365 bb->flags |= BB_RTL;
3367 /* Remove the RETURN_EXPR if we may fall through to the exit
3369 gsi = gsi_last (stmts);
3370 if (!gsi_end_p (gsi)
3371 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3373 gimple ret_stmt = gsi_stmt (gsi);
3375 gcc_assert (single_succ_p (bb));
3376 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3378 if (bb->next_bb == EXIT_BLOCK_PTR
3379 && !gimple_return_retval (ret_stmt))
3381 gsi_remove (&gsi, false);
3382 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3386 gsi = gsi_start (stmts);
3387 if (!gsi_end_p (gsi))
3389 stmt = gsi_stmt (gsi);
3390 if (gimple_code (stmt) != GIMPLE_LABEL)
3394 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3398 last = get_last_insn ();
3402 expand_gimple_stmt (stmt);
3407 emit_label ((rtx) *elt);
3409 /* Java emits line number notes at the top of labels.
3410 ??? Make this go away once line number notes are obsoleted. */
3411 BB_HEAD (bb) = NEXT_INSN (last);
3412 if (NOTE_P (BB_HEAD (bb)))
3413 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3414 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3416 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3419 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3421 NOTE_BASIC_BLOCK (note) = bb;
3423 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3427 stmt = gsi_stmt (gsi);
3429 /* If this statement is a non-debug one, and we generate debug
3430 insns, then this one might be the last real use of a TERed
3431 SSA_NAME, but where there are still some debug uses further
3432 down. Expanding the current SSA name in such further debug
3433 uses by their RHS might lead to wrong debug info, as coalescing
3434 might make the operands of such RHS be placed into the same
3435 pseudo as something else. Like so:
3436 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3440 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3441 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3442 the write to a_2 would actually have clobbered the place which
3445 So, instead of that, we recognize the situation, and generate
3446 debug temporaries at the last real use of TERed SSA names:
3453 if (MAY_HAVE_DEBUG_INSNS
3455 && !is_gimple_debug (stmt))
3461 location_t sloc = get_curr_insn_source_location ();
3462 tree sblock = get_curr_insn_block ();
3464 /* Look for SSA names that have their last use here (TERed
3465 names always have only one real use). */
3466 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3467 if ((def = get_gimple_for_ssa_name (op)))
3469 imm_use_iterator imm_iter;
3470 use_operand_p use_p;
3471 bool have_debug_uses = false;
3473 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3475 if (gimple_debug_bind_p (USE_STMT (use_p)))
3477 have_debug_uses = true;
3482 if (have_debug_uses)
3484 /* OP is a TERed SSA name, with DEF its defining
3485 statement, and where OP is used in further debug
3486 instructions. Generate a debug temporary, and
3487 replace all uses of OP in debug insns with that
3490 tree value = gimple_assign_rhs_to_tree (def);
3491 tree vexpr = make_node (DEBUG_EXPR_DECL);
3493 enum machine_mode mode;
3495 set_curr_insn_source_location (gimple_location (def));
3496 set_curr_insn_block (gimple_block (def));
3498 DECL_ARTIFICIAL (vexpr) = 1;
3499 TREE_TYPE (vexpr) = TREE_TYPE (value);
3501 mode = DECL_MODE (value);
3503 mode = TYPE_MODE (TREE_TYPE (value));
3504 DECL_MODE (vexpr) = mode;
3506 val = gen_rtx_VAR_LOCATION
3507 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3509 val = emit_debug_insn (val);
3511 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3513 if (!gimple_debug_bind_p (debugstmt))
3516 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3517 SET_USE (use_p, vexpr);
3519 update_stmt (debugstmt);
3523 set_curr_insn_source_location (sloc);
3524 set_curr_insn_block (sblock);
3527 currently_expanding_gimple_stmt = stmt;
3529 /* Expand this statement, then evaluate the resulting RTL and
3530 fixup the CFG accordingly. */
3531 if (gimple_code (stmt) == GIMPLE_COND)
3533 new_bb = expand_gimple_cond (bb, stmt);
3537 else if (gimple_debug_bind_p (stmt))
3539 location_t sloc = get_curr_insn_source_location ();
3540 tree sblock = get_curr_insn_block ();
3541 gimple_stmt_iterator nsi = gsi;
3545 tree var = gimple_debug_bind_get_var (stmt);
3548 enum machine_mode mode;
3550 if (gimple_debug_bind_has_value_p (stmt))
3551 value = gimple_debug_bind_get_value (stmt);
3555 last = get_last_insn ();
3557 set_curr_insn_source_location (gimple_location (stmt));
3558 set_curr_insn_block (gimple_block (stmt));
3561 mode = DECL_MODE (var);
3563 mode = TYPE_MODE (TREE_TYPE (var));
3565 val = gen_rtx_VAR_LOCATION
3566 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3568 val = emit_debug_insn (val);
3570 if (dump_file && (dump_flags & TDF_DETAILS))
3572 /* We can't dump the insn with a TREE where an RTX
3574 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3575 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3576 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3579 /* In order not to generate too many debug temporaries,
3580 we delink all uses of debug statements we already expanded.
3581 Therefore debug statements between definition and real
3582 use of TERed SSA names will continue to use the SSA name,
3583 and not be replaced with debug temps. */
3584 delink_stmt_imm_use (stmt);
3588 if (gsi_end_p (nsi))
3590 stmt = gsi_stmt (nsi);
3591 if (!gimple_debug_bind_p (stmt))
3595 set_curr_insn_source_location (sloc);
3596 set_curr_insn_block (sblock);
3600 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3603 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3614 def_operand_p def_p;
3615 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3619 /* Ignore this stmt if it is in the list of
3620 replaceable expressions. */
3622 && bitmap_bit_p (SA.values,
3623 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3626 last = expand_gimple_stmt (stmt);
3627 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3632 currently_expanding_gimple_stmt = NULL;
3634 /* Expand implicit goto and convert goto_locus. */
3635 FOR_EACH_EDGE (e, ei, bb->succs)
3637 if (e->goto_locus && e->goto_block)
3639 set_curr_insn_source_location (e->goto_locus);
3640 set_curr_insn_block (e->goto_block);
3641 e->goto_locus = curr_insn_locator ();
3643 e->goto_block = NULL;
3644 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3646 emit_jump (label_rtx_for_bb (e->dest));
3647 e->flags &= ~EDGE_FALLTHRU;
3651 /* Expanded RTL can create a jump in the last instruction of the block.
3652 This might later be assumed to be a jump to the successor and break edge insertion.
3653 We need to insert a dummy move to prevent this. PR41440. */
3654 if (single_succ_p (bb)
3655 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3656 && (last = get_last_insn ())
3659 rtx dummy = gen_reg_rtx (SImode);
3660 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3663 do_pending_stack_adjust ();
3665 /* Find the block tail. The last insn in the block is the insn
3666 before a barrier and/or table jump insn. */
3667 last = get_last_insn ();
3668 if (BARRIER_P (last))
3669 last = PREV_INSN (last);
3670 if (JUMP_TABLE_DATA_P (last))
3671 last = PREV_INSN (PREV_INSN (last));
3674 update_bb_for_insn (bb);
3680 /* Create a basic block for initialization code. */
3683 construct_init_block (void)
3685 basic_block init_block, first_block;
3689 /* Multiple entry points not supported yet. */
3690 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3691 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3692 init_rtl_bb_info (EXIT_BLOCK_PTR);
3693 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3694 EXIT_BLOCK_PTR->flags |= BB_RTL;
3696 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3698 /* When the entry edge points to the first basic block, we don't need a jump;
3699 otherwise we have to jump to the proper target. */
3700 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3702 tree label = gimple_block_label (e->dest);
3704 emit_jump (label_rtx (label));
3708 flags = EDGE_FALLTHRU;
3710 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3713 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3714 init_block->count = ENTRY_BLOCK_PTR->count;
3717 first_block = e->dest;
3718 redirect_edge_succ (e, init_block);
3719 e = make_edge (init_block, first_block, flags);
3722 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3723 e->probability = REG_BR_PROB_BASE;
3724 e->count = ENTRY_BLOCK_PTR->count;
3726 update_bb_for_insn (init_block);
3730 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3731 found in the block tree. */
3734 set_block_levels (tree block, int level)
3738 BLOCK_NUMBER (block) = level;
3739 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3740 block = BLOCK_CHAIN (block);
3744 /* Create a block containing landing pads and similar stuff. */
3747 construct_exit_block (void)
3749 rtx head = get_last_insn ();
3751 basic_block exit_block;
3755 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3757 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3759 /* Make sure the locus is set to the end of the function, so that
3760 epilogue line numbers and warnings are set properly. */
3761 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3762 input_location = cfun->function_end_locus;
3764 /* The following insns belong to the top scope. */
3765 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3767 /* Generate rtl for function exit. */
3768 expand_function_end ();
3770 end = get_last_insn ();
3773 /* While emitting the function end we could have moved the end of the last basic block.
3775 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3776 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3777 head = NEXT_INSN (head);
3778 exit_block = create_basic_block (NEXT_INSN (head), end,
3779 EXIT_BLOCK_PTR->prev_bb);
3780 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3781 exit_block->count = EXIT_BLOCK_PTR->count;
3784 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3786 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3787 if (!(e->flags & EDGE_ABNORMAL))
3788 redirect_edge_succ (e, exit_block);
3793 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3794 e->probability = REG_BR_PROB_BASE;
3795 e->count = EXIT_BLOCK_PTR->count;
3796 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3799 e->count -= e2->count;
3800 exit_block->count -= e2->count;
3801 exit_block->frequency -= EDGE_FREQUENCY (e2);
3805 if (exit_block->count < 0)
3806 exit_block->count = 0;
3807 if (exit_block->frequency < 0)
3808 exit_block->frequency = 0;
3809 update_bb_for_insn (exit_block);
3812 /* Helper function for discover_nonconstant_array_refs.
3813 Look for ARRAY_REF nodes with non-constant indexes and mark them
3817 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3818 void *data ATTRIBUTE_UNUSED)
3822 if (IS_TYPE_OR_DECL_P (t))
3824 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3826 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3827 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3828 && (!TREE_OPERAND (t, 2)
3829 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3830 || (TREE_CODE (t) == COMPONENT_REF
3831 && (!TREE_OPERAND (t,2)
3832 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3833 || TREE_CODE (t) == BIT_FIELD_REF
3834 || TREE_CODE (t) == REALPART_EXPR
3835 || TREE_CODE (t) == IMAGPART_EXPR
3836 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3837 || CONVERT_EXPR_P (t))
3838 t = TREE_OPERAND (t, 0);
3840 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3842 t = get_base_address (t);
3844 && DECL_MODE (t) != BLKmode)
3845 TREE_ADDRESSABLE (t) = 1;
3854 /* RTL expansion is not able to compile array references with variable
3855 offsets for arrays stored in a single register. Discover such
3856 expressions and mark variables as addressable to avoid this
3860 discover_nonconstant_array_refs (void)
3863 gimple_stmt_iterator gsi;
3866 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3868 gimple stmt = gsi_stmt (gsi);
3869 if (!is_gimple_debug (stmt))
3870 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
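/* Illustrative example for discover_nonconstant_array_refs above (an
   assumption, not from the original sources): a small array that could
   otherwise be promoted to a register may be marked addressable when it
   is indexed by a variable, e.g.

     int f (int i) { int a[2] = { 1, 2 }; return a[i]; }

   Here a[i] has a non-constant index, so 'a' may get TREE_ADDRESSABLE
   set and be kept in memory.  */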
3874 /* This function sets crtl->args.internal_arg_pointer to a virtual
3875 register if DRAP is needed. The local register allocator will replace
3876 virtual_incoming_args_rtx with the virtual register. */
3879 expand_stack_alignment (void)
3882 unsigned int preferred_stack_boundary;
3884 if (! SUPPORTS_STACK_ALIGNMENT)
3887 if (cfun->calls_alloca
3888 || cfun->has_nonlocal_label
3889 || crtl->has_nonlocal_goto)
3890 crtl->need_drap = true;
3892 /* Call update_stack_boundary here again to update incoming stack
3893 boundary. It may set incoming stack alignment to a different
3894 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3895 use the minimum incoming stack alignment to check if it is OK
3896 to perform sibcall optimization since sibcall optimization will
3897 only align the outgoing stack to incoming stack boundary. */
3898 if (targetm.calls.update_stack_boundary)
3899 targetm.calls.update_stack_boundary ();
3901 /* The incoming stack frame has to be aligned at least at
3902 parm_stack_boundary. */
3903 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3905 /* Update crtl->stack_alignment_estimated and use it later to align
3906 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3907 exceptions since callgraph doesn't collect incoming stack alignment
3909 if (cfun->can_throw_non_call_exceptions
3910 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3911 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3913 preferred_stack_boundary = crtl->preferred_stack_boundary;
3914 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3915 crtl->stack_alignment_estimated = preferred_stack_boundary;
3916 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3917 crtl->stack_alignment_needed = preferred_stack_boundary;
3919 gcc_assert (crtl->stack_alignment_needed
3920 <= crtl->stack_alignment_estimated);
3922 crtl->stack_realign_needed
3923 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3924 crtl->stack_realign_tried = crtl->stack_realign_needed;
3926 crtl->stack_realign_processed = true;
3928 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3930 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3931 drap_rtx = targetm.calls.get_drap_rtx ();
3933 /* stack_realign_drap and drap_rtx must match. */
3934 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3936 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3937 if (NULL != drap_rtx)
3939 crtl->args.internal_arg_pointer = drap_rtx;
3941 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3943 fixup_tail_calls ();
3947 /* Translate the intermediate representation contained in the CFG
3948 from GIMPLE trees to RTL.
3950 We do conversion per basic block and preserve/update the tree CFG.
3951 This implies we have to do some magic as the CFG can simultaneously
3952 consist of basic blocks containing RTL and GIMPLE trees. This can
3953 confuse the CFG hooks, so be careful to not manipulate CFG during
3957 gimple_expand_cfg (void)
3959 basic_block bb, init_block;
3966 timevar_push (TV_OUT_OF_SSA);
3967 rewrite_out_of_ssa (&SA);
3968 timevar_pop (TV_OUT_OF_SSA);
3969 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3972 /* Some backends want to know that we are expanding to RTL. */
3973 currently_expanding_to_rtl = 1;
3975 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3977 insn_locators_alloc ();
3978 if (!DECL_IS_BUILTIN (current_function_decl))
3980 /* Eventually, all FEs should explicitly set function_start_locus. */
3981 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3982 set_curr_insn_source_location
3983 (DECL_SOURCE_LOCATION (current_function_decl));
3985 set_curr_insn_source_location (cfun->function_start_locus);
3988 set_curr_insn_source_location (UNKNOWN_LOCATION);
3989 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3990 prologue_locator = curr_insn_locator ();
3992 #ifdef INSN_SCHEDULING
3993 init_sched_attrs ();
3996 /* Make sure the first insn is a note even if we don't want linenums.
3997 This makes sure the first insn will never be deleted.
3998 Also, final expects a note to appear there. */
3999 emit_note (NOTE_INSN_DELETED);
4001 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4002 discover_nonconstant_array_refs ();
4004 targetm.expand_to_rtl_hook ();
4005 crtl->stack_alignment_needed = STACK_BOUNDARY;
4006 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4007 crtl->stack_alignment_estimated = 0;
4008 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4009 cfun->cfg->max_jumptable_ents = 0;
4011 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4012 of the function section at expansion time to predict the distance of calls. */
4013 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4015 /* Expand the variables recorded during gimple lowering. */
4016 timevar_push (TV_VAR_EXPAND);
4019 expand_used_vars ();
4021 var_seq = get_insns ();
4023 timevar_pop (TV_VAR_EXPAND);
4025 /* Honor stack protection warnings. */
4026 if (warn_stack_protect)
4028 if (cfun->calls_alloca)
4029 warning (OPT_Wstack_protector,
4030 "stack protector not protecting local variables: "
4031 "variable length buffer");
4032 if (has_short_buffer && !crtl->stack_protect_guard)
4033 warning (OPT_Wstack_protector,
4034 "stack protector not protecting function: "
4035 "all local arrays are less than %d bytes long",
4036 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4039 /* Set up parameters and prepare for return, for the function. */
4040 expand_function_start (current_function_decl);
4042 /* If we emitted any instructions for setting up the variables,
4043 emit them before the FUNCTION_START note. */
4046 emit_insn_before (var_seq, parm_birth_insn);
4048 /* In expand_function_end we'll insert the alloca save/restore
4049 before parm_birth_insn. We've just inserted an alloca call.
4050 Adjust the pointer to match. */
4051 parm_birth_insn = var_seq;
4054 /* Now that we also have the parameter RTXs, copy them over to our
4056 for (i = 0; i < SA.map->num_partitions; i++)
4058 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4060 if (TREE_CODE (var) != VAR_DECL
4061 && !SA.partition_to_pseudo[i])
4062 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4063 gcc_assert (SA.partition_to_pseudo[i]);
4065 /* If this decl was marked as living in multiple places, reset
4066 this now to NULL. */
4067 if (DECL_RTL_IF_SET (var) == pc_rtx)
4068 SET_DECL_RTL (var, NULL);
4070 /* Some RTL parts really want to look at DECL_RTL(x) when x
4071 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4072 SET_DECL_RTL here to make this available, but that would mean
4073 selecting one of the potentially many RTLs for one DECL. Instead
4074 of doing that we simply reset the MEM_EXPR of the RTL in question,
4075 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4076 if (!DECL_RTL_SET_P (var))
4078 if (MEM_P (SA.partition_to_pseudo[i]))
4079 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4083 /* If this function is `main', emit a call to `__main'
4084 to run global initializers, etc. */
4085 if (DECL_NAME (current_function_decl)
4086 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4087 && DECL_FILE_SCOPE_P (current_function_decl))
4088 expand_main_function ();
4090 /* Initialize the stack_protect_guard field. This must happen after the
4091 call to __main (if any) so that the external decl is initialized. */
4092 if (crtl->stack_protect_guard)
4093 stack_protect_prologue ();
4095 expand_phi_nodes (&SA);
4097 /* Register rtl specific functions for cfg. */
4098 rtl_register_cfg_hooks ();
4100 init_block = construct_init_block ();
4102 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4103 remaining edges later. */
4104 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4105 e->flags &= ~EDGE_EXECUTABLE;
4107 lab_rtx_for_bb = pointer_map_create ();
4108 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4109 bb = expand_gimple_basic_block (bb);
4111 if (MAY_HAVE_DEBUG_INSNS)
4112 expand_debug_locations ();
4114 execute_free_datastructures ();
4115 timevar_push (TV_OUT_OF_SSA);
4116 finish_out_of_ssa (&SA);
4117 timevar_pop (TV_OUT_OF_SSA);
4119 timevar_push (TV_POST_EXPAND);
4120 /* We are no longer in SSA form. */
4121 cfun->gimple_df->in_ssa_p = false;
4123 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4124 conservatively to true until they are all profile aware. */
4125 pointer_map_destroy (lab_rtx_for_bb);
4128 construct_exit_block ();
4129 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4130 insn_locators_finalize ();
4132 /* Zap the tree EH table. */
4133 set_eh_throw_stmt_table (cfun, NULL);
4135 rebuild_jump_labels (get_insns ());
4137 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4141 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4145 /* Avoid putting insns before parm_birth_insn. */
4146 if (e->src == ENTRY_BLOCK_PTR
4147 && single_succ_p (ENTRY_BLOCK_PTR)
4150 rtx insns = e->insns.r;
4151 e->insns.r = NULL_RTX;
4152 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4155 commit_one_edge_insertion (e);
4162 /* We're done expanding trees to RTL. */
4163 currently_expanding_to_rtl = 0;
4165 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4169 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4171 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4172 e->flags &= ~EDGE_EXECUTABLE;
4174 /* At the moment not all abnormal edges match the RTL
4175 representation. It is safe to remove them here as
4176 find_many_sub_basic_blocks will rediscover them.
4177 In the future we should get this fixed properly. */
4178 if ((e->flags & EDGE_ABNORMAL)
4179 && !(e->flags & EDGE_SIBCALL))
4186 blocks = sbitmap_alloc (last_basic_block);
4187 sbitmap_ones (blocks);
4188 find_many_sub_basic_blocks (blocks);
4189 sbitmap_free (blocks);
4190 purge_all_dead_edges ();
4194 expand_stack_alignment ();
4196 #ifdef ENABLE_CHECKING
4197 verify_flow_info ();
4200 /* There's no need to defer outputting this function any more; we
4201 know we want to output it. */
4202 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4204 /* Now that we're done expanding trees to RTL, we shouldn't have any
4205 more CONCATs anywhere. */
4206 generating_concat_p = 0;
4211 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4212 /* And the pass manager will dump RTL for us. */
4215 /* If we're emitting a nested function, make sure its parent gets
4216 emitted as well. Doing otherwise confuses debug info. */
4219 for (parent = DECL_CONTEXT (current_function_decl);
4220 parent != NULL_TREE;
4221 parent = get_containing_scope (parent))
4222 if (TREE_CODE (parent) == FUNCTION_DECL)
4223 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4226 /* We are now committed to emitting code for this function. Do any
4227 preparation, such as emitting abstract debug info for the inline
4228 before it gets mangled by optimization. */
4229 if (cgraph_function_possibly_inlined_p (current_function_decl))
4230 (*debug_hooks->outlining_inline_function) (current_function_decl);
4232 TREE_ASM_WRITTEN (current_function_decl) = 1;
4234 /* After expanding, the return labels are no longer needed. */
4235 return_label = NULL;
4236 naked_return_label = NULL;
4237 /* Tag the blocks with a depth number so that change_scope can find
4238 the common parent easily. */
4239 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4240 default_rtl_profile ();
4241 timevar_pop (TV_POST_EXPAND);
4245 struct rtl_opt_pass pass_expand =
4249 "expand", /* name */
4251 gimple_expand_cfg, /* execute */
4254 0, /* static_pass_number */
4255 TV_EXPAND, /* tv_id */
4256 PROP_ssa | PROP_gimple_leh | PROP_cfg
4257 | PROP_gimple_lcx, /* properties_required */
4258 PROP_rtl, /* properties_provided */
4259 PROP_ssa | PROP_trees, /* properties_destroyed */
4260 TODO_verify_ssa | TODO_verify_flow
4261 | TODO_verify_stmts, /* todo_flags_start */
4263 | TODO_ggc_collect /* todo_flags_finish */