/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   to RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
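/* For illustration: given an SSA name (the SSA version of some user
   variable), SSAVAR yields the underlying VAR_DECL; for a plain decl it
   is the identity.  This lets the code below treat SSA names and decls
   uniformly when querying size and alignment.  */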
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};
#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;
/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing (see stack_var_cmp, which sorts size decreasing).  */
static size_t *stack_vars_sorted;
/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
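/* A worked example with hypothetical numbers: if PREFERRED_STACK_BOUNDARY
   is 128 bits (16 bytes) and the target's STARTING_FRAME_OFFSET is -8,
   the phase is 8, so (frame_offset + 8) % 16 == 0 holds for the offsets
   this file hands out.  The values are illustrative, not target facts.  */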
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore any alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
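/* E.g. (illustrative numbers): a decl whose LOCAL_DECL_ALIGNMENT is 32
   bits comes back as a byte alignment of 32 / BITS_PER_UNIT == 4 on a
   target with 8-bit units.  */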
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
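/* A minimal sketch of the rounding arithmetic used above, assuming ALIGN
   is a power of two: because -align is a mask with the low log2(align)
   bits clear,

     x = (x + align - 1) & -align;   rounds X up to a multiple of ALIGN,
     x &= -align;                    rounds X down.

   For example, rounding 13 up to an 8-byte boundary: (13 + 7) & -8 == 16.  */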
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
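/* Note that conflicts are recorded symmetrically (both bitmaps get a
   bit set), so a query such as stack_var_conflict_p below only needs
   to look at one side.  */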
/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias, and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
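/* An illustrative source-level scenario (hypothetical, in the spirit of
   PR25654):

     union { short s; int i; } u1, u2;

   If u1 and u2 were given overlapping stack slots without a recorded
   conflict, type-based aliasing could let RTL alias analysis reorder a
   store to u1.s past a load of u2.i even though the slots overlap.  The
   conflict added above keeps such temporaries apart when
   -fstrict-aliasing is in effect.  */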
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  if (for_conflict)
    {
      /* We need to add conflicts for everything live at the start of
         this block.  Unlike classical liveness for named objects we can't
         rely on seeing a def/use of the names we're interested in.
         There might merely be indirect loads/stores.  We'd not add any
         conflicts for such partitions.  */
      bitmap_iterator bi;
      unsigned i;

      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
        {
          unsigned j;
          bitmap_iterator bj;
          EXECUTE_IF_SET_IN_BITMAP (work, i, j, bj)
            add_stack_var_conflict (i, j);
        }
      visit = visit_conflict;
    }

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
    }
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the life range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This over-approximates in the case where we have, for instance, moved
     an address-taken operation upward without also moving a dereference
     to it upward.  But it is conservatively correct, as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */
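  /* Stated as a rough dataflow equation (an informal recap of the loop
     below, not new semantics):

       ACTIVE-IN (BB)  = union over predecessors P of ACTIVE-OUT (P)
       ACTIVE-OUT (BB) = (ACTIVE-IN (BB) + partitions first mentioned in BB)
                         - partitions clobbered in BB

     iterated until no bb->aux bitmap changes any more.  */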
  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna > alignb)
    return -1;
  if (aligna < alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert(visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if (!bitmap_bit_p (temp, i)
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A with size S:
	  Look for the largest non-conflicting object B with size <= S
	  and union it into A's partition, repeating until no further
	  object can be merged.  */
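/* A rough pseudo-C sketch of the pairwise merging performed below
   (illustrative only; the real loop follows):

     for each si in sorted order:            -- representatives, largest first
       for each sj > si in sorted order:
         if (representative (j) == j
             && !stack_var_conflict_p (i, j)
             && same small/large alignment class)
           union_stack_vars (i, j);

   so each variable ends up in exactly one partition whose members can
   share a stack location.  */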
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects; J joins I's partition.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         it isn't as important; we'll simply use the alignment that is
         already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
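      /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, the
         largest power-of-two alignment the offset can guarantee; e.g.
         an offset of 24 yields 8 bytes, i.e. 64-bit alignment.  */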
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on its location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
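/* An informal recap of the phase numbering (derived from
   stack_protect_decl_phase above, not a new policy): roughly, phase 1
   holds character arrays, the most attractive smash targets, placed
   nearest the guard; phase 2 holds other kinds of arrays; phase 0 is
   everything else.  See stack_protect_decl_phase for the exact rules.  */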
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */

static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */

static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple stmt)
{
  struct tm_restart_node dummy;
  void **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
      tree list = n->label_or_list;
      rtx insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
      if (debug_args)
        {
          unsigned int ix;
          tree dtemp;

          for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
            {
              gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
              expand_debug_expr (dtemp);
            }
        }
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);

  mark_transaction_restart_calls (stmt);
}
2081 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2082    STMT that doesn't require special handling for outgoing edges.  That
2083    is, no tailcalls and no GIMPLE_COND.  */
2086 expand_gimple_stmt_1 (gimple stmt)
2090 set_curr_insn_source_location (gimple_location (stmt));
2091 set_curr_insn_block (gimple_block (stmt));
2093 switch (gimple_code (stmt))
2096 op0 = gimple_goto_dest (stmt);
2097 if (TREE_CODE (op0) == LABEL_DECL)
2100 expand_computed_goto (op0);
2103 expand_label (gimple_label_label (stmt));
2106 case GIMPLE_PREDICT:
2112 expand_asm_stmt (stmt);
2115 expand_call_stmt (stmt);
2119 op0 = gimple_return_retval (stmt);
2121 if (op0 && op0 != error_mark_node)
2123 tree result = DECL_RESULT (current_function_decl);
2125 /* If we are not returning the current function's RESULT_DECL,
2126 build an assignment to it. */
2129 /* I believe that a function's RESULT_DECL is unique. */
2130 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2132 	      /* ??? We'd like to simply use expand_assignment here,
2133 but this fails if the value is of BLKmode but the return
2134 decl is a register. expand_return has special handling
2135 for this combination, which eventually should move
2136 to common code. See comments there. Until then, let's
2137 build a modify expression :-/ */
2138 	      op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2139 			    result, op0);
2142       if (!op0)
2143 	expand_null_return ();
2144       else
2145 	expand_return (op0);
2150 tree lhs = gimple_assign_lhs (stmt);
2152 /* Tree expand used to fiddle with |= and &= of two bitfield
2153 COMPONENT_REFs here. This can't happen with gimple, the LHS
2154 of binary assigns must be a gimple reg. */
2156 if (TREE_CODE (lhs) != SSA_NAME
2157 || get_gimple_rhs_class (gimple_expr_code (stmt))
2158 == GIMPLE_SINGLE_RHS)
2160 tree rhs = gimple_assign_rhs1 (stmt);
2161 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2162 == GIMPLE_SINGLE_RHS);
2163 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2164 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2165 if (TREE_CLOBBER_P (rhs))
2166 	      /* This is a clobber, marking that this LHS is going
2167 		 out of scope.  */
2168 	      ;
2169 	    else
2170 expand_assignment (lhs, rhs,
2171 gimple_assign_nontemporal_move_p (stmt));
2176 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2177 struct separate_ops ops;
2178 bool promoted = false;
2180 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2181 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2184 ops.code = gimple_assign_rhs_code (stmt);
2185 ops.type = TREE_TYPE (lhs);
2186 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2188 case GIMPLE_TERNARY_RHS:
2189 		  ops.op2 = gimple_assign_rhs3 (stmt);
2190 		  /* Fallthru */
2191 case GIMPLE_BINARY_RHS:
2192 		  ops.op1 = gimple_assign_rhs2 (stmt);
2193 		  /* Fallthru */
2194 case GIMPLE_UNARY_RHS:
2195 ops.op0 = gimple_assign_rhs1 (stmt);
2200 ops.location = gimple_location (stmt);
2202 	    /* If we want to use a nontemporal store, force the value into a
2203 	       register first.  If we store into a promoted register,
2204 	       don't directly expand to target.  */
2205 temp = nontemporal || promoted ? NULL_RTX : target;
2206 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2213 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2214 /* If TEMP is a VOIDmode constant, use convert_modes to make
2215 sure that we properly convert it. */
2216 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2218 temp = convert_modes (GET_MODE (target),
2219 TYPE_MODE (ops.type),
2221 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2222 GET_MODE (target), temp, unsignedp);
2225 convert_move (SUBREG_REG (target), temp, unsignedp);
2227 else if (nontemporal && emit_storent_insn (target, temp))
2231 temp = force_operand (temp, target);
2233 emit_move_insn (target, temp);
2244 /* Expand one gimple statement STMT and return the last RTL instruction
2245 before any of the newly generated ones.
2247 In addition to generating the necessary RTL instructions this also
2248 sets REG_EH_REGION notes if necessary and sets the current source
2249 location for diagnostics. */
2252 expand_gimple_stmt (gimple stmt)
2254 location_t saved_location = input_location;
2255 rtx last = get_last_insn ();
2260 /* We need to save and restore the current source location so that errors
2261 discovered during expansion are emitted with the right location. But
2262 it would be better if the diagnostic routines used the source location
2263 embedded in the tree nodes rather than globals. */
2264 if (gimple_has_location (stmt))
2265 input_location = gimple_location (stmt);
2267 expand_gimple_stmt_1 (stmt);
2269 /* Free any temporaries used to evaluate this statement. */
2272 input_location = saved_location;
2274 /* Mark all insns that may trap. */
2275   lp_nr = lookup_stmt_eh_lp (stmt);
2276   if (lp_nr)
2279 for (insn = next_real_insn (last); insn;
2280 insn = next_real_insn (insn))
2282 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2283 /* If we want exceptions for non-call insns, any
2284 may_trap_p instruction may throw. */
2285 && GET_CODE (PATTERN (insn)) != CLOBBER
2286 && GET_CODE (PATTERN (insn)) != USE
2287 && insn_could_throw_p (insn))
2288 make_reg_eh_region_note (insn, 0, lp_nr);
2295 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2296 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2297 generated a tail call (something that might be denied by the ABI
2298 rules governing the call; see calls.c).
2300 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2301 can still reach the rest of BB. The case here is __builtin_sqrt,
2302 where the NaN result goes through the external function (with a
2303 tailcall) and the normal result happens via a sqrt instruction. */
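/* Schematically (an illustration, not target-specific code), with
   -fmath-errno the sqrt builtin may expand to

     r = sqrt-insn (x)
     if r is NaN:  return sqrt (x)    -- emitted as a sibcall so the
                                         library can set errno
     lab:  ... use r ...

   leaving both a sibcall and a reachable fallthru path in the block.  */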
2306 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2314 last2 = last = expand_gimple_stmt (stmt);
2316 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2317     if (CALL_P (last) && SIBLING_CALL_P (last))
2318       goto found;
2320 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2322   *can_fallthru = true;
2323   return NULL;
2325  found:
2326 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2327 Any instructions emitted here are about to be deleted. */
2328 do_pending_stack_adjust ();
2330 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2331 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2332 EH or abnormal edges, we shouldn't have created a tail call in
2333 the first place. So it seems to me we should just be removing
2334      all edges here, or redirecting the existing fallthru edge to
2335      the exit block.  */
2340 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2342 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2344 if (e->dest != EXIT_BLOCK_PTR)
2346 e->dest->count -= e->count;
2347 e->dest->frequency -= EDGE_FREQUENCY (e);
2348 	      if (e->dest->count < 0)
2349 		e->dest->count = 0;
2350 if (e->dest->frequency < 0)
2351 e->dest->frequency = 0;
2354 probability += e->probability;
2361 /* This is somewhat ugly: the call_expr expander often emits instructions
2362 after the sibcall (to perform the function return). These confuse the
2363      find_many_sub_basic_blocks code, so we need to get rid of them.  */
2364 last = NEXT_INSN (last);
2365 gcc_assert (BARRIER_P (last));
2367 *can_fallthru = false;
2368 while (NEXT_INSN (last))
2370       /* For instance, the sqrt builtin expander expands an `if' with a
2371 	 sibcall in the then-branch and a label for the `else' branch.  */
2372 if (LABEL_P (NEXT_INSN (last)))
2374 *can_fallthru = true;
2377 delete_insn (NEXT_INSN (last));
2380 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2381 e->probability += probability;
2384 update_bb_for_insn (bb);
2386 if (NEXT_INSN (last))
2388 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2391 if (BARRIER_P (last))
2392 BB_END (bb) = PREV_INSN (last);
2395 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2400 /* Return the difference between the floor and the truncated result of
2401 a signed division by OP1 with remainder MOD. */
2403 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2405 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2406 return gen_rtx_IF_THEN_ELSE
2407 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2408 gen_rtx_IF_THEN_ELSE
2409 (mode, gen_rtx_LT (BImode,
2410 gen_rtx_DIV (mode, op1, mod),
2412 constm1_rtx, const0_rtx),
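/* Worked example: for -7 / 2, truncation gives quotient -3 and remainder
   -1; op1 / mod = 2 / -1 is negative, so the adjustment is -1 and the
   floor result is -3 + -1 = -4.  When the signs agree (7 / 2 gives 3
   remainder 1), the adjustment is 0.  */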
2416 /* Return the difference between the ceil and the truncated result of
2417 a signed division by OP1 with remainder MOD. */
2419 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2421 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2422 return gen_rtx_IF_THEN_ELSE
2423 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2424 gen_rtx_IF_THEN_ELSE
2425 (mode, gen_rtx_GT (BImode,
2426 gen_rtx_DIV (mode, op1, mod),
2428 const1_rtx, const0_rtx),
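/* Worked example: 7 / 2 truncates to 3 with remainder 1; op1 / mod = 2
   is positive, so the adjustment is +1 and the ceiling is 4.  For -7 / 2
   the remainder is -1, op1 / mod is negative, and the truncated quotient
   -3 is already the ceiling.  */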
2432 /* Return the difference between the ceil and the truncated result of
2433 an unsigned division by OP1 with remainder MOD. */
2435 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2437 /* (mod != 0 ? 1 : 0) */
2438 return gen_rtx_IF_THEN_ELSE
2439 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2440 const1_rtx, const0_rtx);
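/* Worked example: 7u / 2u truncates to 3 with remainder 1; any nonzero
   remainder bumps the quotient by one, giving the ceiling 4.  */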
2443 /* Return the difference between the rounded and the truncated result
2444 of a signed division by OP1 with remainder MOD. Halfway cases are
2445 rounded away from zero, rather than to the nearest even number. */
2447 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2449 /* (abs (mod) >= abs (op1) - abs (mod)
2450 ? (op1 / mod > 0 ? 1 : -1)
2452 return gen_rtx_IF_THEN_ELSE
2453 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2454 gen_rtx_MINUS (mode,
2455 gen_rtx_ABS (mode, op1),
2456 gen_rtx_ABS (mode, mod))),
2457 gen_rtx_IF_THEN_ELSE
2458 (mode, gen_rtx_GT (BImode,
2459 gen_rtx_DIV (mode, op1, mod),
2461 const1_rtx, constm1_rtx),
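/* Worked example: for 7 / 2, |mod| = 1 equals |op1| - |mod| = 1, so the
   halfway case 3.5 is rounded away from zero to 4.  For 7 / 3,
   |mod| = 1 < |op1| - |mod| = 2, so there is no adjustment and the
   truncated result 2 stands.  */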
2465 /* Return the difference between the rounded and the truncated result
2466    of an unsigned division by OP1 with remainder MOD.  Halfway cases
2467    are rounded away from zero, rather than to the nearest even
2468    number.  */
2470 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2472 /* (mod >= op1 - mod ? 1 : 0) */
2473 return gen_rtx_IF_THEN_ELSE
2474 (mode, gen_rtx_GE (BImode, mod,
2475 gen_rtx_MINUS (mode, op1, mod)),
2476 const1_rtx, const0_rtx);
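/* Worked example: 5u / 3u truncates to 1 with remainder 2; since
   2 >= 3 - 2, the quotient is bumped to 2, i.e. round (5/3) = 2.  */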
2479 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2480    any rtl.  */
2483 convert_debug_memory_address (enum machine_mode mode, rtx x,
2486 enum machine_mode xmode = GET_MODE (x);
2488 #ifndef POINTERS_EXTEND_UNSIGNED
2489 gcc_assert (mode == Pmode
2490 || mode == targetm.addr_space.address_mode (as));
2491 gcc_assert (xmode == mode || xmode == VOIDmode);
2494 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2495 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2497 gcc_assert (mode == address_mode || mode == pointer_mode);
2499 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2502 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2503 x = simplify_gen_subreg (mode, x, xmode,
2504 subreg_lowpart_offset
2506 else if (POINTERS_EXTEND_UNSIGNED > 0)
2507 x = gen_rtx_ZERO_EXTEND (mode, x);
2508 else if (!POINTERS_EXTEND_UNSIGNED)
2509 x = gen_rtx_SIGN_EXTEND (mode, x);
2512 switch (GET_CODE (x))
2515 if ((SUBREG_PROMOTED_VAR_P (x)
2516 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2517 || (GET_CODE (SUBREG_REG (x)) == PLUS
2518 && REG_P (XEXP (SUBREG_REG (x), 0))
2519 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2520 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2521 && GET_MODE (SUBREG_REG (x)) == mode)
2522 return SUBREG_REG (x);
2525 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2526 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2529 temp = shallow_copy_rtx (x);
2530 PUT_MODE (temp, mode);
2533 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2535 temp = gen_rtx_CONST (mode, temp);
2539 if (CONST_INT_P (XEXP (x, 1)))
2541 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2543 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2549   /* Don't know how to express ptr_extend as an operation in debug info.  */
2552 #endif /* POINTERS_EXTEND_UNSIGNED */
2557 /* Return an RTX equivalent to the value of the parameter DECL. */
2560 expand_debug_parm_decl (tree decl)
2562 rtx incoming = DECL_INCOMING_RTL (decl);
2565 && GET_MODE (incoming) != BLKmode
2566 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2567 || (MEM_P (incoming)
2568 && REG_P (XEXP (incoming, 0))
2569 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2571 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2573 #ifdef HAVE_window_save
2574 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2575 If the target machine has an explicit window save instruction, the
2576 actual entry value is the corresponding OUTGOING_REGNO instead. */
2577 if (REG_P (incoming)
2578 	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2579 	incoming
2580 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2581 OUTGOING_REGNO (REGNO (incoming)), 0);
2582 else if (MEM_P (incoming))
2584 rtx reg = XEXP (incoming, 0);
2585 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2587 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2588 incoming = replace_equiv_address_nv (incoming, reg);
2593       ENTRY_VALUE_EXP (rtl) = incoming;
2594       return rtl;
2597   if (incoming
2598 && GET_MODE (incoming) != BLKmode
2599       && !TREE_ADDRESSABLE (decl)
2600       && MEM_P (incoming)
2601 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2602 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2603 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2604 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2610 /* Return an RTX equivalent to the value of the tree expression EXP. */
2613 expand_debug_expr (tree exp)
2615 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2616 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2617 enum machine_mode inner_mode = VOIDmode;
2618 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2621 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2623 case tcc_expression:
2624 switch (TREE_CODE (exp))
2628 case WIDEN_MULT_PLUS_EXPR:
2629 case WIDEN_MULT_MINUS_EXPR:
2633 case TRUTH_ANDIF_EXPR:
2634 case TRUTH_ORIF_EXPR:
2635 case TRUTH_AND_EXPR:
2637 case TRUTH_XOR_EXPR:
2640 case TRUTH_NOT_EXPR:
2649 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2656 case tcc_comparison:
2657 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2664 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2665 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2675 case tcc_exceptional:
2676 case tcc_declaration:
2682 switch (TREE_CODE (exp))
2685 if (!lookup_constant_def (exp))
2687 if (strlen (TREE_STRING_POINTER (exp)) + 1
2688 	      != (size_t) TREE_STRING_LENGTH (exp))
2689 	    return NULL_RTX;
2690 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2691 op0 = gen_rtx_MEM (BLKmode, op0);
2692 set_mem_attributes (op0, exp, 0);
2695 /* Fall through... */
2700 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2704 gcc_assert (COMPLEX_MODE_P (mode));
2705 op0 = expand_debug_expr (TREE_REALPART (exp));
2706 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2707 return gen_rtx_CONCAT (mode, op0, op1);
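      /* E.g. the complex constant 3.0 + 4.0i becomes a CONCAT of the
	 expansions of its real and imaginary parts.  */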
2709 case DEBUG_EXPR_DECL:
2710 op0 = DECL_RTL_IF_SET (exp);
2715 op0 = gen_rtx_DEBUG_EXPR (mode);
2716 DEBUG_EXPR_TREE_DECL (op0) = exp;
2717 SET_DECL_RTL (exp, op0);
2727 op0 = DECL_RTL_IF_SET (exp);
2729       /* This decl was probably optimized away.  */
2730       if (!op0)
2732 if (TREE_CODE (exp) != VAR_DECL
2733 || DECL_EXTERNAL (exp)
2734 || !TREE_STATIC (exp)
2736 || DECL_HARD_REGISTER (exp)
2737 || DECL_IN_CONSTANT_POOL (exp)
2738 	      || mode == VOIDmode)
2739 	    return NULL;
2741 	  op0 = make_decl_rtl_for_debug (exp);
2742 	  if (!op0 || !MEM_P (op0)
2743 	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2744 	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2745 	    return NULL;
2748 op0 = copy_rtx (op0);
2750 if (GET_MODE (op0) == BLKmode
2751 	  /* If op0 is not BLKmode, but mode is BLKmode, the
2752 	     adjust_address_nv below would ICE.  While it is likely
2753 	     a FE bug, try to be robust here.  See PR43166.  */
2754 	  || mode == BLKmode
2755 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2757 gcc_assert (MEM_P (op0));
2758 op0 = adjust_address_nv (op0, mode, 0);
2769 inner_mode = GET_MODE (op0);
2771 if (mode == inner_mode)
2774 if (inner_mode == VOIDmode)
2776 if (TREE_CODE (exp) == SSA_NAME)
2777 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2779 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2780 if (mode == inner_mode)
2784 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2786 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2787 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2788 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2789 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2791 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2793 else if (FLOAT_MODE_P (mode))
2795 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2796 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2797 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2799 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2801 else if (FLOAT_MODE_P (inner_mode))
2804 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2806 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2808 else if (CONSTANT_P (op0)
2809 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2810 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2811 subreg_lowpart_offset (mode,
2813 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2814 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2816 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2818 	op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2820       return op0;
2822     case MEM_REF:
2824 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2826 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2827 TREE_OPERAND (exp, 0),
2828 				     TREE_OPERAND (exp, 1));
2829 	  if (newexp)
2830 return expand_debug_expr (newexp);
2834 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2838 if (TREE_CODE (exp) == MEM_REF)
2840 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2841 || (GET_CODE (op0) == PLUS
2842 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2843 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2844 Instead just use get_inner_reference. */
2847 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2848 if (!op1 || !CONST_INT_P (op1))
2851 op0 = plus_constant (op0, INTVAL (op1));
2854 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2855 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2857 as = ADDR_SPACE_GENERIC;
2859 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2861 if (op0 == NULL_RTX)
2864 op0 = gen_rtx_MEM (mode, op0);
2865 set_mem_attributes (op0, exp, 0);
2866 if (TREE_CODE (exp) == MEM_REF
2867 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2868 set_mem_expr (op0, NULL_TREE);
2869 set_mem_addr_space (op0, as);
2873 case TARGET_MEM_REF:
2874 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2875 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2878 op0 = expand_debug_expr
2879 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2883 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2884 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2886 as = ADDR_SPACE_GENERIC;
2888 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2890 if (op0 == NULL_RTX)
2893 op0 = gen_rtx_MEM (mode, op0);
2895 set_mem_attributes (op0, exp, 0);
2896 set_mem_addr_space (op0, as);
2902 case ARRAY_RANGE_REF:
2907 case VIEW_CONVERT_EXPR:
2909 enum machine_mode mode1;
2910 HOST_WIDE_INT bitsize, bitpos;
2913 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2914 &mode1, &unsignedp, &volatilep, false);
2920 orig_op0 = op0 = expand_debug_expr (tem);
2927 enum machine_mode addrmode, offmode;
2932 op0 = XEXP (op0, 0);
2933 addrmode = GET_MODE (op0);
2934 if (addrmode == VOIDmode)
2937 op1 = expand_debug_expr (offset);
2941 offmode = GET_MODE (op1);
2942 if (offmode == VOIDmode)
2943 offmode = TYPE_MODE (TREE_TYPE (offset));
2945 if (addrmode != offmode)
2946 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2947 subreg_lowpart_offset (addrmode,
2950 /* Don't use offset_address here, we don't need a
2951 		 recognizable address, and we don't want to generate
2952 		 code.  */
2953 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2959 	  if (mode1 == VOIDmode)
2960 	    /* Bitfield.  */
2961 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2962 if (bitpos >= BITS_PER_UNIT)
2964 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2965 bitpos %= BITS_PER_UNIT;
2967 	  else if (bitpos < 0)
2969 	      HOST_WIDE_INT units
2970 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2971 op0 = adjust_address_nv (op0, mode1, units);
2972 bitpos += units * BITS_PER_UNIT;
2974 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2975 op0 = adjust_address_nv (op0, mode, 0);
2976 else if (GET_MODE (op0) != mode1)
2977 op0 = adjust_address_nv (op0, mode1, 0);
2979 op0 = copy_rtx (op0);
2980 if (op0 == orig_op0)
2981 op0 = shallow_copy_rtx (op0);
2982 set_mem_attributes (op0, exp, 0);
2985 	  if (bitpos == 0 && mode == GET_MODE (op0))
2986 	    return op0;
2991 	  if (GET_MODE (op0) == BLKmode)
2992 	    return NULL;
2994 if ((bitpos % BITS_PER_UNIT) == 0
2995 && bitsize == GET_MODE_BITSIZE (mode1))
2997 enum machine_mode opmode = GET_MODE (op0);
2999 if (opmode == VOIDmode)
3000 opmode = TYPE_MODE (TREE_TYPE (tem));
3002 /* This condition may hold if we're expanding the address
3003 right past the end of an array that turned out not to
3004 be addressable (i.e., the address was only computed in
3005 debug stmts). The gen_subreg below would rightfully
3006 		 crash, and the address doesn't really exist, so just
3007 		 drop it.  */
3008 if (bitpos >= GET_MODE_BITSIZE (opmode))
3011 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3012 return simplify_gen_subreg (mode, op0, opmode,
3013 bitpos / BITS_PER_UNIT);
3016 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3017 && TYPE_UNSIGNED (TREE_TYPE (exp))
3019 : ZERO_EXTRACT, mode,
3020 GET_MODE (op0) != VOIDmode
3022 : TYPE_MODE (TREE_TYPE (tem)),
3023 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3027 return simplify_gen_unary (ABS, mode, op0, mode);
3030 return simplify_gen_unary (NEG, mode, op0, mode);
3033 return simplify_gen_unary (NOT, mode, op0, mode);
3036 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3038 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3041 case FIX_TRUNC_EXPR:
3042 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3045 case POINTER_PLUS_EXPR:
3046 /* For the rare target where pointers are not the same size as
3047 	 size_t, we need to check for mis-matched modes and correct
3048 	 the addition.  */
3050 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3051 && GET_MODE (op0) != GET_MODE (op1))
3053 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3054 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3057 /* We always sign-extend, regardless of the signedness of
3058 the operand, because the operand is always unsigned
3059 here even if the original C expression is signed. */
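	  /* E.g. with 32-bit pointers and a hypothetical 16-bit sizetype,
	     an offset of (sizetype) -4 has the bit pattern 0xfffc; only a
	     sign extension to 0xfffffffc makes the PLUS below subtract 4
	     as intended.  */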
3060 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3065 return simplify_gen_binary (PLUS, mode, op0, op1);
3068 return simplify_gen_binary (MINUS, mode, op0, op1);
3071 return simplify_gen_binary (MULT, mode, op0, op1);
3074 case TRUNC_DIV_EXPR:
3075 case EXACT_DIV_EXPR:
3077 return simplify_gen_binary (UDIV, mode, op0, op1);
3079 return simplify_gen_binary (DIV, mode, op0, op1);
3081 case TRUNC_MOD_EXPR:
3082 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3084 case FLOOR_DIV_EXPR:
3086 return simplify_gen_binary (UDIV, mode, op0, op1);
3089 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3090 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3091 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3092 return simplify_gen_binary (PLUS, mode, div, adj);
3095 case FLOOR_MOD_EXPR:
3097 return simplify_gen_binary (UMOD, mode, op0, op1);
3100 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3101 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3102 adj = simplify_gen_unary (NEG, mode,
3103 simplify_gen_binary (MULT, mode, adj, op1),
3105 return simplify_gen_binary (PLUS, mode, mod, adj);
3111 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3112 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3113 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3114 return simplify_gen_binary (PLUS, mode, div, adj);
3118 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3119 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3120 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3121 return simplify_gen_binary (PLUS, mode, div, adj);
3127 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3128 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3129 adj = simplify_gen_unary (NEG, mode,
3130 simplify_gen_binary (MULT, mode, adj, op1),
3132 return simplify_gen_binary (PLUS, mode, mod, adj);
3136 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3137 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3138 adj = simplify_gen_unary (NEG, mode,
3139 simplify_gen_binary (MULT, mode, adj, op1),
3141 return simplify_gen_binary (PLUS, mode, mod, adj);
3144 case ROUND_DIV_EXPR:
3147 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3148 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3149 rtx adj = round_udiv_adjust (mode, mod, op1);
3150 return simplify_gen_binary (PLUS, mode, div, adj);
3154 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3155 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3156 rtx adj = round_sdiv_adjust (mode, mod, op1);
3157 return simplify_gen_binary (PLUS, mode, div, adj);
3160 case ROUND_MOD_EXPR:
3163 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3164 rtx adj = round_udiv_adjust (mode, mod, op1);
3165 adj = simplify_gen_unary (NEG, mode,
3166 simplify_gen_binary (MULT, mode, adj, op1),
3168 return simplify_gen_binary (PLUS, mode, mod, adj);
3172 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3173 rtx adj = round_sdiv_adjust (mode, mod, op1);
3174 adj = simplify_gen_unary (NEG, mode,
3175 simplify_gen_binary (MULT, mode, adj, op1),
3177 return simplify_gen_binary (PLUS, mode, mod, adj);
3181 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3185 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3187 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3190 return simplify_gen_binary (ROTATE, mode, op0, op1);
3193 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3196 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3199 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3202 case TRUTH_AND_EXPR:
3203 return simplify_gen_binary (AND, mode, op0, op1);
3207 return simplify_gen_binary (IOR, mode, op0, op1);
3210 case TRUTH_XOR_EXPR:
3211 return simplify_gen_binary (XOR, mode, op0, op1);
3213 case TRUTH_ANDIF_EXPR:
3214 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3216 case TRUTH_ORIF_EXPR:
3217 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3219 case TRUTH_NOT_EXPR:
3220 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3223 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3227 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3231 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3235 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3239 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3242 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3244 case UNORDERED_EXPR:
3245 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3248 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3251 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3254 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3257 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3260 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3263 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3266 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3269 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3272 gcc_assert (COMPLEX_MODE_P (mode));
3273 if (GET_MODE (op0) == VOIDmode)
3274 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3275 if (GET_MODE (op1) == VOIDmode)
3276 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3277 return gen_rtx_CONCAT (mode, op0, op1);
3280 if (GET_CODE (op0) == CONCAT)
3281 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3282 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3284 GET_MODE_INNER (mode)));
3287 enum machine_mode imode = GET_MODE_INNER (mode);
3292 re = adjust_address_nv (op0, imode, 0);
3293 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3297 enum machine_mode ifmode = int_mode_for_mode (mode);
3298 enum machine_mode ihmode = int_mode_for_mode (imode);
3300 if (ifmode == BLKmode || ihmode == BLKmode)
3302 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3305 re = gen_rtx_SUBREG (ifmode, re, 0);
3306 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3307 if (imode != ihmode)
3308 re = gen_rtx_SUBREG (imode, re, 0);
3309 im = copy_rtx (op0);
3311 im = gen_rtx_SUBREG (ifmode, im, 0);
3312 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3313 if (imode != ihmode)
3314 im = gen_rtx_SUBREG (imode, im, 0);
3316 im = gen_rtx_NEG (imode, im);
3317 return gen_rtx_CONCAT (mode, re, im);
3321 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3322 if (!op0 || !MEM_P (op0))
3324 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3325 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3326 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3327 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3328 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3329 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3331 if (handled_component_p (TREE_OPERAND (exp, 0)))
3333 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3335 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3336 &bitoffset, &bitsize, &maxsize);
3337 if ((TREE_CODE (decl) == VAR_DECL
3338 || TREE_CODE (decl) == PARM_DECL
3339 || TREE_CODE (decl) == RESULT_DECL)
3340 && (!TREE_ADDRESSABLE (decl)
3341 || target_for_debug_bind (decl))
3342 && (bitoffset % BITS_PER_UNIT) == 0
3344 && bitsize == maxsize)
3345 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3346 bitoffset / BITS_PER_UNIT);
3352 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3353 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3358 exp = build_constructor_from_list (TREE_TYPE (exp),
3359 TREE_VECTOR_CST_ELTS (exp));
3363 if (TREE_CLOBBER_P (exp))
3365 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3370 op0 = gen_rtx_CONCATN
3371 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3373 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3375 op1 = expand_debug_expr (val);
3378 XVECEXP (op0, 0, i) = op1;
3381 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3383 op1 = expand_debug_expr
3384 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3389 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3390 XVECEXP (op0, 0, i) = op1;
3396 goto flag_unsupported;
3399 /* ??? Maybe handle some builtins? */
3404 gimple g = get_gimple_for_ssa_name (exp);
3407 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3413 int part = var_to_partition (SA.map, exp);
3415 if (part == NO_PARTITION)
3417 	    /* If this is a reference to an incoming value of a parameter
3418 	       that is never used in the code, or whose incoming value is
3419 	       never used in the code, use the PARM_DECL's
3420 	       DECL_RTL if set.  */
3421 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3422 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3424 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3427 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3434 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3436 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3444 /* Vector stuff. For most of the codes we don't have rtl codes. */
3445 case REALIGN_LOAD_EXPR:
3446 case REDUC_MAX_EXPR:
3447 case REDUC_MIN_EXPR:
3448 case REDUC_PLUS_EXPR:
3450 case VEC_EXTRACT_EVEN_EXPR:
3451 case VEC_EXTRACT_ODD_EXPR:
3452 case VEC_INTERLEAVE_HIGH_EXPR:
3453 case VEC_INTERLEAVE_LOW_EXPR:
3454 case VEC_LSHIFT_EXPR:
3455 case VEC_PACK_FIX_TRUNC_EXPR:
3456 case VEC_PACK_SAT_EXPR:
3457 case VEC_PACK_TRUNC_EXPR:
3458 case VEC_RSHIFT_EXPR:
3459 case VEC_UNPACK_FLOAT_HI_EXPR:
3460 case VEC_UNPACK_FLOAT_LO_EXPR:
3461 case VEC_UNPACK_HI_EXPR:
3462 case VEC_UNPACK_LO_EXPR:
3463 case VEC_WIDEN_MULT_HI_EXPR:
3464 case VEC_WIDEN_MULT_LO_EXPR:
3465 case VEC_WIDEN_LSHIFT_HI_EXPR:
3466 case VEC_WIDEN_LSHIFT_LO_EXPR:
3471 case ADDR_SPACE_CONVERT_EXPR:
3472 case FIXED_CONVERT_EXPR:
3474 case WITH_SIZE_EXPR:
3478 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3479 && SCALAR_INT_MODE_P (mode))
3482 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3484 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3487 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3489 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3491 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3492 return simplify_gen_binary (PLUS, mode, op0, op2);
3496 case WIDEN_MULT_EXPR:
3497 case WIDEN_MULT_PLUS_EXPR:
3498 case WIDEN_MULT_MINUS_EXPR:
3499 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3500 && SCALAR_INT_MODE_P (mode))
3502 inner_mode = GET_MODE (op0);
3503 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3504 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3506 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3507 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3508 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3510 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3511 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3512 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3514 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3515 return simplify_gen_binary (PLUS, mode, op0, op2);
3517 return simplify_gen_binary (MINUS, mode, op2, op0);
3521 case WIDEN_SUM_EXPR:
3522 case WIDEN_LSHIFT_EXPR:
3523 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3524 && SCALAR_INT_MODE_P (mode))
3527 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3529 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3531 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3532 ? ASHIFT : PLUS, mode, op0, op1);
3537 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3541 #ifdef ENABLE_CHECKING
3550 /* Return an RTX equivalent to the source bind value of the tree expression
3551    EXP.  */
3554 expand_debug_source_expr (tree exp)
3557 enum machine_mode mode = VOIDmode, inner_mode;
3559 switch (TREE_CODE (exp))
3563 mode = DECL_MODE (exp);
3564 op0 = expand_debug_parm_decl (exp);
3567       /* See if this isn't an argument that has been completely
3568 	 optimized out.  */
3569 if (!DECL_RTL_SET_P (exp)
3570 && !DECL_INCOMING_RTL (exp)
3571 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3574 if (DECL_ABSTRACT_ORIGIN (exp))
3575 aexp = DECL_ABSTRACT_ORIGIN (exp);
3576 if (DECL_CONTEXT (aexp)
3577 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3579 VEC(tree, gc) **debug_args;
3582 #ifdef ENABLE_CHECKING
3584 for (parm = DECL_ARGUMENTS (current_function_decl);
3585 parm; parm = DECL_CHAIN (parm))
3586 gcc_assert (parm != exp
3587 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3589 debug_args = decl_debug_args_lookup (current_function_decl);
3590 if (debug_args != NULL)
3592 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3595 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3605 if (op0 == NULL_RTX)
3608 inner_mode = GET_MODE (op0);
3609 if (mode == inner_mode)
3612 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3614 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3615 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3616 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3617 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3619 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3621   else if (FLOAT_MODE_P (mode))
3622     gcc_unreachable ();
3623 else if (FLOAT_MODE_P (inner_mode))
3625 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3626 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3628 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3630 else if (CONSTANT_P (op0)
3631 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3632 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3633 subreg_lowpart_offset (mode, inner_mode));
3634 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3635 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3637 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3642 /* Expand the _LOCs in debug insns. We run this after expanding all
3643 regular insns, so that any variables referenced in the function
3644 will have their DECL_RTLs set. */
3647 expand_debug_locations (void)
3650 rtx last = get_last_insn ();
3651 int save_strict_alias = flag_strict_aliasing;
3653 /* New alias sets while setting up memory attributes cause
3654      -fcompare-debug failures, even though it doesn't bring about any
3655      codegen changes.  */
3656 flag_strict_aliasing = 0;
3658 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3659 if (DEBUG_INSN_P (insn))
3661 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3663 enum machine_mode mode;
3665 if (value == NULL_TREE)
3669 if (INSN_VAR_LOCATION_STATUS (insn)
3670 == VAR_INIT_STATUS_UNINITIALIZED)
3671 val = expand_debug_source_expr (value);
3673 val = expand_debug_expr (value);
3674 gcc_assert (last == get_last_insn ());
3678 val = gen_rtx_UNKNOWN_VAR_LOC ();
3681 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3683 gcc_assert (mode == GET_MODE (val)
3684 || (GET_MODE (val) == VOIDmode
3685 && (CONST_INT_P (val)
3686 || GET_CODE (val) == CONST_FIXED
3687 || GET_CODE (val) == CONST_DOUBLE
3688 || GET_CODE (val) == LABEL_REF)));
3691 INSN_VAR_LOCATION_LOC (insn) = val;
3694 flag_strict_aliasing = save_strict_alias;
3697 /* Expand basic block BB from GIMPLE trees to RTL. */
3700 expand_gimple_basic_block (basic_block bb)
3702 gimple_stmt_iterator gsi;
3711 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3714 /* Note that since we are now transitioning from GIMPLE to RTL, we
3715 cannot use the gsi_*_bb() routines because they expect the basic
3716 block to be in GIMPLE, instead of RTL. Therefore, we need to
3717 access the BB sequence directly. */
3718 stmts = bb_seq (bb);
3719 bb->il.gimple = NULL;
3720 rtl_profile_for_bb (bb);
3721 init_rtl_bb_info (bb);
3722 bb->flags |= BB_RTL;
3724   /* Remove the RETURN_EXPR if we may fall through to the exit
3725      instead.  */
3726 gsi = gsi_last (stmts);
3727 if (!gsi_end_p (gsi)
3728 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3730 gimple ret_stmt = gsi_stmt (gsi);
3732 gcc_assert (single_succ_p (bb));
3733 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3735 if (bb->next_bb == EXIT_BLOCK_PTR
3736 && !gimple_return_retval (ret_stmt))
3738 gsi_remove (&gsi, false);
3739 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3743 gsi = gsi_start (stmts);
3744 if (!gsi_end_p (gsi))
3746 stmt = gsi_stmt (gsi);
3747 if (gimple_code (stmt) != GIMPLE_LABEL)
3751 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3755 last = get_last_insn ();
3759 expand_gimple_stmt (stmt);
3764 emit_label ((rtx) *elt);
3766 	  /* Java emits line number notes at the top of labels.
3767 ??? Make this go away once line number notes are obsoleted. */
3768 BB_HEAD (bb) = NEXT_INSN (last);
3769 if (NOTE_P (BB_HEAD (bb)))
3770 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3771 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3773 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3776 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3778 NOTE_BASIC_BLOCK (note) = bb;
3780 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3784 stmt = gsi_stmt (gsi);
3786 /* If this statement is a non-debug one, and we generate debug
3787 insns, then this one might be the last real use of a TERed
3788 SSA_NAME, but where there are still some debug uses further
3789 down. Expanding the current SSA name in such further debug
3790 uses by their RHS might lead to wrong debug info, as coalescing
3791 might make the operands of such RHS be placed into the same
3792 pseudo as something else. Like so:
3793 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3797 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3798 	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3799 	 the write to a_2 would actually have clobbered the place which
3800 	 would have been remembered.
3802 	 So, instead of that, we recognize the situation, and generate
3803 	 debug temporaries at the last real use of TERed SSA names:
3804 	   a_1 = a_0 + 1;
3805 	   #DEBUG #D1 => a_1
3806 	   use(a_1);
3807 	   a_2 = ...;
3808 	   #DEBUG ... => #D1  */
3810 if (MAY_HAVE_DEBUG_INSNS
3812 && !is_gimple_debug (stmt))
3818 location_t sloc = get_curr_insn_source_location ();
3819 tree sblock = get_curr_insn_block ();
3821 /* Look for SSA names that have their last use here (TERed
3822 names always have only one real use). */
3823 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3824 if ((def = get_gimple_for_ssa_name (op)))
3826 imm_use_iterator imm_iter;
3827 use_operand_p use_p;
3828 bool have_debug_uses = false;
3830 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3832 if (gimple_debug_bind_p (USE_STMT (use_p)))
3834 have_debug_uses = true;
3839 if (have_debug_uses)
3841 		/* OP is a TERed SSA name, with DEF its defining
3842 statement, and where OP is used in further debug
3843 instructions. Generate a debug temporary, and
3844 replace all uses of OP in debug insns with that
3847 tree value = gimple_assign_rhs_to_tree (def);
3848 tree vexpr = make_node (DEBUG_EXPR_DECL);
3850 enum machine_mode mode;
3852 set_curr_insn_source_location (gimple_location (def));
3853 set_curr_insn_block (gimple_block (def));
3855 DECL_ARTIFICIAL (vexpr) = 1;
3856 TREE_TYPE (vexpr) = TREE_TYPE (value);
3858 mode = DECL_MODE (value);
3860 mode = TYPE_MODE (TREE_TYPE (value));
3861 DECL_MODE (vexpr) = mode;
3863 val = gen_rtx_VAR_LOCATION
3864 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3866 emit_debug_insn (val);
3868 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3870 if (!gimple_debug_bind_p (debugstmt))
3873 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3874 SET_USE (use_p, vexpr);
3876 update_stmt (debugstmt);
3880 set_curr_insn_source_location (sloc);
3881 set_curr_insn_block (sblock);
3884 currently_expanding_gimple_stmt = stmt;
3886 /* Expand this statement, then evaluate the resulting RTL and
3887 fixup the CFG accordingly. */
3888 if (gimple_code (stmt) == GIMPLE_COND)
3890 new_bb = expand_gimple_cond (bb, stmt);
3894 else if (gimple_debug_bind_p (stmt))
3896 location_t sloc = get_curr_insn_source_location ();
3897 tree sblock = get_curr_insn_block ();
3898 gimple_stmt_iterator nsi = gsi;
3902 tree var = gimple_debug_bind_get_var (stmt);
3905 enum machine_mode mode;
3907 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3908 && TREE_CODE (var) != LABEL_DECL
3909 && !target_for_debug_bind (var))
3910 goto delink_debug_stmt;
3912 if (gimple_debug_bind_has_value_p (stmt))
3913 value = gimple_debug_bind_get_value (stmt);
3917 last = get_last_insn ();
3919 set_curr_insn_source_location (gimple_location (stmt));
3920 set_curr_insn_block (gimple_block (stmt));
3923 mode = DECL_MODE (var);
3925 mode = TYPE_MODE (TREE_TYPE (var));
3927 val = gen_rtx_VAR_LOCATION
3928 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3930 emit_debug_insn (val);
3932 if (dump_file && (dump_flags & TDF_DETAILS))
3934 	      /* We can't dump the insn with a TREE where an RTX
3935 		 is expected.  */
3936 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3937 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3938 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3942 /* In order not to generate too many debug temporaries,
3943 we delink all uses of debug statements we already expanded.
3944 Therefore debug statements between definition and real
3945 use of TERed SSA names will continue to use the SSA name,
3946 and not be replaced with debug temps. */
3947 delink_stmt_imm_use (stmt);
3951 if (gsi_end_p (nsi))
3953 stmt = gsi_stmt (nsi);
3954 if (!gimple_debug_bind_p (stmt))
3958 set_curr_insn_source_location (sloc);
3959 set_curr_insn_block (sblock);
3961 else if (gimple_debug_source_bind_p (stmt))
3963 location_t sloc = get_curr_insn_source_location ();
3964 tree sblock = get_curr_insn_block ();
3965 tree var = gimple_debug_source_bind_get_var (stmt);
3966 tree value = gimple_debug_source_bind_get_value (stmt);
3968 enum machine_mode mode;
3970 last = get_last_insn ();
3972 set_curr_insn_source_location (gimple_location (stmt));
3973 set_curr_insn_block (gimple_block (stmt));
3975 mode = DECL_MODE (var);
3977 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3978 VAR_INIT_STATUS_UNINITIALIZED);
3980 emit_debug_insn (val);
3982 if (dump_file && (dump_flags & TDF_DETAILS))
3984 	      /* We can't dump the insn with a TREE where an RTX
3985 		 is expected.  */
3986 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3987 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3988 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3991 set_curr_insn_source_location (sloc);
3992 set_curr_insn_block (sblock);
3996 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3999 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4010 def_operand_p def_p;
4011 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4015 /* Ignore this stmt if it is in the list of
4016 replaceable expressions. */
4018 && bitmap_bit_p (SA.values,
4019 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4022 last = expand_gimple_stmt (stmt);
4023 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4028 currently_expanding_gimple_stmt = NULL;
4030 /* Expand implicit goto and convert goto_locus. */
4031 FOR_EACH_EDGE (e, ei, bb->succs)
4033 if (e->goto_locus && e->goto_block)
4035 set_curr_insn_source_location (e->goto_locus);
4036 set_curr_insn_block (e->goto_block);
4037 e->goto_locus = curr_insn_locator ();
4039 e->goto_block = NULL;
4040 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4042 emit_jump (label_rtx_for_bb (e->dest));
4043 e->flags &= ~EDGE_FALLTHRU;
4047   /* Expanded RTL can create a jump in the last instruction of the block.
4048      This might later be taken for a jump to the successor and break edge
4049      insertion.  We need to insert a dummy move to prevent this.  PR41440.  */
4050 if (single_succ_p (bb)
4051 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4052       && (last = get_last_insn ())
4053       && JUMP_P (last))
4055 rtx dummy = gen_reg_rtx (SImode);
4056 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4059 do_pending_stack_adjust ();
4061 /* Find the block tail. The last insn in the block is the insn
4062 before a barrier and/or table jump insn. */
4063 last = get_last_insn ();
4064 if (BARRIER_P (last))
4065 last = PREV_INSN (last);
4066 if (JUMP_TABLE_DATA_P (last))
4067 last = PREV_INSN (PREV_INSN (last));
4070 update_bb_for_insn (bb);
4076 /* Create a basic block for initialization code. */
4079 construct_init_block (void)
4081 basic_block init_block, first_block;
4085 /* Multiple entry points not supported yet. */
4086 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4087 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4088 init_rtl_bb_info (EXIT_BLOCK_PTR);
4089 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4090 EXIT_BLOCK_PTR->flags |= BB_RTL;
4092 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4094   /* When the entry edge points to the first basic block, we don't need
4095      a jump; otherwise we have to jump to the proper target.  */
4096 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4098 tree label = gimple_block_label (e->dest);
4100 emit_jump (label_rtx (label));
4104 flags = EDGE_FALLTHRU;
4106 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4109 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4110 init_block->count = ENTRY_BLOCK_PTR->count;
4113 first_block = e->dest;
4114 redirect_edge_succ (e, init_block);
4115 e = make_edge (init_block, first_block, flags);
4118 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4119 e->probability = REG_BR_PROB_BASE;
4120 e->count = ENTRY_BLOCK_PTR->count;
4122 update_bb_for_insn (init_block);
4126 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4127 found in the block tree. */
4130 set_block_levels (tree block, int level)
4134 BLOCK_NUMBER (block) = level;
4135 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4136 block = BLOCK_CHAIN (block);
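/* E.g. the function's outermost scope gets depth 0, a block nested
   directly within it gets depth 1, and so on; sibling blocks share the
   same depth.  */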
4140 /* Create a block containing landing pads and similar stuff. */
4143 construct_exit_block (void)
4145 rtx head = get_last_insn ();
4147 basic_block exit_block;
4151 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4153 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4155 /* Make sure the locus is set to the end of the function, so that
4156 epilogue line numbers and warnings are set properly. */
4157 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4158 input_location = cfun->function_end_locus;
4160 /* The following insns belong to the top scope. */
4161 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4163 /* Generate rtl for function exit. */
4164 expand_function_end ();
4166 end = get_last_insn ();
4169   /* While emitting the function end we could have moved the end of the
4170      last basic block; restore it.  */
4171 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4172 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4173 head = NEXT_INSN (head);
4174 exit_block = create_basic_block (NEXT_INSN (head), end,
4175 EXIT_BLOCK_PTR->prev_bb);
4176 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4177 exit_block->count = EXIT_BLOCK_PTR->count;
4180 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4182 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4183 if (!(e->flags & EDGE_ABNORMAL))
4184 redirect_edge_succ (e, exit_block);
4189 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4190 e->probability = REG_BR_PROB_BASE;
4191 e->count = EXIT_BLOCK_PTR->count;
4192 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4195 e->count -= e2->count;
4196 exit_block->count -= e2->count;
4197 exit_block->frequency -= EDGE_FREQUENCY (e2);
4201 if (exit_block->count < 0)
4202 exit_block->count = 0;
4203 if (exit_block->frequency < 0)
4204 exit_block->frequency = 0;
4205 update_bb_for_insn (exit_block);
4208 /* Helper function for discover_nonconstant_array_refs.
4209    Look for ARRAY_REF nodes with non-constant indexes and mark them
4210    addressable.  */
4213 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4214 void *data ATTRIBUTE_UNUSED)
4218 if (IS_TYPE_OR_DECL_P (t))
4220 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4222 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4223 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4224 && (!TREE_OPERAND (t, 2)
4225 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4226 || (TREE_CODE (t) == COMPONENT_REF
4227 && (!TREE_OPERAND (t,2)
4228 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4229 || TREE_CODE (t) == BIT_FIELD_REF
4230 || TREE_CODE (t) == REALPART_EXPR
4231 || TREE_CODE (t) == IMAGPART_EXPR
4232 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4233 || CONVERT_EXPR_P (t))
4234 t = TREE_OPERAND (t, 0);
4236 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4238 t = get_base_address (t);
4240 && DECL_MODE (t) != BLKmode)
4241 TREE_ADDRESSABLE (t) = 1;
4250 /* RTL expansion is not able to compile array references with variable
4251    offsets for arrays stored in a single register.  Discover such
4252    expressions and mark variables as addressable to avoid this
4253    scenario.  */
4256 discover_nonconstant_array_refs (void)
4259 gimple_stmt_iterator gsi;
4262 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4264 gimple stmt = gsi_stmt (gsi);
4265 if (!is_gimple_debug (stmt))
4266 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
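/* An illustration (not from the original source):

     int v[2];
     ...
     return v[i];

   V might otherwise be allocated to a single register; marking it
   addressable forces it into memory, where the variable index I can be
   expanded as an address computation.  */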
4270 /* This function sets crtl->args.internal_arg_pointer to a virtual
4271    register if DRAP is needed.  The local register allocator will replace
4272    virtual_incoming_args_rtx with the virtual register.  */
4275 expand_stack_alignment (void)
4278 unsigned int preferred_stack_boundary;
4280 if (! SUPPORTS_STACK_ALIGNMENT)
4283 if (cfun->calls_alloca
4284 || cfun->has_nonlocal_label
4285 || crtl->has_nonlocal_goto)
4286 crtl->need_drap = true;
4288 /* Call update_stack_boundary here again to update incoming stack
4289 boundary. It may set incoming stack alignment to a different
4290 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4291 use the minimum incoming stack alignment to check if it is OK
4292 to perform sibcall optimization since sibcall optimization will
4293 only align the outgoing stack to incoming stack boundary. */
4294 if (targetm.calls.update_stack_boundary)
4295 targetm.calls.update_stack_boundary ();
4297 /* The incoming stack frame has to be aligned at least at
4298 parm_stack_boundary. */
4299 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4301 /* Update crtl->stack_alignment_estimated and use it later to align
4302 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4303      exceptions since callgraph doesn't collect incoming stack alignment
4304      in this case.  */
4305 if (cfun->can_throw_non_call_exceptions
4306 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4307 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4309 preferred_stack_boundary = crtl->preferred_stack_boundary;
4310 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4311 crtl->stack_alignment_estimated = preferred_stack_boundary;
4312 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4313 crtl->stack_alignment_needed = preferred_stack_boundary;
4315 gcc_assert (crtl->stack_alignment_needed
4316 <= crtl->stack_alignment_estimated);
4318 crtl->stack_realign_needed
4319 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4320 crtl->stack_realign_tried = crtl->stack_realign_needed;
4322 crtl->stack_realign_processed = true;
4324   /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
4325      realignment.  */
4326 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4327 drap_rtx = targetm.calls.get_drap_rtx ();
4329 /* stack_realign_drap and drap_rtx must match. */
4330 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4332 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4333 if (NULL != drap_rtx)
4335 crtl->args.internal_arg_pointer = drap_rtx;
4337       /* Call fixup_tail_calls to clean up REG_EQUIV notes if DRAP is
4338 	 needed.  */
4339 fixup_tail_calls ();
4343 /* Translate the intermediate representation contained in the CFG
4344 from GIMPLE trees to RTL.
4346 We do conversion per basic block and preserve/update the tree CFG.
4347 This implies we have to do some magic as the CFG can simultaneously
4348 consist of basic blocks containing RTL and GIMPLE trees. This can
4349    confuse the CFG hooks, so be careful not to manipulate the CFG during
4350    the expansion.  */
4353 gimple_expand_cfg (void)
4355 basic_block bb, init_block;
4362 timevar_push (TV_OUT_OF_SSA);
4363 rewrite_out_of_ssa (&SA);
4364 timevar_pop (TV_OUT_OF_SSA);
4365 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4368 /* Some backends want to know that we are expanding to RTL. */
4369 currently_expanding_to_rtl = 1;
4371 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4373 insn_locators_alloc ();
4374 if (!DECL_IS_BUILTIN (current_function_decl))
4376 /* Eventually, all FEs should explicitly set function_start_locus. */
4377 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4378 set_curr_insn_source_location
4379 (DECL_SOURCE_LOCATION (current_function_decl));
4381 set_curr_insn_source_location (cfun->function_start_locus);
4384 set_curr_insn_source_location (UNKNOWN_LOCATION);
4385 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4386 prologue_locator = curr_insn_locator ();
4388 #ifdef INSN_SCHEDULING
4389 init_sched_attrs ();
4392   /* Make sure the first insn is a note, even if we don't want line
4393      numbers.  This makes sure the first insn will never be deleted.
4394      Also, final expects a note to appear there.  */
4395 emit_note (NOTE_INSN_DELETED);
4397 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4398 discover_nonconstant_array_refs ();
4400 targetm.expand_to_rtl_hook ();
4401 crtl->stack_alignment_needed = STACK_BOUNDARY;
4402 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4403 crtl->stack_alignment_estimated = 0;
4404 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4405 cfun->cfg->max_jumptable_ents = 0;
4407   /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
4408      of the function section at expansion time to predict the distance of calls.  */
4409 resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
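
/* Hypothetical inputs showing when the two warnings above fire under
   -fstack-protector -Wstack-protector: a variable length buffer cannot
   be covered by the guard, and a function whose largest array is below
   the --param ssp-buffer-size threshold (8 by default) gets no guard
   at all.  */
#if 0
extern void use (char *);

void
warns_vla (int n)
{
  char buf[n];		/* variable length buffer */
  use (buf);
}

void
warns_short (void)
{
  char tiny[4];		/* shorter than ssp-buffer-size */
  use (tiny);
}
#endif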

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partition representation.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 to select one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }

  /* If we have a class containing differently aligned pointers
     we need to merge those into the corresponding RTL pointer
     alignment.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;
      rtx r;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name))
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have a NULL
	     defining statement, and won't be part of the partitioning,
	     so ignore those.  */
	  || !SSA_NAME_DEF_STMT (name))
	continue;

      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;
      r = SA.partition_to_pseudo[part];
      if (REG_P (r))
	mark_reg_pointer (r, get_pointer_alignment (name));
    }
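
/* A minimal sketch of the interface used just above: alignments are
   given in bits, so a pseudo known to point at 32-byte aligned storage
   would be recorded like this (example_mark_pointer is hypothetical).  */
#if 0
static void
example_mark_pointer (void)
{
  rtx example_reg = gen_reg_rtx (Pmode);
  mark_reg_pointer (example_reg, 256);	/* 256 bits == 32 bytes */
}
#endif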

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
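  /* Expand each block in turn.  expand_gimple_basic_block may split its
     argument (for tail calls and the like) and returns the last block
     it produced, so the walk resumes after everything it emitted.  */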
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     to split edges, which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Avoid putting insns before parm_birth_insn.  */
	      if (e->src == ENTRY_BLOCK_PTR
		  && single_succ_p (ENTRY_BLOCK_PTR)
		  && parm_birth_insn)
		{
		  rtx insns = e->insns.r;
		  e->insns.r = NULL_RTX;
		  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
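
  /* Expansion may have created new jumps and labels inside what used to
     be single basic blocks, so rescan every block and split it into
     proper sub-basic-blocks; this also rediscovers the abnormal edges
     removed just above.  */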
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }
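
/* Illustrative input (GNU C nested functions; hypothetical example):
   when "inner" is being emitted, the loop above also marks its parent
   "outer" as referenced, so the debug info scope chain stays intact.  */
#if 0
int
outer (int x)
{
  int inner (int y) { return x + y; }
  return inner (1);
}
#endif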

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
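
/* A hedged sketch (illustration only; see the actual static helper for
   the authoritative version) of the depth numbering set_block_levels is
   expected to perform: every BLOCK records its nesting depth, so
   change_scope can walk two scopes upward in lock step until they meet
   at their common parent.  */
#if 0
static void
set_block_levels_sketch (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels_sketch (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
#endif
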
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);

  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_ggc_collect			/* todo_flags_finish */
 }
};
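
/* For orientation: the pass manager instantiates this descriptor at the
   GIMPLE-to-RTL boundary of the pipeline.  In passes.c this looks
   roughly like the following (a sketch, not the full pass list).  */
#if 0
  NEXT_PASS (pass_expand);
#endif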