1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
47 #include "ssaexpand.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees to RTL.  */
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 static rtx expand_debug_expr (tree);
62 /* Return an expression tree corresponding to the RHS of GIMPLE statement STMT.  */
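/* Illustrative sketch (not part of the pass): for a binary GIMPLE
   assignment such as

       x_1 = a_2 + b_3;

   the RHS class is GIMPLE_BINARY_RHS, so the function below builds
   roughly

       build2 (PLUS_EXPR, TREE_TYPE (x_1), a_2, b_3);

   i.e. a GENERIC tree carrying the same operation and operands.  */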
66 gimple_assign_rhs_to_tree (gimple stmt)
69 enum gimple_rhs_class grhs_class;
71 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
73 if (grhs_class == GIMPLE_TERNARY_RHS)
74 t = build3 (gimple_assign_rhs_code (stmt),
75 TREE_TYPE (gimple_assign_lhs (stmt)),
76 gimple_assign_rhs1 (stmt),
77 gimple_assign_rhs2 (stmt),
78 gimple_assign_rhs3 (stmt));
79 else if (grhs_class == GIMPLE_BINARY_RHS)
80 t = build2 (gimple_assign_rhs_code (stmt),
81 TREE_TYPE (gimple_assign_lhs (stmt)),
82 gimple_assign_rhs1 (stmt),
83 gimple_assign_rhs2 (stmt));
84 else if (grhs_class == GIMPLE_UNARY_RHS)
85 t = build1 (gimple_assign_rhs_code (stmt),
86 TREE_TYPE (gimple_assign_lhs (stmt)),
87 gimple_assign_rhs1 (stmt));
88 else if (grhs_class == GIMPLE_SINGLE_RHS)
90 t = gimple_assign_rhs1 (stmt);
91 /* Avoid modifying this tree in place below. */
92 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
93 && gimple_location (stmt) != EXPR_LOCATION (t))
94 || (gimple_block (stmt)
95 && currently_expanding_to_rtl
97 && gimple_block (stmt) != TREE_BLOCK (t)))
103 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
104 SET_EXPR_LOCATION (t, gimple_location (stmt));
105 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
106 TREE_BLOCK (t) = gimple_block (stmt);
112 #ifndef STACK_ALIGNMENT_NEEDED
113 #define STACK_ALIGNMENT_NEEDED 1
116 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
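/* For example (names purely illustrative): given an SSA name i_3 whose
   SSA_NAME_VAR is the VAR_DECL `i', SSAVAR (i_3) yields `i'; applied to
   a plain decl, SSAVAR is the identity.  */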
118 /* Associate declaration T with storage space X. If T is not an
119 SSA name this is exactly SET_DECL_RTL; otherwise make the
120 partition of T associated with X. */
122 set_rtl (tree t, rtx x)
124 if (TREE_CODE (t) == SSA_NAME)
126 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
128 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
129 /* For the benefit of debug information at -O0 (where vartracking
130 doesn't run) record the place also in the base DECL if it's
131 a normal variable (not a parameter). */
132 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
134 tree var = SSA_NAME_VAR (t);
135 /* If we don't yet have something recorded, just record it now. */
136 if (!DECL_RTL_SET_P (var))
137 SET_DECL_RTL (var, x);
138 /* If we have it set already to "multiple places" don't change this.  */
140 else if (DECL_RTL (var) == pc_rtx)
142 /* If we have something recorded and it's not the same place
143 as we want to record now, we have multiple partitions for the
144 same base variable, with different places. We can't just
145 randomly choose one, hence we have to say that we don't know.
146 This only happens with optimization, and there var-tracking
147 will figure out the right thing. */
148 else if (DECL_RTL (var) != x)
149 SET_DECL_RTL (var, pc_rtx);
156 /* This structure holds data relevant to one variable that will be
157 placed in a stack slot. */
163 /* Initially, the size of the variable. Later, the size of the partition,
164 if this variable becomes its partition's representative. */
167 /* The *byte* alignment required for this variable. Or, as with the
168 size, the alignment for this partition. */
171 /* The partition representative. */
172 size_t representative;
174 /* The next stack variable in the partition, or EOC. */
177 /* The numbers of conflicting stack variables. */
181 #define EOC ((size_t)-1)
183 /* We have an array of such objects while deciding allocation. */
184 static struct stack_var *stack_vars;
185 static size_t stack_vars_alloc;
186 static size_t stack_vars_num;
187 static struct pointer_map_t *decl_to_stack_part;
189 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
190 is non-decreasing. */
191 static size_t *stack_vars_sorted;
193 /* The phase of the stack frame. This is the known misalignment of
194 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
195 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
196 static int frame_phase;
198 /* Used during expand_used_vars to remember if we saw any decls for
199 which we'd like to enable stack smashing protection. */
200 static bool has_protected_decls;
202 /* Used during expand_used_vars. Remember if we saw a character buffer
203 smaller than our cutoff threshold. Used for -Wstack-protector. */
204 static bool has_short_buffer;
206 /* Compute the byte alignment to use for DECL. Ignore any alignment
207 we cannot honor given the expected alignment of the stack boundary. */
210 align_local_variable (tree decl)
212 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213 DECL_ALIGN (decl) = align;
214 return align / BITS_PER_UNIT;
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218 Return the frame offset. */
221 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
223 HOST_WIDE_INT offset, new_frame_offset;
225 new_frame_offset = frame_offset;
226 if (FRAME_GROWS_DOWNWARD)
228 new_frame_offset -= size + frame_phase;
229 new_frame_offset &= -align;
230 new_frame_offset += frame_phase;
231 offset = new_frame_offset;
235 new_frame_offset -= frame_phase;
236 new_frame_offset += align - 1;
237 new_frame_offset &= -align;
238 new_frame_offset += frame_phase;
239 offset = new_frame_offset;
240 new_frame_offset += size;
242 frame_offset = new_frame_offset;
244 if (frame_offset_overflow (frame_offset, cfun->decl))
245 frame_offset = offset = 0;
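/* Worked example (illustrative, assuming FRAME_GROWS_DOWNWARD and
   frame_phase == 0): with frame_offset == -20, size == 12 and align == 8,
   new_frame_offset becomes -32 after subtracting the size and rounding
   down to a multiple of 8, so the new object occupies bytes [-32, -20)
   of the frame and -32 is the offset handed back to the caller.  */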
250 /* Accumulate DECL into STACK_VARS. */
253 add_stack_var (tree decl)
257 if (stack_vars_num >= stack_vars_alloc)
259 if (stack_vars_alloc)
260 stack_vars_alloc = stack_vars_alloc * 3 / 2;
262 stack_vars_alloc = 32;
264 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
266 if (!decl_to_stack_part)
267 decl_to_stack_part = pointer_map_create ();
269 v = &stack_vars[stack_vars_num];
270 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
273 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274 /* Ensure that all variables have size, so that &a != &b for any two
275 variables that are simultaneously live. */
278 v->alignb = align_local_variable (SSAVAR (decl));
279 /* An alignment of zero can mightily confuse us later. */
280 gcc_assert (v->alignb != 0);
282 /* All variables are initially in their own partition. */
283 v->representative = stack_vars_num;
286 /* All variables initially conflict with no other. */
289 /* Ensure that this decl doesn't get put onto the list twice. */
290 set_rtl (decl, pc_rtx);
295 /* Make the decls associated with luids X and Y conflict. */
298 add_stack_var_conflict (size_t x, size_t y)
300 struct stack_var *a = &stack_vars[x];
301 struct stack_var *b = &stack_vars[y];
303 a->conflicts = BITMAP_ALLOC (NULL);
305 b->conflicts = BITMAP_ALLOC (NULL);
306 bitmap_set_bit (a->conflicts, y);
307 bitmap_set_bit (b->conflicts, x);
310 /* Check whether the decls associated with luids X and Y conflict. */
313 stack_var_conflict_p (size_t x, size_t y)
315 struct stack_var *a = &stack_vars[x];
316 struct stack_var *b = &stack_vars[y];
319 /* Partitions containing an SSA name result from gimple registers
320 with things like unsupported modes. They are top-level and
321 hence conflict with everything else. */
322 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
325 if (!a->conflicts || !b->conflicts)
327 return bitmap_bit_p (a->conflicts, y);
330 /* Returns true if TYPE is or contains a union type. */
333 aggregate_contains_union_type (tree type)
337 if (TREE_CODE (type) == UNION_TYPE
338 || TREE_CODE (type) == QUAL_UNION_TYPE)
340 if (TREE_CODE (type) == ARRAY_TYPE)
341 return aggregate_contains_union_type (TREE_TYPE (type));
342 if (TREE_CODE (type) != RECORD_TYPE)
345 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
346 if (TREE_CODE (field) == FIELD_DECL)
347 if (aggregate_contains_union_type (TREE_TYPE (field)))
353 /* A subroutine of expand_used_vars. If two variables X and Y have alias
354 sets that do not conflict, then add a conflict for these variables
355 in the interference graph. We also need to make sure to add conflicts
356 for union-containing structures. Otherwise RTL alias analysis comes along
357 and due to type based aliasing rules decides that for two overlapping
358 union temporaries { short s; int i; } accesses to the same mem through
359 different types may not alias and happily reorders stores across
360 life-time boundaries of the temporaries (See PR25654).
361 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
364 add_alias_set_conflicts (void)
366 size_t i, j, n = stack_vars_num;
368 for (i = 0; i < n; ++i)
370 tree type_i = TREE_TYPE (stack_vars[i].decl);
371 bool aggr_i = AGGREGATE_TYPE_P (type_i);
374 contains_union = aggregate_contains_union_type (type_i);
375 for (j = 0; j < i; ++j)
377 tree type_j = TREE_TYPE (stack_vars[j].decl);
378 bool aggr_j = AGGREGATE_TYPE_P (type_j);
380 /* Either the objects conflict by means of type based
381 aliasing rules, or we need to add a conflict. */
382 || !objects_must_conflict_p (type_i, type_j)
383 /* In case the types do not conflict ensure that access
384 to elements will conflict. In case of unions we have
385 to be careful as type based aliasing rules may say
386 access to the same memory does not conflict. So play
387 safe and add a conflict in this case when
388 -fstrict-aliasing is used. */
389 || (contains_union && flag_strict_aliasing))
390 add_stack_var_conflict (i, j);
395 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
396 enter its partition number into bitmap DATA. */
399 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
401 bitmap active = (bitmap)data;
402 op = get_base_address (op);
405 && DECL_RTL_IF_SET (op) == pc_rtx)
407 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
409 bitmap_set_bit (active, *v);
414 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
415 record conflicts between it and all currently active other partitions from bitmap DATA.  */
419 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
421 bitmap active = (bitmap)data;
422 op = get_base_address (op);
425 && DECL_RTL_IF_SET (op) == pc_rtx)
428 (size_t *) pointer_map_contains (decl_to_stack_part, op);
429 if (v && bitmap_set_bit (active, *v))
434 gcc_assert (num < stack_vars_num);
435 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
436 add_stack_var_conflict (num, i);
442 /* Helper routine for add_scope_conflicts, calculating the active partitions
443 at the end of BB, leaving the result in WORK. We're called to generate
444 conflicts when OLD_CONFLICTS is non-null, otherwise we're just tracking
445 liveness. If we generate conflicts then OLD_CONFLICTS stores the bits
446 for which we generated conflicts already. */
449 add_scope_conflicts_1 (basic_block bb, bitmap work, bitmap old_conflicts)
453 gimple_stmt_iterator gsi;
454 bool (*visit)(gimple, tree, void *);
457 FOR_EACH_EDGE (e, ei, bb->preds)
458 bitmap_ior_into (work, (bitmap)e->src->aux);
462 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
464 gimple stmt = gsi_stmt (gsi);
465 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
467 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
469 gimple stmt = gsi_stmt (gsi);
471 if (gimple_clobber_p (stmt))
473 tree lhs = gimple_assign_lhs (stmt);
475 /* Nested function lowering might introduce LHSs
476 that are COMPONENT_REFs. */
477 if (TREE_CODE (lhs) != VAR_DECL)
479 if (DECL_RTL_IF_SET (lhs) == pc_rtx
481 pointer_map_contains (decl_to_stack_part, lhs)))
482 bitmap_clear_bit (work, *v);
484 else if (!is_gimple_debug (stmt))
487 && visit == visit_op)
489 /* If this is the first real instruction in this BB we need
490 to add conflicts for everything live at this point now.
491 Unlike classical liveness for named objects we can't
492 rely on seeing a def/use of the names we're interested in.
493 There might merely be indirect loads/stores. We'd not add any
494 conflicts for such partitions. We know that we generated
495 conflicts between all partitions in old_conflicts already,
496 so we need to generate only the new ones, avoiding
497 repeatedly paying the O(N^2) cost for each basic block. */
501 EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, 0, i, bi)
505 /* First the conflicts between new and old_conflicts. */
506 EXECUTE_IF_SET_IN_BITMAP (old_conflicts, 0, j, bj)
507 add_stack_var_conflict (i, j);
508 /* Then the conflicts between only the new members. */
509 EXECUTE_IF_AND_COMPL_IN_BITMAP (work, old_conflicts, i + 1,
511 add_stack_var_conflict (i, j);
513 /* And remember for the next basic block. */
514 bitmap_ior_into (old_conflicts, work);
515 visit = visit_conflict;
517 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
522 /* Generate stack partition conflicts between all partitions that are
523 simultaneously live. */
526 add_scope_conflicts (void)
530 bitmap work = BITMAP_ALLOC (NULL);
531 bitmap old_conflicts;
533 /* We approximate the live range of a stack variable by taking the first
534 mention of its name as starting point(s), and by the end-of-scope
535 death clobber added by gimplify as ending point(s) of the range.
536 This overapproximates in the case where, for instance, an address-taken
537 operation was moved upward without also moving a dereference of it upward.
538 But it's conservatively correct, as a variable can never hold values
539 before its name is mentioned at least once.
541 We then do a mostly classical bitmap liveness algorithm. */
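/* Illustrative example (not from the source): in

       { char a[100]; use (a); }
       { char b[100]; use (b); }

   the gimplifier emits a clobber for `a' at the end of the first block,
   so `a' is no longer live when `b' is first mentioned; the two are never
   in WORK at the same time, no conflict is recorded between them, and
   they may later share a single stack slot.  */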
544 bb->aux = BITMAP_ALLOC (NULL);
552 bitmap active = (bitmap)bb->aux;
553 add_scope_conflicts_1 (bb, work, NULL);
554 if (bitmap_ior_into (active, work))
559 old_conflicts = BITMAP_ALLOC (NULL);
562 add_scope_conflicts_1 (bb, work, old_conflicts);
564 BITMAP_FREE (old_conflicts);
567 BITMAP_FREE (bb->aux);
570 /* A subroutine of partition_stack_vars. A comparison function for qsort,
571 sorting an array of indices by the properties of the object. */
574 stack_var_cmp (const void *a, const void *b)
576 size_t ia = *(const size_t *)a;
577 size_t ib = *(const size_t *)b;
578 unsigned int aligna = stack_vars[ia].alignb;
579 unsigned int alignb = stack_vars[ib].alignb;
580 HOST_WIDE_INT sizea = stack_vars[ia].size;
581 HOST_WIDE_INT sizeb = stack_vars[ib].size;
582 tree decla = stack_vars[ia].decl;
583 tree declb = stack_vars[ib].decl;
585 unsigned int uida, uidb;
587 /* Primary compare on "large" alignment. Large comes first. */
588 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
589 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
590 if (largea != largeb)
591 return (int)largeb - (int)largea;
593 /* Secondary compare on size, decreasing. */
599 /* Tertiary compare on true alignment, decreasing. */
605 /* Final compare on ID for sort stability, increasing.
606 Two SSA names are compared by their version, SSA names come before
607 non-SSA names, and two normal decls are compared by their DECL_UID. */
608 if (TREE_CODE (decla) == SSA_NAME)
610 if (TREE_CODE (declb) == SSA_NAME)
611 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
615 else if (TREE_CODE (declb) == SSA_NAME)
618 uida = DECL_UID (decla), uidb = DECL_UID (declb);
627 /* If the points-to solution *PI points to variables that are in a partition
628 together with other variables add all partition members to the pointed-to variables bitmap.  */
632 add_partitioned_vars_to_ptset (struct pt_solution *pt,
633 struct pointer_map_t *decls_to_partitions,
634 struct pointer_set_t *visited, bitmap temp)
642 /* The pointed-to vars bitmap is shared, it is enough to visit it once.  */
644 || pointer_set_insert (visited, pt->vars))
649 /* By using a temporary bitmap to store all members of the partitions
650 we have to add, we make sure to visit each of the partitions only once.
652 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
654 || !bitmap_bit_p (temp, i))
655 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
656 (void *)(size_t) i)))
657 bitmap_ior_into (temp, *part);
658 if (!bitmap_empty_p (temp))
659 bitmap_ior_into (pt->vars, temp);
662 /* Update points-to sets based on partition info, so we can use them on RTL.
663 The bitmaps representing stack partitions will be saved until expand,
664 where partitioned decls used as bases in memory expressions will be rewritten.  */
668 update_alias_info_with_stack_vars (void)
670 struct pointer_map_t *decls_to_partitions = NULL;
672 tree var = NULL_TREE;
674 for (i = 0; i < stack_vars_num; i++)
678 struct ptr_info_def *pi;
680 /* Not interested in partitions with a single variable. */
681 if (stack_vars[i].representative != i
682 || stack_vars[i].next == EOC)
685 if (!decls_to_partitions)
687 decls_to_partitions = pointer_map_create ();
688 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
691 /* Create an SSA_NAME that points to the partition for use
692 as base during alias-oracle queries on RTL for bases that
693 have been partitioned. */
694 if (var == NULL_TREE)
695 var = create_tmp_var (ptr_type_node, NULL);
696 name = make_ssa_name (var, NULL);
698 /* Create bitmaps representing partitions. They will be used for
699 points-to sets later, so use GGC alloc. */
700 part = BITMAP_GGC_ALLOC ();
701 for (j = i; j != EOC; j = stack_vars[j].next)
703 tree decl = stack_vars[j].decl;
704 unsigned int uid = DECL_PT_UID (decl);
705 /* We should never end up partitioning SSA names (though they
706 may end up on the stack). Neither should we allocate stack
707 space to something that is unused and thus unreferenced, except
708 for -O0 where we are preserving even unreferenced variables. */
709 gcc_assert (DECL_P (decl)
711 || referenced_var_lookup (cfun, DECL_UID (decl))));
712 bitmap_set_bit (part, uid);
713 *((bitmap *) pointer_map_insert (decls_to_partitions,
714 (void *)(size_t) uid)) = part;
715 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
719 /* Make the SSA name point to all partition members. */
720 pi = get_ptr_info (name);
721 pt_solution_set (&pi->pt, part, false);
724 /* Make all points-to sets that contain one member of a partition
725 contain all members of the partition. */
726 if (decls_to_partitions)
729 struct pointer_set_t *visited = pointer_set_create ();
730 bitmap temp = BITMAP_ALLOC (NULL);
732 for (i = 1; i < num_ssa_names; i++)
734 tree name = ssa_name (i);
735 struct ptr_info_def *pi;
738 && POINTER_TYPE_P (TREE_TYPE (name))
739 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
740 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
744 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
745 decls_to_partitions, visited, temp);
747 pointer_set_destroy (visited);
748 pointer_map_destroy (decls_to_partitions);
753 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
754 partitioning algorithm. Partitions A and B are known to be non-conflicting.
755 Merge them into a single partition A. */
758 union_stack_vars (size_t a, size_t b)
760 struct stack_var *vb = &stack_vars[b];
764 gcc_assert (stack_vars[b].next == EOC);
765 /* Add B to A's partition. */
766 stack_vars[b].next = stack_vars[a].next;
767 stack_vars[b].representative = a;
768 stack_vars[a].next = b;
770 /* Update the required alignment of partition A to account for B. */
771 if (stack_vars[a].alignb < stack_vars[b].alignb)
772 stack_vars[a].alignb = stack_vars[b].alignb;
774 /* Update the interference graph and merge the conflicts. */
777 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
778 add_stack_var_conflict (a, stack_vars[u].representative);
779 BITMAP_FREE (vb->conflicts);
783 /* A subroutine of expand_used_vars. Binpack the variables into
784 partitions constrained by the interference graph. The overall
785 algorithm used is as follows:
787 Sort the objects by size in descending order.
792 Look for the largest non-conflicting object B with size <= S.
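	An illustrative run (sizes chosen only for the example): with
	partition representatives of sizes 32, 16 and 8 where only the
	32- and 16-byte objects conflict, the pass visits the 32-byte
	object first, skips the conflicting 16-byte object, merges the
	8-byte object into the 32-byte partition, and the 16-byte
	object then remains a partition of its own.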
799 partition_stack_vars (void)
801 size_t si, sj, n = stack_vars_num;
803 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
804 for (si = 0; si < n; ++si)
805 stack_vars_sorted[si] = si;
810 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
812 for (si = 0; si < n; ++si)
814 size_t i = stack_vars_sorted[si];
815 unsigned int ialign = stack_vars[i].alignb;
817 /* Ignore objects that aren't partition representatives. If we
818 see a var that is not a partition representative, it must
819 have been merged earlier. */
820 if (stack_vars[i].representative != i)
823 for (sj = si + 1; sj < n; ++sj)
825 size_t j = stack_vars_sorted[sj];
826 unsigned int jalign = stack_vars[j].alignb;
828 /* Ignore objects that aren't partition representatives. */
829 if (stack_vars[j].representative != j)
832 /* Ignore conflicting objects. */
833 if (stack_var_conflict_p (i, j))
836 /* Do not mix objects of "small" (supported) alignment
837 and "large" (unsupported) alignment. */
838 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
839 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
842 /* UNION the objects; J joins I's partition. */
843 union_stack_vars (i, j);
847 update_alias_info_with_stack_vars ();
850 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
853 dump_stack_var_partition (void)
855 size_t si, i, j, n = stack_vars_num;
857 for (si = 0; si < n; ++si)
859 i = stack_vars_sorted[si];
861 /* Skip variables that aren't partition representatives, for now. */
862 if (stack_vars[i].representative != i)
865 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
866 " align %u\n", (unsigned long) i, stack_vars[i].size,
867 stack_vars[i].alignb);
869 for (j = i; j != EOC; j = stack_vars[j].next)
871 fputc ('\t', dump_file);
872 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
874 fputc ('\n', dump_file);
878 /* Assign rtl to DECL at BASE + OFFSET. */
881 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
882 HOST_WIDE_INT offset)
887 /* If this fails, we've overflowed the stack frame. Error nicely? */
888 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
890 x = plus_constant (base, offset);
891 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
893 if (TREE_CODE (decl) != SSA_NAME)
895 /* Set alignment we actually gave this decl if it isn't an SSA name.
896 If it is, we generate stack slots only accidentally, so it isn't as
897 important; we'll simply use the alignment that is already set. */
898 if (base == virtual_stack_vars_rtx)
899 offset -= frame_phase;
900 align = offset & -offset;
901 align *= BITS_PER_UNIT;
902 if (align == 0 || align > base_align)
905 /* One would think that we could assert that we're not decreasing
906 alignment here, but (at least) the i386 port does exactly this
907 via the MINIMUM_ALIGNMENT hook. */
909 DECL_ALIGN (decl) = align;
910 DECL_USER_ALIGN (decl) = 0;
913 set_mem_attributes (x, SSAVAR (decl), true);
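/* Note on the alignment computation above (illustrative): OFFSET & -OFFSET
   isolates the lowest set bit of the offset, i.e. the largest power of two
   that divides it.  An offset of 24 bytes, for example, yields 8 bytes
   (64 bits); when that value is zero (offset 0) or exceeds BASE_ALIGN, the
   computation falls back to BASE_ALIGN instead.  */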
917 /* A subroutine of expand_used_vars. Give each partition representative
918 a unique location within the stack frame. Update each partition member
919 with that location. */
922 expand_stack_vars (bool (*pred) (tree))
924 size_t si, i, j, n = stack_vars_num;
925 HOST_WIDE_INT large_size = 0, large_alloc = 0;
926 rtx large_base = NULL;
927 unsigned large_align = 0;
930 /* Determine if there are any variables requiring "large" alignment.
931 Since these are dynamically allocated, we only process these if
932 no predicate is involved. */
933 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
934 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
936 /* Find the total size of these variables. */
937 for (si = 0; si < n; ++si)
941 i = stack_vars_sorted[si];
942 alignb = stack_vars[i].alignb;
944 /* Stop when we get to the first decl with "small" alignment. */
945 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
948 /* Skip variables that aren't partition representatives. */
949 if (stack_vars[i].representative != i)
952 /* Skip variables that have already had rtl assigned. See also
953 add_stack_var where we perpetrate this pc_rtx hack. */
954 decl = stack_vars[i].decl;
955 if ((TREE_CODE (decl) == SSA_NAME
956 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
957 : DECL_RTL (decl)) != pc_rtx)
960 large_size += alignb - 1;
961 large_size &= -(HOST_WIDE_INT)alignb;
962 large_size += stack_vars[i].size;
965 /* If there were any, allocate space. */
967 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
971 for (si = 0; si < n; ++si)
974 unsigned base_align, alignb;
975 HOST_WIDE_INT offset;
977 i = stack_vars_sorted[si];
979 /* Skip variables that aren't partition representatives, for now. */
980 if (stack_vars[i].representative != i)
983 /* Skip variables that have already had rtl assigned. See also
984 add_stack_var where we perpetrate this pc_rtx hack. */
985 decl = stack_vars[i].decl;
986 if ((TREE_CODE (decl) == SSA_NAME
987 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
988 : DECL_RTL (decl)) != pc_rtx)
991 /* Check the predicate to see whether this variable should be
992 allocated in this pass. */
993 if (pred && !pred (decl))
996 alignb = stack_vars[i].alignb;
997 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
999 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1000 base = virtual_stack_vars_rtx;
1001 base_align = crtl->max_used_stack_slot_alignment;
1005 /* Large alignment is only processed in the last pass. */
1008 gcc_assert (large_base != NULL);
1010 large_alloc += alignb - 1;
1011 large_alloc &= -(HOST_WIDE_INT)alignb;
1012 offset = large_alloc;
1013 large_alloc += stack_vars[i].size;
1016 base_align = large_align;
1019 /* Create rtl for each variable based on its location within the partition.  */
1021 for (j = i; j != EOC; j = stack_vars[j].next)
1023 expand_one_stack_var_at (stack_vars[j].decl,
1029 gcc_assert (large_alloc == large_size);
1032 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1033 static HOST_WIDE_INT
1034 account_stack_vars (void)
1036 size_t si, j, i, n = stack_vars_num;
1037 HOST_WIDE_INT size = 0;
1039 for (si = 0; si < n; ++si)
1041 i = stack_vars_sorted[si];
1043 /* Skip variables that aren't partition representatives, for now. */
1044 if (stack_vars[i].representative != i)
1047 size += stack_vars[i].size;
1048 for (j = i; j != EOC; j = stack_vars[j].next)
1049 set_rtl (stack_vars[j].decl, NULL);
1054 /* A subroutine of expand_one_var. Called to immediately assign rtl
1055 to a variable to be allocated in the stack frame. */
1058 expand_one_stack_var (tree var)
1060 HOST_WIDE_INT size, offset;
1061 unsigned byte_align;
1063 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1064 byte_align = align_local_variable (SSAVAR (var));
1066 /* We handle highly aligned variables in expand_stack_vars. */
1067 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1069 offset = alloc_stack_frame_space (size, byte_align);
1071 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1072 crtl->max_used_stack_slot_alignment, offset);
1075 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1076 that will reside in a hard register. */
1079 expand_one_hard_reg_var (tree var)
1081 rest_of_decl_compilation (var, 0, 0);
1084 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1085 that will reside in a pseudo register. */
1088 expand_one_register_var (tree var)
1090 tree decl = SSAVAR (var);
1091 tree type = TREE_TYPE (decl);
1092 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1093 rtx x = gen_reg_rtx (reg_mode);
1097 /* Note if the object is a user variable. */
1098 if (!DECL_ARTIFICIAL (decl))
1101 if (POINTER_TYPE_P (type))
1102 mark_reg_pointer (x, get_pointer_alignment (var));
1105 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1106 has some associated error, e.g. its type is error-mark. We just need
1107 to pick something that won't crash the rest of the compiler. */
1110 expand_one_error_var (tree var)
1112 enum machine_mode mode = DECL_MODE (var);
1115 if (mode == BLKmode)
1116 x = gen_rtx_MEM (BLKmode, const0_rtx);
1117 else if (mode == VOIDmode)
1120 x = gen_reg_rtx (mode);
1122 SET_DECL_RTL (var, x);
1125 /* A subroutine of expand_one_var. VAR is a variable that will be
1126 allocated to the local stack frame. Return true if we wish to
1127 add VAR to STACK_VARS so that it will be coalesced with other
1128 variables. Return false to allocate VAR immediately.
1130 This function is used to reduce the number of variables considered
1131 for coalescing, which reduces the size of the quadratic problem. */
1134 defer_stack_allocation (tree var, bool toplevel)
1136 /* If stack protection is enabled, *all* stack variables must be deferred,
1137 so that we can re-order the strings to the top of the frame. */
1138 if (flag_stack_protect)
1141 /* We handle "large" alignment via dynamic allocation. We want to handle
1142 this extra complication in only one place, so defer them. */
1143 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1146 /* Variables in the outermost scope automatically conflict with
1147 every other variable. The only reason to want to defer them
1148 at all is that, after sorting, we can more efficiently pack
1149 small variables in the stack frame. Continue to defer at -O2. */
1150 if (toplevel && optimize < 2)
1153 /* Without optimization, *most* variables are allocated from the
1154 stack, which makes the quadratic problem large exactly when we
1155 want compilation to proceed as quickly as possible. On the
1156 other hand, we don't want the function's stack frame size to
1157 get completely out of hand. So we avoid adding scalars and
1158 "small" aggregates to the list at all. */
1159 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1165 /* A subroutine of expand_used_vars. Expand one variable according to
1166 its flavor. Variables to be placed on the stack are not actually
1167 expanded yet, merely recorded.
1168 When REALLY_EXPAND is false, only add stack values to be allocated.
1169 Return the stack usage this variable is supposed to take.
1172 static HOST_WIDE_INT
1173 expand_one_var (tree var, bool toplevel, bool really_expand)
1175 unsigned int align = BITS_PER_UNIT;
1180 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1182 /* Because we don't know if VAR will be in register or on stack,
1183 we conservatively assume it will be on stack even if VAR is
1184 eventually put into register after RA pass. For non-automatic
1185 variables, which won't be on stack, we collect alignment of
1186 type and ignore user specified alignment. */
1187 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1188 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1189 TYPE_MODE (TREE_TYPE (var)),
1190 TYPE_ALIGN (TREE_TYPE (var)));
1191 else if (DECL_HAS_VALUE_EXPR_P (var)
1192 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1193 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1194 or variables which were assigned a stack slot already by
1195 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1196 changed from the offset chosen to it. */
1197 align = crtl->stack_alignment_estimated;
1199 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1201 /* If the variable alignment is very large we'll dynamically allocate
1202 it, which means that the in-frame portion is just a pointer. */
1203 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1204 align = POINTER_SIZE;
1207 if (SUPPORTS_STACK_ALIGNMENT
1208 && crtl->stack_alignment_estimated < align)
1210 /* stack_alignment_estimated shouldn't change after the stack
1211 realign decision has been made. */
1212 gcc_assert (!crtl->stack_realign_processed);
1213 crtl->stack_alignment_estimated = align;
1216 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1217 So here we only make sure stack_alignment_needed >= align. */
1218 if (crtl->stack_alignment_needed < align)
1219 crtl->stack_alignment_needed = align;
1220 if (crtl->max_used_stack_slot_alignment < align)
1221 crtl->max_used_stack_slot_alignment = align;
1223 if (TREE_CODE (origvar) == SSA_NAME)
1225 gcc_assert (TREE_CODE (var) != VAR_DECL
1226 || (!DECL_EXTERNAL (var)
1227 && !DECL_HAS_VALUE_EXPR_P (var)
1228 && !TREE_STATIC (var)
1229 && TREE_TYPE (var) != error_mark_node
1230 && !DECL_HARD_REGISTER (var)
1233 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1235 else if (DECL_EXTERNAL (var))
1237 else if (DECL_HAS_VALUE_EXPR_P (var))
1239 else if (TREE_STATIC (var))
1241 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1243 else if (TREE_TYPE (var) == error_mark_node)
1246 expand_one_error_var (var);
1248 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1251 expand_one_hard_reg_var (var);
1253 else if (use_register_for_decl (var))
1256 expand_one_register_var (origvar);
1258 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1262 error ("size of variable %q+D is too large", var);
1263 expand_one_error_var (var);
1266 else if (defer_stack_allocation (var, toplevel))
1267 add_stack_var (origvar);
1271 expand_one_stack_var (origvar);
1272 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1277 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1278 expanding variables. Those variables that can be put into registers
1279 are allocated pseudos; those that can't are put on the stack.
1281 TOPLEVEL is true if this is the outermost BLOCK. */
1284 expand_used_vars_for_block (tree block, bool toplevel)
1288 /* Expand all variables at this level. */
1289 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1291 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1292 || !DECL_NONSHAREABLE (t)))
1293 expand_one_var (t, toplevel, true);
1295 /* Expand all variables at containing levels. */
1296 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1297 expand_used_vars_for_block (t, false);
1300 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1301 and clear TREE_USED on all local variables. */
1304 clear_tree_used (tree block)
1308 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1309 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1310 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1311 || !DECL_NONSHAREABLE (t))
1314 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1315 clear_tree_used (t);
1318 /* Examine TYPE and determine a bit mask of the following features. */
1320 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1321 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1322 #define SPCT_HAS_ARRAY 4
1323 #define SPCT_HAS_AGGREGATE 8
1326 stack_protect_classify_type (tree type)
1328 unsigned int ret = 0;
1331 switch (TREE_CODE (type))
1334 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1335 if (t == char_type_node
1336 || t == signed_char_type_node
1337 || t == unsigned_char_type_node)
1339 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1340 unsigned HOST_WIDE_INT len;
1342 if (!TYPE_SIZE_UNIT (type)
1343 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1346 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1349 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1351 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1354 ret = SPCT_HAS_ARRAY;
1358 case QUAL_UNION_TYPE:
1360 ret = SPCT_HAS_AGGREGATE;
1361 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1362 if (TREE_CODE (t) == FIELD_DECL)
1363 ret |= stack_protect_classify_type (TREE_TYPE (t));
1373 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1374 part of the local stack frame. Remember if we ever return nonzero for
1375 any variable in this function. The return value is the phase number in
1376 which the variable should be allocated. */
1379 stack_protect_decl_phase (tree decl)
1381 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1384 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1385 has_short_buffer = true;
1387 if (flag_stack_protect == 2)
1389 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1390 && !(bits & SPCT_HAS_AGGREGATE))
1392 else if (bits & SPCT_HAS_ARRAY)
1396 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1399 has_protected_decls = true;
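/* Illustrative classification (assuming the default ssp-buffer-size of 8):
   with -fstack-protector, `char buf[64]' is a large character array and
   lands in phase 1, while a plain scalar returns phase 0; with
   -fstack-protector-all, a non-character array such as `int a[10]'
   additionally lands in phase 2.  */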
1404 /* Two helper routines that check for phase 1 and phase 2. These are used
1405 as callbacks for expand_stack_vars. */
1408 stack_protect_decl_phase_1 (tree decl)
1410 return stack_protect_decl_phase (decl) == 1;
1414 stack_protect_decl_phase_2 (tree decl)
1416 return stack_protect_decl_phase (decl) == 2;
1419 /* Ensure that variables in different stack protection phases conflict
1420 so that they are not merged and share the same stack slot. */
1423 add_stack_protection_conflicts (void)
1425 size_t i, j, n = stack_vars_num;
1426 unsigned char *phase;
1428 phase = XNEWVEC (unsigned char, n);
1429 for (i = 0; i < n; ++i)
1430 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1432 for (i = 0; i < n; ++i)
1434 unsigned char ph_i = phase[i];
1435 for (j = 0; j < i; ++j)
1436 if (ph_i != phase[j])
1437 add_stack_var_conflict (i, j);
1443 /* Create a decl for the guard at the top of the stack frame. */
1446 create_stack_guard (void)
1448 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1449 VAR_DECL, NULL, ptr_type_node);
1450 TREE_THIS_VOLATILE (guard) = 1;
1451 TREE_USED (guard) = 1;
1452 expand_one_stack_var (guard);
1453 crtl->stack_protect_guard = guard;
1456 /* Prepare for expanding variables. */
1458 init_vars_expansion (void)
1462 /* Set TREE_USED on all variables in the local_decls. */
1463 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1466 /* Clear TREE_USED on all variables associated with a block scope. */
1467 clear_tree_used (DECL_INITIAL (current_function_decl));
1469 /* Initialize local stack smashing state. */
1470 has_protected_decls = false;
1471 has_short_buffer = false;
1474 /* Free up stack variable graph data. */
1476 fini_vars_expansion (void)
1478 size_t i, n = stack_vars_num;
1479 for (i = 0; i < n; i++)
1480 BITMAP_FREE (stack_vars[i].conflicts);
1481 XDELETEVEC (stack_vars);
1482 XDELETEVEC (stack_vars_sorted);
1484 stack_vars_alloc = stack_vars_num = 0;
1485 pointer_map_destroy (decl_to_stack_part);
1486 decl_to_stack_part = NULL;
1489 /* Make a fair guess for the size of the stack frame of the function
1490 in NODE. This doesn't have to be exact, the result is only used in
1491 the inline heuristics. So we don't want to run the full stack var
1492 packing algorithm (which is quadratic in the number of stack vars).
1493 Instead, we calculate the total size of all stack vars. This turns
1494 out to be a pretty fair estimate -- packing of stack vars doesn't
1495 happen very often. */
1498 estimated_stack_frame_size (struct cgraph_node *node)
1500 HOST_WIDE_INT size = 0;
1503 tree old_cur_fun_decl = current_function_decl;
1504 referenced_var_iterator rvi;
1505 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1507 current_function_decl = node->decl;
1510 gcc_checking_assert (gimple_referenced_vars (fn));
1511 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1512 size += expand_one_var (var, true, false);
1514 if (stack_vars_num > 0)
1516 /* Fake sorting the stack vars for account_stack_vars (). */
1517 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1518 for (i = 0; i < stack_vars_num; ++i)
1519 stack_vars_sorted[i] = i;
1520 size += account_stack_vars ();
1521 fini_vars_expansion ();
1524 current_function_decl = old_cur_fun_decl;
1528 /* Expand all variables used in the function. */
1531 expand_used_vars (void)
1533 tree var, outer_block = DECL_INITIAL (current_function_decl);
1534 VEC(tree,heap) *maybe_local_decls = NULL;
1538 /* Compute the phase of the stack frame for this function. */
1540 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1541 int off = STARTING_FRAME_OFFSET % align;
1542 frame_phase = off ? align - off : 0;
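/* For instance (both values are target-defined, so purely illustrative):
   with a 16-byte preferred stack boundary and a STARTING_FRAME_OFFSET of 8,
   off is 8 and frame_phase becomes 8, recording the known misalignment of
   virtual_stack_vars_rtx.  */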
1545 init_vars_expansion ();
1547 for (i = 0; i < SA.map->num_partitions; i++)
1549 tree var = partition_to_var (SA.map, i);
1551 gcc_assert (is_gimple_reg (var));
1552 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1553 expand_one_var (var, true, true);
1556 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1557 contain the default def (representing the parm or result itself)
1558 we don't do anything here. But those which don't contain the
1559 default def (representing a temporary based on the parm/result)
1560 we need to allocate space just like for normal VAR_DECLs. */
1561 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1563 expand_one_var (var, true, true);
1564 gcc_assert (SA.partition_to_pseudo[i]);
1569 /* At this point all variables on the local_decls with TREE_USED
1570 set are not associated with any block scope. Lay them out. */
1572 len = VEC_length (tree, cfun->local_decls);
1573 FOR_EACH_LOCAL_DECL (cfun, i, var)
1575 bool expand_now = false;
1577 /* Expanded above already. */
1578 if (is_gimple_reg (var))
1580 TREE_USED (var) = 0;
1583 /* We didn't set a block for static or extern because it's hard
1584 to tell the difference between a global variable (re)declared
1585 in a local scope, and one that's really declared there to
1586 begin with. And it doesn't really matter much, since we're
1587 not giving them stack space. Expand them now. */
1588 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1591 /* If the variable is not associated with any block, then it
1592 was created by the optimizers, and could be live anywhere in the function.  */
1594 else if (TREE_USED (var))
1597 /* Finally, mark all variables on the list as used. We'll use
1598 this in a moment when we expand those associated with scopes. */
1599 TREE_USED (var) = 1;
1602 expand_one_var (var, true, true);
1605 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1607 rtx rtl = DECL_RTL_IF_SET (var);
1609 /* Keep artificial non-ignored vars in cfun->local_decls
1610 chain until instantiate_decls. */
1611 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1612 add_local_decl (cfun, var);
1613 else if (rtl == NULL_RTX)
1614 /* If rtl isn't set yet, which can happen e.g. with
1615 -fstack-protector, retry before returning from this function.  */
1617 VEC_safe_push (tree, heap, maybe_local_decls, var);
1621 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1623 +-----------------+-----------------+
1624 | ...processed... | ...duplicates...|
1625 +-----------------+-----------------+
1627 +-- LEN points here.
1629 We just want the duplicates, as those are the artificial
1630 non-ignored vars that we want to keep until instantiate_decls.
1631 Move them down and truncate the array. */
1632 if (!VEC_empty (tree, cfun->local_decls))
1633 VEC_block_remove (tree, cfun->local_decls, 0, len);
1635 /* At this point, all variables within the block tree with TREE_USED
1636 set are actually used by the optimized function. Lay them out. */
1637 expand_used_vars_for_block (outer_block, true);
1639 if (stack_vars_num > 0)
1641 add_scope_conflicts ();
1642 /* Due to the way alias sets work, no variables with non-conflicting
1643 alias sets may be assigned the same address. Add conflicts to reflect this.  */
1645 add_alias_set_conflicts ();
1647 /* If stack protection is enabled, we don't share space between
1648 vulnerable data and non-vulnerable data. */
1649 if (flag_stack_protect)
1650 add_stack_protection_conflicts ();
1652 /* Now that we have collected all stack variables, and have computed a
1653 minimal interference graph, attempt to save some stack space. */
1654 partition_stack_vars ();
1656 dump_stack_var_partition ();
1659 /* There are several conditions under which we should create a
1660 stack guard: protect-all, alloca used, protected decls present. */
1661 if (flag_stack_protect == 2
1662 || (flag_stack_protect
1663 && (cfun->calls_alloca || has_protected_decls)))
1664 create_stack_guard ();
1666 /* Assign rtl to each variable based on these partitions. */
1667 if (stack_vars_num > 0)
1669 /* Reorder decls to be protected by iterating over the variables
1670 array multiple times, and allocating out of each phase in turn. */
1671 /* ??? We could probably integrate this into the qsort we did
1672 earlier, such that we naturally see these variables first,
1673 and thus naturally allocate things in the right order. */
1674 if (has_protected_decls)
1676 /* Phase 1 contains only character arrays. */
1677 expand_stack_vars (stack_protect_decl_phase_1);
1679 /* Phase 2 contains other kinds of arrays. */
1680 if (flag_stack_protect == 2)
1681 expand_stack_vars (stack_protect_decl_phase_2);
1684 expand_stack_vars (NULL);
1686 fini_vars_expansion ();
1689 /* If there were any artificial non-ignored vars without rtl
1690 found earlier, see if deferred stack allocation hasn't assigned rtl to them.  */
1692 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1694 rtx rtl = DECL_RTL_IF_SET (var);
1696 /* Keep artificial non-ignored vars in cfun->local_decls
1697 chain until instantiate_decls. */
1698 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1699 add_local_decl (cfun, var);
1701 VEC_free (tree, heap, maybe_local_decls);
1703 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1704 if (STACK_ALIGNMENT_NEEDED)
1706 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1707 if (!FRAME_GROWS_DOWNWARD)
1708 frame_offset += align - 1;
1709 frame_offset &= -align;
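/* E.g. (illustrative): with a 16-byte preferred boundary and a
   downward-growing frame, a frame_offset of -52 is rounded here to -64;
   with an upward-growing frame, 52 would become 64.  */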
1714 /* If we need to produce a detailed dump, print the tree representation
1715 for STMT to the dump file. SINCE is the last RTX after which the RTL
1716 generated for STMT should have been appended. */
1719 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1721 if (dump_file && (dump_flags & TDF_DETAILS))
1723 fprintf (dump_file, "\n;; ");
1724 print_gimple_stmt (dump_file, stmt, 0,
1725 TDF_SLIM | (dump_flags & TDF_LINENO));
1726 fprintf (dump_file, "\n");
1728 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1732 /* Maps the blocks that do not contain tree labels to rtx labels. */
1734 static struct pointer_map_t *lab_rtx_for_bb;
1736 /* Returns the label_rtx expression for a label starting basic block BB. */
1739 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1741 gimple_stmt_iterator gsi;
1746 if (bb->flags & BB_RTL)
1747 return block_label (bb);
1749 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1753 /* Find the tree label if it is present. */
1755 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1757 lab_stmt = gsi_stmt (gsi);
1758 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1761 lab = gimple_label_label (lab_stmt);
1762 if (DECL_NONLOCAL (lab))
1765 return label_rtx (lab);
1768 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1769 *elt = gen_label_rtx ();
1774 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1775 of a basic block where we just expanded the conditional at the end,
1776 possibly clean up the CFG and instruction sequence. LAST is the
1777 last instruction before the just emitted jump sequence. */
1780 maybe_cleanup_end_of_block (edge e, rtx last)
1782 /* Special case: when jumpif decides that the condition is
1783 trivial it emits an unconditional jump (and the necessary
1784 barrier). But we still have two edges, the fallthru one is
1785 wrong. purge_dead_edges would clean this up later. Unfortunately
1786 we have to insert insns (and split edges) before
1787 find_many_sub_basic_blocks and hence before purge_dead_edges.
1788 But splitting edges might create new blocks which depend on the
1789 fact that if there are two edges there's no barrier. So the
1790 barrier would get lost and verify_flow_info would ICE. Instead
1791 of auditing all edge splitters to care for the barrier (which
1792 normally isn't there in a cleaned CFG), fix it here. */
1793 if (BARRIER_P (get_last_insn ()))
1797 /* Now, we have a single successor block, if we have insns to
1798 insert on the remaining edge we potentially will insert
1799 it at the end of this block (if the dest block isn't feasible)
1800 in order to avoid splitting the edge. This insertion will take
1801 place in front of the last jump. But we might have emitted
1802 multiple jumps (conditional and one unconditional) to the
1803 same destination. Inserting in front of the last one then
1804 is a problem. See PR 40021. We fix this by deleting all
1805 jumps except the last unconditional one. */
1806 insn = PREV_INSN (get_last_insn ());
1807 /* Make sure we have an unconditional jump. Otherwise we're confused.  */
1809 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1810 for (insn = PREV_INSN (insn); insn != last;)
1812 insn = PREV_INSN (insn);
1813 if (JUMP_P (NEXT_INSN (insn)))
1815 if (!any_condjump_p (NEXT_INSN (insn)))
1817 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1818 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1820 delete_insn (NEXT_INSN (insn));
1826 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1827 Returns a new basic block if we've terminated the current basic
1828 block and created a new one. */
1831 expand_gimple_cond (basic_block bb, gimple stmt)
1833 basic_block new_bb, dest;
1838 enum tree_code code;
1841 code = gimple_cond_code (stmt);
1842 op0 = gimple_cond_lhs (stmt);
1843 op1 = gimple_cond_rhs (stmt);
1844 /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
1848 This would expand to two comparisons which then later might
1849 be cleaned up by combine. But some pattern matchers like if-conversion
1850 work better when there's only one compare, so make up for this
1851 here as special exception if TER would have made the same change. */
1852 if (gimple_cond_single_var_p (stmt)
1854 && TREE_CODE (op0) == SSA_NAME
1855 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1857 gimple second = SSA_NAME_DEF_STMT (op0);
1858 if (gimple_code (second) == GIMPLE_ASSIGN)
1860 enum tree_code code2 = gimple_assign_rhs_code (second);
1861 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1864 op0 = gimple_assign_rhs1 (second);
1865 op1 = gimple_assign_rhs2 (second);
1867 /* If jumps are cheap turn some more codes into jumpy sequences.  */
1869 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1871 if ((code2 == BIT_AND_EXPR
1872 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1873 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1874 || code2 == TRUTH_AND_EXPR)
1876 code = TRUTH_ANDIF_EXPR;
1877 op0 = gimple_assign_rhs1 (second);
1878 op1 = gimple_assign_rhs2 (second);
1880 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1882 code = TRUTH_ORIF_EXPR;
1883 op0 = gimple_assign_rhs1 (second);
1884 op1 = gimple_assign_rhs2 (second);
1890 last2 = last = get_last_insn ();
1892 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1893 set_curr_insn_source_location (gimple_location (stmt));
1894 set_curr_insn_block (gimple_block (stmt));
1896 /* These flags have no purpose in RTL land. */
1897 true_edge->flags &= ~EDGE_TRUE_VALUE;
1898 false_edge->flags &= ~EDGE_FALSE_VALUE;
1900 /* We can either have a pure conditional jump with one fallthru edge or
1901 two-way jump that needs to be decomposed into two basic blocks. */
1902 if (false_edge->dest == bb->next_bb)
1904 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1905 true_edge->probability);
1906 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1907 if (true_edge->goto_locus)
1909 set_curr_insn_source_location (true_edge->goto_locus);
1910 set_curr_insn_block (true_edge->goto_block);
1911 true_edge->goto_locus = curr_insn_locator ();
1913 true_edge->goto_block = NULL;
1914 false_edge->flags |= EDGE_FALLTHRU;
1915 maybe_cleanup_end_of_block (false_edge, last);
1918 if (true_edge->dest == bb->next_bb)
1920 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1921 false_edge->probability);
1922 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1923 if (false_edge->goto_locus)
1925 set_curr_insn_source_location (false_edge->goto_locus);
1926 set_curr_insn_block (false_edge->goto_block);
1927 false_edge->goto_locus = curr_insn_locator ();
1929 false_edge->goto_block = NULL;
1930 true_edge->flags |= EDGE_FALLTHRU;
1931 maybe_cleanup_end_of_block (true_edge, last);
1935 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1936 true_edge->probability);
1937 last = get_last_insn ();
1938 if (false_edge->goto_locus)
1940 set_curr_insn_source_location (false_edge->goto_locus);
1941 set_curr_insn_block (false_edge->goto_block);
1942 false_edge->goto_locus = curr_insn_locator ();
1944 false_edge->goto_block = NULL;
1945 emit_jump (label_rtx_for_bb (false_edge->dest));
1948 if (BARRIER_P (BB_END (bb)))
1949 BB_END (bb) = PREV_INSN (BB_END (bb));
1950 update_bb_for_insn (bb);
1952 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1953 dest = false_edge->dest;
1954 redirect_edge_succ (false_edge, new_bb);
1955 false_edge->flags |= EDGE_FALLTHRU;
1956 new_bb->count = false_edge->count;
1957 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1958 new_edge = make_edge (new_bb, dest, 0);
1959 new_edge->probability = REG_BR_PROB_BASE;
1960 new_edge->count = new_bb->count;
1961 if (BARRIER_P (BB_END (new_bb)))
1962 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1963 update_bb_for_insn (new_bb);
1965 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1967 if (true_edge->goto_locus)
1969 set_curr_insn_source_location (true_edge->goto_locus);
1970 set_curr_insn_block (true_edge->goto_block);
1971 true_edge->goto_locus = curr_insn_locator ();
1973 true_edge->goto_block = NULL;
1978 /* Mark all calls that can have a transaction restart. */
1981 mark_transaction_restart_calls (gimple stmt)
1983 struct tm_restart_node dummy;
1986 if (!cfun->gimple_df->tm_restart)
1990 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1993 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1994 tree list = n->label_or_list;
1997 for (insn = next_real_insn (get_last_insn ());
1999 insn = next_real_insn (insn))
2002 if (TREE_CODE (list) == LABEL_DECL)
2003 add_reg_note (insn, REG_TM, label_rtx (list));
2005 for (; list ; list = TREE_CHAIN (list))
2006 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2010 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL statement STMT.  */
2014 expand_call_stmt (gimple stmt)
2016 tree exp, decl, lhs;
2020 if (gimple_call_internal_p (stmt))
2022 expand_internal_call (stmt);
2026 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2028 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2029 decl = gimple_call_fndecl (stmt);
2030 builtin_p = decl && DECL_BUILT_IN (decl);
2032 /* If this is not a builtin function, the function type through which the
2033 call is made may be different from the type of the function. */
2036 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2037 CALL_EXPR_FN (exp));
2039 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2040 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2042 for (i = 0; i < gimple_call_num_args (stmt); i++)
2044 tree arg = gimple_call_arg (stmt, i);
2046 /* TER addresses into arguments of builtin functions so we have a
2047 chance to infer more correct alignment information. See PR39954. */
2049 && TREE_CODE (arg) == SSA_NAME
2050 && (def = get_gimple_for_ssa_name (arg))
2051 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2052 arg = gimple_assign_rhs1 (def);
2053 CALL_EXPR_ARG (exp, i) = arg;
2056 if (gimple_has_side_effects (stmt))
2057 TREE_SIDE_EFFECTS (exp) = 1;
2059 if (gimple_call_nothrow_p (stmt))
2060 TREE_NOTHROW (exp) = 1;
2062 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2063 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2065 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2066 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2067 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2068 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2070 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2071 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2072 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2073 TREE_BLOCK (exp) = gimple_block (stmt);
2075 /* Ensure RTL is created for debug args. */
2076 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2078 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2083 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2085 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2086 expand_debug_expr (dtemp);
2090 lhs = gimple_call_lhs (stmt);
2092 expand_assignment (lhs, exp, false);
2094 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2096 mark_transaction_restart_calls (stmt);
2099 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2100 STMT that doesn't require special handling for outgoing edges. That
2101 is, no tailcalls and no GIMPLE_COND. */
2104 expand_gimple_stmt_1 (gimple stmt)
2108 set_curr_insn_source_location (gimple_location (stmt));
2109 set_curr_insn_block (gimple_block (stmt));
2111 switch (gimple_code (stmt))
2114 op0 = gimple_goto_dest (stmt);
2115 if (TREE_CODE (op0) == LABEL_DECL)
2118 expand_computed_goto (op0);
2121 expand_label (gimple_label_label (stmt));
2124 case GIMPLE_PREDICT:
2130 expand_asm_stmt (stmt);
2133 expand_call_stmt (stmt);
2137 op0 = gimple_return_retval (stmt);
2139 if (op0 && op0 != error_mark_node)
2141 tree result = DECL_RESULT (current_function_decl);
2143 /* If we are not returning the current function's RESULT_DECL,
2144 build an assignment to it. */
2147 /* I believe that a function's RESULT_DECL is unique. */
2148 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2150 /* ??? We'd like to simply use expand_assignment here,
2151 but this fails if the value is of BLKmode but the return
2152 decl is a register. expand_return has special handling
2153 for this combination, which eventually should move
2154 to common code. See comments there. Until then, let's
2155 build a modify expression :-/ */
2156 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2161 expand_null_return ();
2163 expand_return (op0);
2168 tree lhs = gimple_assign_lhs (stmt);
2170 /* Tree expand used to fiddle with |= and &= of two bitfield
2171 COMPONENT_REFs here. This can't happen with gimple; the LHS
2172 of binary assigns must be a gimple reg. */
2174 if (TREE_CODE (lhs) != SSA_NAME
2175 || get_gimple_rhs_class (gimple_expr_code (stmt))
2176 == GIMPLE_SINGLE_RHS)
2178 tree rhs = gimple_assign_rhs1 (stmt);
2179 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2180 == GIMPLE_SINGLE_RHS);
2181 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2182 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2183 if (TREE_CLOBBER_P (rhs))
2184 /* This is a clobber to mark the going out of scope for this LHS. */
2188 expand_assignment (lhs, rhs,
2189 gimple_assign_nontemporal_move_p (stmt));
2194 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2195 struct separate_ops ops;
2196 bool promoted = false;
2198 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2199 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2202 ops.code = gimple_assign_rhs_code (stmt);
2203 ops.type = TREE_TYPE (lhs);
2204 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2206 case GIMPLE_TERNARY_RHS:
2207 ops.op2 = gimple_assign_rhs3 (stmt);
2209 case GIMPLE_BINARY_RHS:
2210 ops.op1 = gimple_assign_rhs2 (stmt);
2212 case GIMPLE_UNARY_RHS:
2213 ops.op0 = gimple_assign_rhs1 (stmt);
2218 ops.location = gimple_location (stmt);
2220 /* If we want to use a nontemporal store, force the value into a
2221 register first. If we store into a promoted register,
2222 don't directly expand to the target. */
2223 temp = nontemporal || promoted ? NULL_RTX : target;
2224 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2231 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2232 /* If TEMP is a VOIDmode constant, use convert_modes to make
2233 sure that we properly convert it. */
2234 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2236 temp = convert_modes (GET_MODE (target),
2237 TYPE_MODE (ops.type),
2239 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2240 GET_MODE (target), temp, unsignedp);
2243 convert_move (SUBREG_REG (target), temp, unsignedp);
2245 else if (nontemporal && emit_storent_insn (target, temp))
2249 temp = force_operand (temp, target);
2251 emit_move_insn (target, temp);
2262 /* Expand one gimple statement STMT and return the last RTL instruction
2263 before any of the newly generated ones.
2265 In addition to generating the necessary RTL instructions this also
2266 sets REG_EH_REGION notes if necessary and sets the current source
2267 location for diagnostics. */
2270 expand_gimple_stmt (gimple stmt)
2272 location_t saved_location = input_location;
2273 rtx last = get_last_insn ();
2278 /* We need to save and restore the current source location so that errors
2279 discovered during expansion are emitted with the right location. But
2280 it would be better if the diagnostic routines used the source location
2281 embedded in the tree nodes rather than globals. */
2282 if (gimple_has_location (stmt))
2283 input_location = gimple_location (stmt);
2285 expand_gimple_stmt_1 (stmt);
2287 /* Free any temporaries used to evaluate this statement. */
2290 input_location = saved_location;
2292 /* Mark all insns that may trap. */
2293 lp_nr = lookup_stmt_eh_lp (stmt);
2297 for (insn = next_real_insn (last); insn;
2298 insn = next_real_insn (insn))
2300 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2301 /* If we want exceptions for non-call insns, any
2302 may_trap_p instruction may throw. */
2303 && GET_CODE (PATTERN (insn)) != CLOBBER
2304 && GET_CODE (PATTERN (insn)) != USE
2305 && insn_could_throw_p (insn))
2306 make_reg_eh_region_note (insn, 0, lp_nr);
2313 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2314 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2315 generated a tail call (something that might be denied by the ABI
2316 rules governing the call; see calls.c).
2318 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2319 can still reach the rest of BB. The case here is __builtin_sqrt,
2320 where the NaN result goes through the external function (with a
2321 tailcall) and the normal result happens via a sqrt instruction. */
2324 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2332 last2 = last = expand_gimple_stmt (stmt);
2334 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2335 if (CALL_P (last) && SIBLING_CALL_P (last))
2338 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2340 *can_fallthru = true;
2344 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2345 Any instructions emitted here are about to be deleted. */
2346 do_pending_stack_adjust ();
2348 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2349 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2350 EH or abnormal edges, we shouldn't have created a tail call in
2351 the first place. So it seems to me we should just be removing
2352 all edges here, or redirecting the existing fallthru edge to the exit block. */
2358 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2360 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2362 if (e->dest != EXIT_BLOCK_PTR)
2364 e->dest->count -= e->count;
2365 e->dest->frequency -= EDGE_FREQUENCY (e);
2366 if (e->dest->count < 0)
2368 if (e->dest->frequency < 0)
2369 e->dest->frequency = 0;
2372 probability += e->probability;
2379 /* This is somewhat ugly: the call_expr expander often emits instructions
2380 after the sibcall (to perform the function return). These confuse the
2381 find_many_sub_basic_blocks code, so we need to get rid of these. */
2382 last = NEXT_INSN (last);
2383 gcc_assert (BARRIER_P (last));
2385 *can_fallthru = false;
2386 while (NEXT_INSN (last))
2388 /* For instance, a sqrt builtin expander may expand to an if with a
2389 sibcall in the 'then' arm and a label for the 'else' arm. */
2390 if (LABEL_P (NEXT_INSN (last)))
2392 *can_fallthru = true;
2395 delete_insn (NEXT_INSN (last));
2398 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2399 e->probability += probability;
2402 update_bb_for_insn (bb);
2404 if (NEXT_INSN (last))
2406 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2409 if (BARRIER_P (last))
2410 BB_END (bb) = PREV_INSN (last);
2413 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2418 /* Return the difference between the floor and the truncated result of
2419 a signed division by OP1 with remainder MOD. */
2421 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2423 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2424 return gen_rtx_IF_THEN_ELSE
2425 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2426 gen_rtx_IF_THEN_ELSE
2427 (mode, gen_rtx_LT (BImode,
2428 gen_rtx_DIV (mode, op1, mod),
2430 constm1_rtx, const0_rtx),
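/* Worked example (illustrative, not from the original source): for -7 / 2 the
   truncated quotient is -3 with remainder MOD = -1, while the floor quotient
   is -4.  Since MOD != 0 and OP1 / MOD = 2 / -1 < 0, the adjustment is -1,
   and -3 + -1 = -4 as required.  */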
2434 /* Return the difference between the ceil and the truncated result of
2435 a signed division by OP1 with remainder MOD. */
2437 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2439 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2440 return gen_rtx_IF_THEN_ELSE
2441 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2442 gen_rtx_IF_THEN_ELSE
2443 (mode, gen_rtx_GT (BImode,
2444 gen_rtx_DIV (mode, op1, mod),
2446 const1_rtx, const0_rtx),
2450 /* Return the difference between the ceil and the truncated result of
2451 an unsigned division by OP1 with remainder MOD. */
2453 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2455 /* (mod != 0 ? 1 : 0) */
2456 return gen_rtx_IF_THEN_ELSE
2457 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2458 const1_rtx, const0_rtx);
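/* Worked example (illustrative, not from the original source): for the
   unsigned division 7 / 2 the truncated quotient is 3 with MOD = 1; the
   ceiling is 4, and since MOD != 0 the adjustment is +1.  */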
2461 /* Return the difference between the rounded and the truncated result
2462 of a signed division by OP1 with remainder MOD. Halfway cases are
2463 rounded away from zero, rather than to the nearest even number. */
2465 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2467 /* (abs (mod) >= abs (op1) - abs (mod)
2468 ? (op1 / mod > 0 ? 1 : -1)
2470 return gen_rtx_IF_THEN_ELSE
2471 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2472 gen_rtx_MINUS (mode,
2473 gen_rtx_ABS (mode, op1),
2474 gen_rtx_ABS (mode, mod))),
2475 gen_rtx_IF_THEN_ELSE
2476 (mode, gen_rtx_GT (BImode,
2477 gen_rtx_DIV (mode, op1, mod),
2479 const1_rtx, constm1_rtx),
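/* Worked example (illustrative, not from the original source): for 7 / 2 the
   truncated quotient is 3 with MOD = 1.  Here abs (MOD) = 1 >=
   abs (OP1) - abs (MOD) = 1 and OP1 / MOD = 2 > 0, so the adjustment is +1
   and the rounded result is 4, i.e. 3.5 rounded away from zero.  */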
2483 /* Return the difference between the rounded and the truncated result
2484 of an unsigned division by OP1 with remainder MOD. Halfway cases
2485 are rounded away from zero, rather than to the nearest even number. */
2488 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2490 /* (mod >= op1 - mod ? 1 : 0) */
2491 return gen_rtx_IF_THEN_ELSE
2492 (mode, gen_rtx_GE (BImode, mod,
2493 gen_rtx_MINUS (mode, op1, mod)),
2494 const1_rtx, const0_rtx);
2497 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting insns. */
2501 convert_debug_memory_address (enum machine_mode mode, rtx x,
2504 enum machine_mode xmode = GET_MODE (x);
2506 #ifndef POINTERS_EXTEND_UNSIGNED
2507 gcc_assert (mode == Pmode
2508 || mode == targetm.addr_space.address_mode (as));
2509 gcc_assert (xmode == mode || xmode == VOIDmode);
2513 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2515 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2518 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2519 x = simplify_gen_subreg (mode, x, xmode,
2520 subreg_lowpart_offset
2522 else if (POINTERS_EXTEND_UNSIGNED > 0)
2523 x = gen_rtx_ZERO_EXTEND (mode, x);
2524 else if (!POINTERS_EXTEND_UNSIGNED)
2525 x = gen_rtx_SIGN_EXTEND (mode, x);
2528 switch (GET_CODE (x))
2531 if ((SUBREG_PROMOTED_VAR_P (x)
2532 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2533 || (GET_CODE (SUBREG_REG (x)) == PLUS
2534 && REG_P (XEXP (SUBREG_REG (x), 0))
2535 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2536 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2537 && GET_MODE (SUBREG_REG (x)) == mode)
2538 return SUBREG_REG (x);
2541 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2542 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2545 temp = shallow_copy_rtx (x);
2546 PUT_MODE (temp, mode);
2549 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2551 temp = gen_rtx_CONST (mode, temp);
2555 if (CONST_INT_P (XEXP (x, 1)))
2557 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2559 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2565 /* Don't know how to express ptr_extend as an operation in debug info. */
2568 #endif /* POINTERS_EXTEND_UNSIGNED */
2573 /* Return an RTX equivalent to the value of the parameter DECL. */
2576 expand_debug_parm_decl (tree decl)
2578 rtx incoming = DECL_INCOMING_RTL (decl);
2581 && GET_MODE (incoming) != BLKmode
2582 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2583 || (MEM_P (incoming)
2584 && REG_P (XEXP (incoming, 0))
2585 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2587 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2589 #ifdef HAVE_window_save
2590 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2591 If the target machine has an explicit window save instruction, the
2592 actual entry value is the corresponding OUTGOING_REGNO instead. */
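/* Illustrative note (not from the original source): on a register-window
   target such as SPARC, an argument that the callee sees in %i0 was passed
   by the caller in %o0; the window save in the prologue renames it, so the
   value at the call boundary has to be described via the outgoing register.  */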
2593 if (REG_P (incoming)
2594 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2596 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2597 OUTGOING_REGNO (REGNO (incoming)), 0);
2598 else if (MEM_P (incoming))
2600 rtx reg = XEXP (incoming, 0);
2601 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2603 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2604 incoming = replace_equiv_address_nv (incoming, reg);
2609 ENTRY_VALUE_EXP (rtl) = incoming;
2614 && GET_MODE (incoming) != BLKmode
2615 && !TREE_ADDRESSABLE (decl)
2617 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2618 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2619 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2620 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2626 /* Return an RTX equivalent to the value of the tree expression EXP. */
2629 expand_debug_expr (tree exp)
2631 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2632 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2633 enum machine_mode inner_mode = VOIDmode;
2634 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2637 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2639 case tcc_expression:
2640 switch (TREE_CODE (exp))
2644 case WIDEN_MULT_PLUS_EXPR:
2645 case WIDEN_MULT_MINUS_EXPR:
2649 case TRUTH_ANDIF_EXPR:
2650 case TRUTH_ORIF_EXPR:
2651 case TRUTH_AND_EXPR:
2653 case TRUTH_XOR_EXPR:
2656 case TRUTH_NOT_EXPR:
2665 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2672 case tcc_comparison:
2673 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2680 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2681 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2691 case tcc_exceptional:
2692 case tcc_declaration:
2698 switch (TREE_CODE (exp))
2701 if (!lookup_constant_def (exp))
2703 if (strlen (TREE_STRING_POINTER (exp)) + 1
2704 != (size_t) TREE_STRING_LENGTH (exp))
2706 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2707 op0 = gen_rtx_MEM (BLKmode, op0);
2708 set_mem_attributes (op0, exp, 0);
2711 /* Fall through... */
2716 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2720 gcc_assert (COMPLEX_MODE_P (mode));
2721 op0 = expand_debug_expr (TREE_REALPART (exp));
2722 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2723 return gen_rtx_CONCAT (mode, op0, op1);
2725 case DEBUG_EXPR_DECL:
2726 op0 = DECL_RTL_IF_SET (exp);
2731 op0 = gen_rtx_DEBUG_EXPR (mode);
2732 DEBUG_EXPR_TREE_DECL (op0) = exp;
2733 SET_DECL_RTL (exp, op0);
2743 op0 = DECL_RTL_IF_SET (exp);
2745 /* This decl was probably optimized away. */
2748 if (TREE_CODE (exp) != VAR_DECL
2749 || DECL_EXTERNAL (exp)
2750 || !TREE_STATIC (exp)
2752 || DECL_HARD_REGISTER (exp)
2753 || DECL_IN_CONSTANT_POOL (exp)
2754 || mode == VOIDmode)
2757 op0 = make_decl_rtl_for_debug (exp);
2759 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2760 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2764 op0 = copy_rtx (op0);
2766 if (GET_MODE (op0) == BLKmode
2767 /* If op0 is not BLKmode, but MODE is BLKmode, adjust_mode
2768 below would ICE. While it is likely a FE bug,
2769 try to be robust here. See PR43166. */
2771 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2773 gcc_assert (MEM_P (op0));
2774 op0 = adjust_address_nv (op0, mode, 0);
2785 inner_mode = GET_MODE (op0);
2787 if (mode == inner_mode)
2790 if (inner_mode == VOIDmode)
2792 if (TREE_CODE (exp) == SSA_NAME)
2793 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2795 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2796 if (mode == inner_mode)
2800 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2802 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2803 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2804 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2805 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2807 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2809 else if (FLOAT_MODE_P (mode))
2811 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2812 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2813 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2815 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2817 else if (FLOAT_MODE_P (inner_mode))
2820 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2822 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2824 else if (CONSTANT_P (op0)
2825 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2826 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2827 subreg_lowpart_offset (mode,
2829 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2830 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2832 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2834 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2840 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2842 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2843 TREE_OPERAND (exp, 0),
2844 TREE_OPERAND (exp, 1));
2846 return expand_debug_expr (newexp);
2850 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2854 if (TREE_CODE (exp) == MEM_REF)
2856 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2857 || (GET_CODE (op0) == PLUS
2858 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2859 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2860 Instead just use get_inner_reference. */
2863 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2864 if (!op1 || !CONST_INT_P (op1))
2867 op0 = plus_constant (op0, INTVAL (op1));
2870 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2871 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2873 as = ADDR_SPACE_GENERIC;
2875 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2877 if (op0 == NULL_RTX)
2880 op0 = gen_rtx_MEM (mode, op0);
2881 set_mem_attributes (op0, exp, 0);
2882 if (TREE_CODE (exp) == MEM_REF
2883 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2884 set_mem_expr (op0, NULL_TREE);
2885 set_mem_addr_space (op0, as);
2889 case TARGET_MEM_REF:
2890 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2891 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2894 op0 = expand_debug_expr
2895 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2899 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2900 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2902 as = ADDR_SPACE_GENERIC;
2904 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2906 if (op0 == NULL_RTX)
2909 op0 = gen_rtx_MEM (mode, op0);
2911 set_mem_attributes (op0, exp, 0);
2912 set_mem_addr_space (op0, as);
2918 case ARRAY_RANGE_REF:
2923 case VIEW_CONVERT_EXPR:
2925 enum machine_mode mode1;
2926 HOST_WIDE_INT bitsize, bitpos;
2929 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2930 &mode1, &unsignedp, &volatilep, false);
2936 orig_op0 = op0 = expand_debug_expr (tem);
2943 enum machine_mode addrmode, offmode;
2948 op0 = XEXP (op0, 0);
2949 addrmode = GET_MODE (op0);
2950 if (addrmode == VOIDmode)
2953 op1 = expand_debug_expr (offset);
2957 offmode = GET_MODE (op1);
2958 if (offmode == VOIDmode)
2959 offmode = TYPE_MODE (TREE_TYPE (offset));
2961 if (addrmode != offmode)
2962 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2963 subreg_lowpart_offset (addrmode,
2966 /* Don't use offset_address here, we don't need a
2967 recognizable address, and we don't want to generate code. */
2969 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2975 if (mode1 == VOIDmode)
2977 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2978 if (bitpos >= BITS_PER_UNIT)
2980 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2981 bitpos %= BITS_PER_UNIT;
2983 else if (bitpos < 0)
2986 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2987 op0 = adjust_address_nv (op0, mode1, units);
2988 bitpos += units * BITS_PER_UNIT;
2990 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2991 op0 = adjust_address_nv (op0, mode, 0);
2992 else if (GET_MODE (op0) != mode1)
2993 op0 = adjust_address_nv (op0, mode1, 0);
2995 op0 = copy_rtx (op0);
2996 if (op0 == orig_op0)
2997 op0 = shallow_copy_rtx (op0);
2998 set_mem_attributes (op0, exp, 0);
3001 if (bitpos == 0 && mode == GET_MODE (op0))
3007 if (GET_MODE (op0) == BLKmode)
3010 if ((bitpos % BITS_PER_UNIT) == 0
3011 && bitsize == GET_MODE_BITSIZE (mode1))
3013 enum machine_mode opmode = GET_MODE (op0);
3015 if (opmode == VOIDmode)
3016 opmode = TYPE_MODE (TREE_TYPE (tem));
3018 /* This condition may hold if we're expanding the address
3019 right past the end of an array that turned out not to
3020 be addressable (i.e., the address was only computed in
3021 debug stmts). The gen_subreg below would rightfully
3022 crash, and the address doesn't really exist, so just drop it. */
3024 if (bitpos >= GET_MODE_BITSIZE (opmode))
3027 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3028 return simplify_gen_subreg (mode, op0, opmode,
3029 bitpos / BITS_PER_UNIT);
3032 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3033 && TYPE_UNSIGNED (TREE_TYPE (exp))
3035 : ZERO_EXTRACT, mode,
3036 GET_MODE (op0) != VOIDmode
3038 : TYPE_MODE (TREE_TYPE (tem)),
3039 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3043 return simplify_gen_unary (ABS, mode, op0, mode);
3046 return simplify_gen_unary (NEG, mode, op0, mode);
3049 return simplify_gen_unary (NOT, mode, op0, mode);
3052 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3054 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3057 case FIX_TRUNC_EXPR:
3058 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3061 case POINTER_PLUS_EXPR:
3062 /* For the rare target where pointers are not the same size as
3063 size_t, we need to check for mismatched modes and correct the addition. */
3066 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3067 && GET_MODE (op0) != GET_MODE (op1))
3069 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3070 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3073 /* We always sign-extend, regardless of the signedness of
3074 the operand, because the operand is of sizetype and hence unsigned here
3075 even when it represents a negative offset in the original C expression. */
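/* Illustrative example (not from the original source): on a hypothetical
   target with 64-bit pointers and a 32-bit sizetype, an offset of 0xffffffff
   denotes -1; sign-extending it to the pointer mode yields -1, which is what
   p + (-1) requires, whereas zero-extension would yield 0xffffffff.  */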
3076 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3081 return simplify_gen_binary (PLUS, mode, op0, op1);
3084 return simplify_gen_binary (MINUS, mode, op0, op1);
3087 return simplify_gen_binary (MULT, mode, op0, op1);
3090 case TRUNC_DIV_EXPR:
3091 case EXACT_DIV_EXPR:
3093 return simplify_gen_binary (UDIV, mode, op0, op1);
3095 return simplify_gen_binary (DIV, mode, op0, op1);
3097 case TRUNC_MOD_EXPR:
3098 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3100 case FLOOR_DIV_EXPR:
3102 return simplify_gen_binary (UDIV, mode, op0, op1);
3105 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3106 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3107 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3108 return simplify_gen_binary (PLUS, mode, div, adj);
3111 case FLOOR_MOD_EXPR:
3113 return simplify_gen_binary (UMOD, mode, op0, op1);
3116 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3117 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3118 adj = simplify_gen_unary (NEG, mode,
3119 simplify_gen_binary (MULT, mode, adj, op1),
3121 return simplify_gen_binary (PLUS, mode, mod, adj);
3127 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3128 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3129 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3130 return simplify_gen_binary (PLUS, mode, div, adj);
3134 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3135 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3136 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3137 return simplify_gen_binary (PLUS, mode, div, adj);
3143 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3144 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3145 adj = simplify_gen_unary (NEG, mode,
3146 simplify_gen_binary (MULT, mode, adj, op1),
3148 return simplify_gen_binary (PLUS, mode, mod, adj);
3152 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3153 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3154 adj = simplify_gen_unary (NEG, mode,
3155 simplify_gen_binary (MULT, mode, adj, op1),
3157 return simplify_gen_binary (PLUS, mode, mod, adj);
3160 case ROUND_DIV_EXPR:
3163 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3164 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3165 rtx adj = round_udiv_adjust (mode, mod, op1);
3166 return simplify_gen_binary (PLUS, mode, div, adj);
3170 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3171 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3172 rtx adj = round_sdiv_adjust (mode, mod, op1);
3173 return simplify_gen_binary (PLUS, mode, div, adj);
3176 case ROUND_MOD_EXPR:
3179 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3180 rtx adj = round_udiv_adjust (mode, mod, op1);
3181 adj = simplify_gen_unary (NEG, mode,
3182 simplify_gen_binary (MULT, mode, adj, op1),
3184 return simplify_gen_binary (PLUS, mode, mod, adj);
3188 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3189 rtx adj = round_sdiv_adjust (mode, mod, op1);
3190 adj = simplify_gen_unary (NEG, mode,
3191 simplify_gen_binary (MULT, mode, adj, op1),
3193 return simplify_gen_binary (PLUS, mode, mod, adj);
3197 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3201 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3203 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3206 return simplify_gen_binary (ROTATE, mode, op0, op1);
3209 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3212 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3215 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3218 case TRUTH_AND_EXPR:
3219 return simplify_gen_binary (AND, mode, op0, op1);
3223 return simplify_gen_binary (IOR, mode, op0, op1);
3226 case TRUTH_XOR_EXPR:
3227 return simplify_gen_binary (XOR, mode, op0, op1);
3229 case TRUTH_ANDIF_EXPR:
3230 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3232 case TRUTH_ORIF_EXPR:
3233 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3235 case TRUTH_NOT_EXPR:
3236 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3239 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3243 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3247 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3251 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3255 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3258 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3260 case UNORDERED_EXPR:
3261 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3264 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3267 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3270 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3273 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3276 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3279 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3282 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3285 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3288 gcc_assert (COMPLEX_MODE_P (mode));
3289 if (GET_MODE (op0) == VOIDmode)
3290 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3291 if (GET_MODE (op1) == VOIDmode)
3292 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3293 return gen_rtx_CONCAT (mode, op0, op1);
3296 if (GET_CODE (op0) == CONCAT)
3297 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3298 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3300 GET_MODE_INNER (mode)));
3303 enum machine_mode imode = GET_MODE_INNER (mode);
3308 re = adjust_address_nv (op0, imode, 0);
3309 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3313 enum machine_mode ifmode = int_mode_for_mode (mode);
3314 enum machine_mode ihmode = int_mode_for_mode (imode);
3316 if (ifmode == BLKmode || ihmode == BLKmode)
3318 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3321 re = gen_rtx_SUBREG (ifmode, re, 0);
3322 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3323 if (imode != ihmode)
3324 re = gen_rtx_SUBREG (imode, re, 0);
3325 im = copy_rtx (op0);
3327 im = gen_rtx_SUBREG (ifmode, im, 0);
3328 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3329 if (imode != ihmode)
3330 im = gen_rtx_SUBREG (imode, im, 0);
3332 im = gen_rtx_NEG (imode, im);
3333 return gen_rtx_CONCAT (mode, re, im);
3337 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3338 if (!op0 || !MEM_P (op0))
3340 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3341 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3342 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3343 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3344 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3345 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3347 if (handled_component_p (TREE_OPERAND (exp, 0)))
3349 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3351 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3352 &bitoffset, &bitsize, &maxsize);
3353 if ((TREE_CODE (decl) == VAR_DECL
3354 || TREE_CODE (decl) == PARM_DECL
3355 || TREE_CODE (decl) == RESULT_DECL)
3356 && (!TREE_ADDRESSABLE (decl)
3357 || target_for_debug_bind (decl))
3358 && (bitoffset % BITS_PER_UNIT) == 0
3360 && bitsize == maxsize)
3361 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3362 bitoffset / BITS_PER_UNIT);
3368 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3369 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3374 exp = build_constructor_from_list (TREE_TYPE (exp),
3375 TREE_VECTOR_CST_ELTS (exp));
3379 if (TREE_CLOBBER_P (exp))
3381 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3386 op0 = gen_rtx_CONCATN
3387 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3389 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3391 op1 = expand_debug_expr (val);
3394 XVECEXP (op0, 0, i) = op1;
3397 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3399 op1 = expand_debug_expr
3400 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3405 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3406 XVECEXP (op0, 0, i) = op1;
3412 goto flag_unsupported;
3415 /* ??? Maybe handle some builtins? */
3420 gimple g = get_gimple_for_ssa_name (exp);
3423 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3429 int part = var_to_partition (SA.map, exp);
3431 if (part == NO_PARTITION)
3433 /* If this is a reference to the incoming value of a parameter
3434 that is never used in the code, or where the incoming
3435 value itself is never used, fall back to the underlying PARM_DECL. */
3437 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3438 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3440 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3443 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3450 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3452 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3460 /* Vector stuff. For most of the codes we don't have rtl codes. */
3461 case REALIGN_LOAD_EXPR:
3462 case REDUC_MAX_EXPR:
3463 case REDUC_MIN_EXPR:
3464 case REDUC_PLUS_EXPR:
3466 case VEC_LSHIFT_EXPR:
3467 case VEC_PACK_FIX_TRUNC_EXPR:
3468 case VEC_PACK_SAT_EXPR:
3469 case VEC_PACK_TRUNC_EXPR:
3470 case VEC_RSHIFT_EXPR:
3471 case VEC_UNPACK_FLOAT_HI_EXPR:
3472 case VEC_UNPACK_FLOAT_LO_EXPR:
3473 case VEC_UNPACK_HI_EXPR:
3474 case VEC_UNPACK_LO_EXPR:
3475 case VEC_WIDEN_MULT_HI_EXPR:
3476 case VEC_WIDEN_MULT_LO_EXPR:
3477 case VEC_WIDEN_LSHIFT_HI_EXPR:
3478 case VEC_WIDEN_LSHIFT_LO_EXPR:
3483 case ADDR_SPACE_CONVERT_EXPR:
3484 case FIXED_CONVERT_EXPR:
3486 case WITH_SIZE_EXPR:
3490 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3491 && SCALAR_INT_MODE_P (mode))
3494 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3496 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3499 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3501 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3503 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3504 return simplify_gen_binary (PLUS, mode, op0, op2);
3508 case WIDEN_MULT_EXPR:
3509 case WIDEN_MULT_PLUS_EXPR:
3510 case WIDEN_MULT_MINUS_EXPR:
3511 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3512 && SCALAR_INT_MODE_P (mode))
3514 inner_mode = GET_MODE (op0);
3515 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3516 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3518 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3519 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3520 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3522 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3523 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3524 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3526 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3527 return simplify_gen_binary (PLUS, mode, op0, op2);
3529 return simplify_gen_binary (MINUS, mode, op2, op0);
3533 case WIDEN_SUM_EXPR:
3534 case WIDEN_LSHIFT_EXPR:
3535 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3536 && SCALAR_INT_MODE_P (mode))
3539 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3541 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3543 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3544 ? ASHIFT : PLUS, mode, op0, op1);
3549 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3553 #ifdef ENABLE_CHECKING
3562 /* Return an RTX equivalent to the source bind value of the tree expression EXP. */
3566 expand_debug_source_expr (tree exp)
3569 enum machine_mode mode = VOIDmode, inner_mode;
3571 switch (TREE_CODE (exp))
3575 mode = DECL_MODE (exp);
3576 op0 = expand_debug_parm_decl (exp);
3579 /* See if this isn't an argument that has been completely optimized out. */
3581 if (!DECL_RTL_SET_P (exp)
3582 && !DECL_INCOMING_RTL (exp)
3583 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3586 if (DECL_ABSTRACT_ORIGIN (exp))
3587 aexp = DECL_ABSTRACT_ORIGIN (exp);
3588 if (DECL_CONTEXT (aexp)
3589 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3591 VEC(tree, gc) **debug_args;
3594 #ifdef ENABLE_CHECKING
3596 for (parm = DECL_ARGUMENTS (current_function_decl);
3597 parm; parm = DECL_CHAIN (parm))
3598 gcc_assert (parm != exp
3599 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3601 debug_args = decl_debug_args_lookup (current_function_decl);
3602 if (debug_args != NULL)
3604 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3607 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3617 if (op0 == NULL_RTX)
3620 inner_mode = GET_MODE (op0);
3621 if (mode == inner_mode)
3624 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3626 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3627 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3628 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3629 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3631 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3633 else if (FLOAT_MODE_P (mode))
3635 else if (FLOAT_MODE_P (inner_mode))
3637 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3638 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3640 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3642 else if (CONSTANT_P (op0)
3643 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3644 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3645 subreg_lowpart_offset (mode, inner_mode));
3646 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3647 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3649 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3654 /* Expand the _LOCs in debug insns. We run this after expanding all
3655 regular insns, so that any variables referenced in the function
3656 will have their DECL_RTLs set. */
3659 expand_debug_locations (void)
3662 rtx last = get_last_insn ();
3663 int save_strict_alias = flag_strict_aliasing;
3665 /* New alias sets while setting up memory attributes cause
3666 -fcompare-debug failures, even though they don't bring about any codegen changes. */
3668 flag_strict_aliasing = 0;
3670 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3671 if (DEBUG_INSN_P (insn))
3673 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3675 enum machine_mode mode;
3677 if (value == NULL_TREE)
3681 if (INSN_VAR_LOCATION_STATUS (insn)
3682 == VAR_INIT_STATUS_UNINITIALIZED)
3683 val = expand_debug_source_expr (value);
3685 val = expand_debug_expr (value);
3686 gcc_assert (last == get_last_insn ());
3690 val = gen_rtx_UNKNOWN_VAR_LOC ();
3693 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3695 gcc_assert (mode == GET_MODE (val)
3696 || (GET_MODE (val) == VOIDmode
3697 && (CONST_INT_P (val)
3698 || GET_CODE (val) == CONST_FIXED
3699 || GET_CODE (val) == CONST_DOUBLE
3700 || GET_CODE (val) == LABEL_REF)));
3703 INSN_VAR_LOCATION_LOC (insn) = val;
3706 flag_strict_aliasing = save_strict_alias;
3709 /* Expand basic block BB from GIMPLE trees to RTL. */
3712 expand_gimple_basic_block (basic_block bb)
3714 gimple_stmt_iterator gsi;
3723 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3726 /* Note that since we are now transitioning from GIMPLE to RTL, we
3727 cannot use the gsi_*_bb() routines because they expect the basic
3728 block to be in GIMPLE, instead of RTL. Therefore, we need to
3729 access the BB sequence directly. */
3730 stmts = bb_seq (bb);
3731 bb->il.gimple = NULL;
3732 rtl_profile_for_bb (bb);
3733 init_rtl_bb_info (bb);
3734 bb->flags |= BB_RTL;
3736 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
3738 gsi = gsi_last (stmts);
3739 if (!gsi_end_p (gsi)
3740 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3742 gimple ret_stmt = gsi_stmt (gsi);
3744 gcc_assert (single_succ_p (bb));
3745 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3747 if (bb->next_bb == EXIT_BLOCK_PTR
3748 && !gimple_return_retval (ret_stmt))
3750 gsi_remove (&gsi, false);
3751 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3755 gsi = gsi_start (stmts);
3756 if (!gsi_end_p (gsi))
3758 stmt = gsi_stmt (gsi);
3759 if (gimple_code (stmt) != GIMPLE_LABEL)
3763 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3767 last = get_last_insn ();
3771 expand_gimple_stmt (stmt);
3776 emit_label ((rtx) *elt);
3778 /* Java emits line number notes at the top of labels.
3779 ??? Make this go away once line number notes are obsoleted. */
3780 BB_HEAD (bb) = NEXT_INSN (last);
3781 if (NOTE_P (BB_HEAD (bb)))
3782 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3783 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3785 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3788 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3790 NOTE_BASIC_BLOCK (note) = bb;
3792 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3796 stmt = gsi_stmt (gsi);
3798 /* If this statement is a non-debug one, and we generate debug
3799 insns, then this one might be the last real use of a TERed
3800 SSA_NAME, but where there are still some debug uses further
3801 down. Expanding the current SSA name in such further debug
3802 uses by their RHS might lead to wrong debug info, as coalescing
3803 might make the operands of such RHS be placed into the same
3804 pseudo as something else. Like so:
3805 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3809 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3810 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3811 the write to a_2 would actually have clobbered the place which would have held a_0.
3814 So, instead of that, we recognize the situation, and generate
3815 debug temporaries at the last real use of TERed SSA names:
3822 if (MAY_HAVE_DEBUG_INSNS
3824 && !is_gimple_debug (stmt))
3830 location_t sloc = get_curr_insn_source_location ();
3831 tree sblock = get_curr_insn_block ();
3833 /* Look for SSA names that have their last use here (TERed
3834 names always have only one real use). */
3835 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3836 if ((def = get_gimple_for_ssa_name (op)))
3838 imm_use_iterator imm_iter;
3839 use_operand_p use_p;
3840 bool have_debug_uses = false;
3842 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3844 if (gimple_debug_bind_p (USE_STMT (use_p)))
3846 have_debug_uses = true;
3851 if (have_debug_uses)
3853 /* OP is a TERed SSA name, with DEF its defining
3854 statement, and where OP is used in further debug
3855 instructions. Generate a debug temporary, and
3856 replace all uses of OP in debug insns with that
3859 tree value = gimple_assign_rhs_to_tree (def);
3860 tree vexpr = make_node (DEBUG_EXPR_DECL);
3862 enum machine_mode mode;
3864 set_curr_insn_source_location (gimple_location (def));
3865 set_curr_insn_block (gimple_block (def));
3867 DECL_ARTIFICIAL (vexpr) = 1;
3868 TREE_TYPE (vexpr) = TREE_TYPE (value);
3870 mode = DECL_MODE (value);
3872 mode = TYPE_MODE (TREE_TYPE (value));
3873 DECL_MODE (vexpr) = mode;
3875 val = gen_rtx_VAR_LOCATION
3876 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3878 emit_debug_insn (val);
3880 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3882 if (!gimple_debug_bind_p (debugstmt))
3885 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3886 SET_USE (use_p, vexpr);
3888 update_stmt (debugstmt);
3892 set_curr_insn_source_location (sloc);
3893 set_curr_insn_block (sblock);
3896 currently_expanding_gimple_stmt = stmt;
3898 /* Expand this statement, then evaluate the resulting RTL and
3899 fix up the CFG accordingly. */
3900 if (gimple_code (stmt) == GIMPLE_COND)
3902 new_bb = expand_gimple_cond (bb, stmt);
3906 else if (gimple_debug_bind_p (stmt))
3908 location_t sloc = get_curr_insn_source_location ();
3909 tree sblock = get_curr_insn_block ();
3910 gimple_stmt_iterator nsi = gsi;
3914 tree var = gimple_debug_bind_get_var (stmt);
3917 enum machine_mode mode;
3919 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3920 && TREE_CODE (var) != LABEL_DECL
3921 && !target_for_debug_bind (var))
3922 goto delink_debug_stmt;
3924 if (gimple_debug_bind_has_value_p (stmt))
3925 value = gimple_debug_bind_get_value (stmt);
3929 last = get_last_insn ();
3931 set_curr_insn_source_location (gimple_location (stmt));
3932 set_curr_insn_block (gimple_block (stmt));
3935 mode = DECL_MODE (var);
3937 mode = TYPE_MODE (TREE_TYPE (var));
3939 val = gen_rtx_VAR_LOCATION
3940 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3942 emit_debug_insn (val);
3944 if (dump_file && (dump_flags & TDF_DETAILS))
3946 /* We can't dump the insn with a TREE where an RTX is expected. */
3948 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3949 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3950 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3954 /* In order not to generate too many debug temporaries,
3955 we delink all uses of debug statements we already expanded.
3956 Therefore debug statements between definition and real
3957 use of TERed SSA names will continue to use the SSA name,
3958 and not be replaced with debug temps. */
3959 delink_stmt_imm_use (stmt);
3963 if (gsi_end_p (nsi))
3965 stmt = gsi_stmt (nsi);
3966 if (!gimple_debug_bind_p (stmt))
3970 set_curr_insn_source_location (sloc);
3971 set_curr_insn_block (sblock);
3973 else if (gimple_debug_source_bind_p (stmt))
3975 location_t sloc = get_curr_insn_source_location ();
3976 tree sblock = get_curr_insn_block ();
3977 tree var = gimple_debug_source_bind_get_var (stmt);
3978 tree value = gimple_debug_source_bind_get_value (stmt);
3980 enum machine_mode mode;
3982 last = get_last_insn ();
3984 set_curr_insn_source_location (gimple_location (stmt));
3985 set_curr_insn_block (gimple_block (stmt));
3987 mode = DECL_MODE (var);
3989 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3990 VAR_INIT_STATUS_UNINITIALIZED);
3992 emit_debug_insn (val);
3994 if (dump_file && (dump_flags & TDF_DETAILS))
3996 /* We can't dump the insn with a TREE where an RTX is expected. */
3998 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3999 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4000 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4003 set_curr_insn_source_location (sloc);
4004 set_curr_insn_block (sblock);
4008 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4011 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4022 def_operand_p def_p;
4023 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4027 /* Ignore this stmt if it is in the list of
4028 replaceable expressions. */
4030 && bitmap_bit_p (SA.values,
4031 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4034 last = expand_gimple_stmt (stmt);
4035 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4040 currently_expanding_gimple_stmt = NULL;
4042 /* Expand implicit goto and convert goto_locus. */
4043 FOR_EACH_EDGE (e, ei, bb->succs)
4045 if (e->goto_locus && e->goto_block)
4047 set_curr_insn_source_location (e->goto_locus);
4048 set_curr_insn_block (e->goto_block);
4049 e->goto_locus = curr_insn_locator ();
4051 e->goto_block = NULL;
4052 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4054 emit_jump (label_rtx_for_bb (e->dest));
4055 e->flags &= ~EDGE_FALLTHRU;
4059 /* Expanded RTL can create a jump in the last instruction of a block.
4060 This jump might later be assumed to be a jump to the successor and break edge insertion.
4061 We need to insert a dummy move to prevent this. See PR41440. */
4062 if (single_succ_p (bb)
4063 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4064 && (last = get_last_insn ())
4067 rtx dummy = gen_reg_rtx (SImode);
4068 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4071 do_pending_stack_adjust ();
4073 /* Find the block tail. The last insn in the block is the insn
4074 before a barrier and/or table jump insn. */
4075 last = get_last_insn ();
4076 if (BARRIER_P (last))
4077 last = PREV_INSN (last);
4078 if (JUMP_TABLE_DATA_P (last))
4079 last = PREV_INSN (PREV_INSN (last));
4082 update_bb_for_insn (bb);
4088 /* Create a basic block for initialization code. */
4091 construct_init_block (void)
4093 basic_block init_block, first_block;
4097 /* Multiple entry points not supported yet. */
4098 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4099 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4100 init_rtl_bb_info (EXIT_BLOCK_PTR);
4101 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4102 EXIT_BLOCK_PTR->flags |= BB_RTL;
4104 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4106 /* When the entry edge points to the first basic block, we don't need a jump;
4107 otherwise we have to jump to the proper target. */
4108 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4110 tree label = gimple_block_label (e->dest);
4112 emit_jump (label_rtx (label));
4116 flags = EDGE_FALLTHRU;
4118 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4121 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4122 init_block->count = ENTRY_BLOCK_PTR->count;
4125 first_block = e->dest;
4126 redirect_edge_succ (e, init_block);
4127 e = make_edge (init_block, first_block, flags);
4130 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4131 e->probability = REG_BR_PROB_BASE;
4132 e->count = ENTRY_BLOCK_PTR->count;
4134 update_bb_for_insn (init_block);
4138 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4139 found in the block tree. */
4142 set_block_levels (tree block, int level)
4146 BLOCK_NUMBER (block) = level;
4147 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4148 block = BLOCK_CHAIN (block);
4152 /* Create a block containing landing pads and similar stuff. */
4155 construct_exit_block (void)
4157 rtx head = get_last_insn ();
4159 basic_block exit_block;
4163 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4165 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4167 /* Make sure the locus is set to the end of the function, so that
4168 epilogue line numbers and warnings are set properly. */
4169 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4170 input_location = cfun->function_end_locus;
4172 /* The following insns belong to the top scope. */
4173 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4175 /* Generate rtl for function exit. */
4176 expand_function_end ();
4178 end = get_last_insn ();
4181 /* While emitting the function end we could have moved the end of the last basic block; restore it. */
4183 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4184 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4185 head = NEXT_INSN (head);
4186 exit_block = create_basic_block (NEXT_INSN (head), end,
4187 EXIT_BLOCK_PTR->prev_bb);
4188 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4189 exit_block->count = EXIT_BLOCK_PTR->count;
4192 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4194 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4195 if (!(e->flags & EDGE_ABNORMAL))
4196 redirect_edge_succ (e, exit_block);
4201 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4202 e->probability = REG_BR_PROB_BASE;
4203 e->count = EXIT_BLOCK_PTR->count;
4204 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4207 e->count -= e2->count;
4208 exit_block->count -= e2->count;
4209 exit_block->frequency -= EDGE_FREQUENCY (e2);
4213 if (exit_block->count < 0)
4214 exit_block->count = 0;
4215 if (exit_block->frequency < 0)
4216 exit_block->frequency = 0;
4217 update_bb_for_insn (exit_block);
4220 /* Helper function for discover_nonconstant_array_refs.
4221 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
4225 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4226 void *data ATTRIBUTE_UNUSED)
4230 if (IS_TYPE_OR_DECL_P (t))
4232 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4234 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4235 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4236 && (!TREE_OPERAND (t, 2)
4237 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4238 || (TREE_CODE (t) == COMPONENT_REF
4239 && (!TREE_OPERAND (t,2)
4240 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4241 || TREE_CODE (t) == BIT_FIELD_REF
4242 || TREE_CODE (t) == REALPART_EXPR
4243 || TREE_CODE (t) == IMAGPART_EXPR
4244 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4245 || CONVERT_EXPR_P (t))
4246 t = TREE_OPERAND (t, 0);
4248 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4250 t = get_base_address (t);
4252 && DECL_MODE (t) != BLKmode)
4253 TREE_ADDRESSABLE (t) = 1;
4262 /* RTL expansion is not able to compile array references with variable
4263 offsets for arrays stored in a single register. Discover such
4264 expressions and mark variables as addressable to avoid this scenario. */
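/* Illustrative example (not from the original source): a decl whose
   DECL_MODE is a register mode, e.g. a GNU vector
     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v;
   indexed as v[i] with a non-constant I, can only be expanded if it lives
   in memory, so such decls are marked TREE_ADDRESSABLE by the walker above.  */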
4268 discover_nonconstant_array_refs (void)
4271 gimple_stmt_iterator gsi;
4274 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4276 gimple stmt = gsi_stmt (gsi);
4277 if (!is_gimple_debug (stmt))
4278 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4282 /* This function sets crtl->args.internal_arg_pointer to a virtual
4283 register if DRAP is needed. The local register allocator will replace
4284 virtual_incoming_args_rtx with the virtual register. */
4287 expand_stack_alignment (void)
4290 unsigned int preferred_stack_boundary;
4292 if (! SUPPORTS_STACK_ALIGNMENT)
4295 if (cfun->calls_alloca
4296 || cfun->has_nonlocal_label
4297 || crtl->has_nonlocal_goto)
4298 crtl->need_drap = true;
4300 /* Call update_stack_boundary here again to update incoming stack
4301 boundary. It may set incoming stack alignment to a different
4302 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4303 use the minimum incoming stack alignment to check if it is OK
4304 to perform sibcall optimization since sibcall optimization will
4305 only align the outgoing stack to incoming stack boundary. */
4306 if (targetm.calls.update_stack_boundary)
4307 targetm.calls.update_stack_boundary ();
4309 /* The incoming stack frame has to be aligned at least at
4310 parm_stack_boundary. */
4311 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4313 /* Update crtl->stack_alignment_estimated and use it later to align
4314 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4315 exceptions since callgraph doesn't collect incoming stack alignment in this case. */
4317 if (cfun->can_throw_non_call_exceptions
4318 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4319 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4321 preferred_stack_boundary = crtl->preferred_stack_boundary;
4322 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4323 crtl->stack_alignment_estimated = preferred_stack_boundary;
4324 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4325 crtl->stack_alignment_needed = preferred_stack_boundary;
4327 gcc_assert (crtl->stack_alignment_needed
4328 <= crtl->stack_alignment_estimated);
4330 crtl->stack_realign_needed
4331 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4332 crtl->stack_realign_tried = crtl->stack_realign_needed;
4334 crtl->stack_realign_processed = true;
4336 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack realignment. */
4338 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4339 drap_rtx = targetm.calls.get_drap_rtx ();
4341 /* stack_realign_drap and drap_rtx must match. */
4342 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4344 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4345 if (NULL != drap_rtx)
4347 crtl->args.internal_arg_pointer = drap_rtx;
4349 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is needed. */
4351 fixup_tail_calls ();
4355 /* Translate the intermediate representation contained in the CFG
4356 from GIMPLE trees to RTL.
4358 We do conversion per basic block and preserve/update the tree CFG.
4359 This implies we have to do some magic as the CFG can simultaneously
4360 consist of basic blocks containing RTL and GIMPLE trees. This can
4361 confuse the CFG hooks, so be careful not to manipulate the CFG during the expansion. */
4365 gimple_expand_cfg (void)
4367 basic_block bb, init_block;
4374 timevar_push (TV_OUT_OF_SSA);
4375 rewrite_out_of_ssa (&SA);
4376 timevar_pop (TV_OUT_OF_SSA);
4377 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4380 /* Some backends want to know that we are expanding to RTL. */
4381 currently_expanding_to_rtl = 1;
4383 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4385 insn_locators_alloc ();
4386 if (!DECL_IS_BUILTIN (current_function_decl))
4388 /* Eventually, all FEs should explicitly set function_start_locus. */
4389 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4390 set_curr_insn_source_location
4391 (DECL_SOURCE_LOCATION (current_function_decl));
4393 set_curr_insn_source_location (cfun->function_start_locus);
4396 set_curr_insn_source_location (UNKNOWN_LOCATION);
4397 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4398 prologue_locator = curr_insn_locator ();
4400 #ifdef INSN_SCHEDULING
4401 init_sched_attrs ();
4404 /* Make sure first insn is a note even if we don't want linenums.
4405 This makes sure the first insn will never be deleted.
4406 Also, final expects a note to appear there. */
4407 emit_note (NOTE_INSN_DELETED);
4409 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4410 discover_nonconstant_array_refs ();
4412 targetm.expand_to_rtl_hook ();
4413 crtl->stack_alignment_needed = STACK_BOUNDARY;
4414 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4415 crtl->stack_alignment_estimated = 0;
4416 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4417 cfun->cfg->max_jumptable_ents = 0;
4419 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4420 of the function section at expansion time to predict the distance of calls. */
4421 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4423 /* Expand the variables recorded during gimple lowering. */
4424 timevar_push (TV_VAR_EXPAND);
4425 start_sequence ();
4427 expand_used_vars ();
4429 var_seq = get_insns ();
4430 end_sequence ();
4431 timevar_pop (TV_VAR_EXPAND);
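/* var_seq now holds the insns emitted while expanding the stack
   variables above; they are moved in front of parm_birth_insn once
   expand_function_start has emitted the parameter setup code below.  */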
4433 /* Honor stack protection warnings. */
4434 if (warn_stack_protect)
4435 {
4436 if (cfun->calls_alloca)
4437 warning (OPT_Wstack_protector,
4438 "stack protector not protecting local variables: "
4439 "variable length buffer");
4440 if (has_short_buffer && !crtl->stack_protect_guard)
4441 warning (OPT_Wstack_protector,
4442 "stack protector not protecting function: "
4443 "all local arrays are less than %d bytes long",
4444 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4445 }
4447 /* Set up parameters and prepare for return, for the function. */
4448 expand_function_start (current_function_decl);
4450 /* If we emitted any instructions for setting up the variables,
4451 emit them before the FUNCTION_START note. */
4452 if (var_seq)
4453 {
4454 emit_insn_before (var_seq, parm_birth_insn);
4456 /* In expand_function_end we'll insert the alloca save/restore
4457 before parm_birth_insn.  We've just inserted an alloca call.
4458 Adjust the pointer to match.  */
4459 parm_birth_insn = var_seq;
4460 }
4462 /* Now that we also have the parameter RTXs, copy them over to our
4463 partition representation.  */
4464 for (i = 0; i < SA.map->num_partitions; i++)
4465 {
4466 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4468 if (TREE_CODE (var) != VAR_DECL
4469 && !SA.partition_to_pseudo[i])
4470 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4471 gcc_assert (SA.partition_to_pseudo[i]);
4473 /* If this decl was marked as living in multiple places, reset
4474 this now to NULL. */
4475 if (DECL_RTL_IF_SET (var) == pc_rtx)
4476 SET_DECL_RTL (var, NULL);
4478 /* Some RTL parts really want to look at DECL_RTL(x) when x
4479 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4480 SET_DECL_RTL here to make this available, but that would mean
4481 selecting one of the potentially many RTLs for one DECL.  Instead
4482 of doing that we simply reset the MEM_EXPR of the RTL in question,
4483 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4484 if (!DECL_RTL_SET_P (var))
4485 {
4486 if (MEM_P (SA.partition_to_pseudo[i]))
4487 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4488 }
4489 }
4491 /* If we have a class containing differently aligned pointers
4492 we need to merge those into the corresponding RTL pointer
4493 alignment.  */
4494 for (i = 1; i < num_ssa_names; i++)
4495 {
4496 tree name = ssa_name (i);
4497 int part;
4498 rtx r;
4500 if (!name
4501 || !POINTER_TYPE_P (TREE_TYPE (name))
4502 /* We might have generated new SSA names in
4503 update_alias_info_with_stack_vars.  They will have a NULL
4504 defining statement, and won't be part of the partitioning,
4505 so ignore those.  */
4506 || !SSA_NAME_DEF_STMT (name))
4507 continue;
4508 part = var_to_partition (SA.map, name);
4509 if (part == NO_PARTITION)
4510 continue;
4511 r = SA.partition_to_pseudo[part];
4512 if (REG_P (r))
4513 mark_reg_pointer (r, get_pointer_alignment (name));
4514 }
4516 /* If this function is `main', emit a call to `__main'
4517 to run global initializers, etc. */
4518 if (DECL_NAME (current_function_decl)
4519 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4520 && DECL_FILE_SCOPE_P (current_function_decl))
4521 expand_main_function ();
4523 /* Initialize the stack_protect_guard field. This must happen after the
4524 call to __main (if any) so that the external decl is initialized. */
4525 if (crtl->stack_protect_guard)
4526 stack_protect_prologue ();
4528 expand_phi_nodes (&SA);
4530 /* Register rtl specific functions for cfg. */
4531 rtl_register_cfg_hooks ();
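/* From here on the generic CFG routines go through the RTL hooks, even
   though most blocks still contain GIMPLE until they are expanded below;
   this is the mixed GIMPLE/RTL state described above gimple_expand_cfg.  */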
4533 init_block = construct_init_block ();
4535 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4536 remaining edges later. */
4537 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4538 e->flags &= ~EDGE_EXECUTABLE;
4540 lab_rtx_for_bb = pointer_map_create ();
4541 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4542 bb = expand_gimple_basic_block (bb);
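/* expand_gimple_basic_block can split BB, for example when expanding a
   conditional jump or a tail call, and returns the last block it created,
   so the iteration continues from the block it hands back.  */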
4544 if (MAY_HAVE_DEBUG_INSNS)
4545 expand_debug_locations ();
4547 execute_free_datastructures ();
4548 timevar_push (TV_OUT_OF_SSA);
4549 finish_out_of_ssa (&SA);
4550 timevar_pop (TV_OUT_OF_SSA);
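/* finish_out_of_ssa releases the out-of-SSA bookkeeping (partition map
   and friends); only the in_ssa_p flag cleared below still claims SSA
   form at this point.  */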
4552 timevar_push (TV_POST_EXPAND);
4553 /* We are no longer in SSA form. */
4554 cfun->gimple_df->in_ssa_p = false;
4556 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
4557 conservatively to true until they are all profile aware. */
4558 pointer_map_destroy (lab_rtx_for_bb);
4561 construct_exit_block ();
4562 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4563 insn_locators_finalize ();
4565 /* Zap the tree EH table. */
4566 set_eh_throw_stmt_table (cfun, NULL);
4568 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4569 to split edges, which edge insertions might need to do.  */
4570 rebuild_jump_labels (get_insns ());
4572 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4576 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4580 rebuild_jump_labels_chain (e->insns.r);
4581 /* Avoid putting insns before parm_birth_insn. */
4582 if (e->src == ENTRY_BLOCK_PTR
4583 && single_succ_p (ENTRY_BLOCK_PTR)
4584 && parm_birth_insn)
4585 {
4586 rtx insns = e->insns.r;
4587 e->insns.r = NULL_RTX;
4588 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4589 }
4590 else
4591 commit_one_edge_insertion (e);
4598 /* We're done expanding trees to RTL. */
4599 currently_expanding_to_rtl = 0;
4601 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4605 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4607 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4608 e->flags &= ~EDGE_EXECUTABLE;
4610 /* At the moment not all abnormal edges match the RTL
4611 representation.  It is safe to remove them here as
4612 find_many_sub_basic_blocks will rediscover them.
4613 In the future we should get this fixed properly.  */
4614 if ((e->flags & EDGE_ABNORMAL)
4615 && !(e->flags & EDGE_SIBCALL))
4616 remove_edge (e);
4617 else
4618 ei_next (&ei);
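/* Expansion can create new control flow in the middle of what used to be
   a single GIMPLE block (for example from calls that can throw), so
   rescan all blocks for sub-basic-blocks and purge edges that did not
   survive the translation.  */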
4622 blocks = sbitmap_alloc (last_basic_block);
4623 sbitmap_ones (blocks);
4624 find_many_sub_basic_blocks (blocks);
4625 sbitmap_free (blocks);
4626 purge_all_dead_edges ();
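/* expand_stack_alignment (see above) decides whether this function needs
   dynamic stack realignment and, if so, sets up the DRAP register used
   to reach the incoming arguments.  */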
4630 expand_stack_alignment ();
4632 #ifdef ENABLE_CHECKING
4633 verify_flow_info ();
4634 #endif
4636 /* There's no need to defer outputting this function any more; we
4637 know we want to output it. */
4638 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4640 /* Now that we're done expanding trees to RTL, we shouldn't have any
4641 more CONCATs anywhere. */
4642 generating_concat_p = 0;
4644 if (dump_file)
4645 fprintf (dump_file,
4647 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4648 /* And the pass manager will dump RTL for us.  */
4651 /* If we're emitting a nested function, make sure its parent gets
4652 emitted as well. Doing otherwise confuses debug info. */
4653 {
4654 tree parent;
4655 for (parent = DECL_CONTEXT (current_function_decl);
4656 parent != NULL_TREE;
4657 parent = get_containing_scope (parent))
4658 if (TREE_CODE (parent) == FUNCTION_DECL)
4659 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4660 }
4662 /* We are now committed to emitting code for this function.  Do any
4663 preparation, such as emitting abstract debug info for the inline
4664 function before it gets mangled by optimization.  */
4665 if (cgraph_function_possibly_inlined_p (current_function_decl))
4666 (*debug_hooks->outlining_inline_function) (current_function_decl);
4668 TREE_ASM_WRITTEN (current_function_decl) = 1;
4670 /* After expanding, the return labels are no longer needed. */
4671 return_label = NULL;
4672 naked_return_label = NULL;
4674 /* After expanding, the tm_restart map is no longer needed. */
4675 if (cfun->gimple_df->tm_restart)
4676 {
4677 htab_delete (cfun->gimple_df->tm_restart);
4678 cfun->gimple_df->tm_restart = NULL;
4679 }
4681 /* Tag the blocks with a depth number so that change_scope can find
4682 the common parent easily. */
4683 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4684 default_rtl_profile ();
4685 timevar_pop (TV_POST_EXPAND);
4686 return 0;
4687 }
4689 struct rtl_opt_pass pass_expand =
4690 {
4691 {
4692 RTL_PASS,
4693 "expand", /* name */
4694 NULL, /* gate */
4695 gimple_expand_cfg, /* execute */
4696 NULL, /* sub */
4697 NULL, /* next */
4698 0, /* static_pass_number */
4699 TV_EXPAND, /* tv_id */
4700 PROP_ssa | PROP_gimple_leh | PROP_cfg
4701 | PROP_gimple_lcx, /* properties_required */
4702 PROP_rtl, /* properties_provided */
4703 PROP_ssa | PROP_trees, /* properties_destroyed */
4704 TODO_verify_ssa | TODO_verify_flow
4705 | TODO_verify_stmts, /* todo_flags_start */
4706 TODO_ggc_collect /* todo_flags_finish */
4707 }
4708 };