/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
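/* For example, SSAVAR applied to an SSA name such as a_5 yields the
   underlying VAR_DECL a, while applied to a plain decl it is the
   identity.  */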
/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
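/* As an illustrative example (made-up values, not from any particular
   target): if STARTING_FRAME_OFFSET were 4 and PREFERRED_STACK_BOUNDARY
   128 bits (16 bytes), expand_used_vars below computes off = 4 and
   frame_phase = 12, so frame offsets of 4, 20, 36, ... satisfy
   (frame_offset + frame_phase) % 16 == 0.  */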
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
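/* A worked example of the downward-growing case, with made-up numbers:
   frame_phase == 0, frame_offset == -12, size == 4, align == 8.  Then
   new_frame_offset = (-12 - 4) & -8 = -16, so the variable is placed at
   frame offset -16 and frame_offset advances from -12 to -16.  */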
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This over-approximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     of it upward.  But it's conservatively correct as a variable never
     can hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */
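  /* An illustrative sketch (not from the sources): for
       { char a[64]; use (&a); }  { char b[64]; use (&b); }
     gimplify emits a = {CLOBBER} when a's scope ends, so a's approximated
     live range ends before b's begins and the two arrays may share a
     slot; had the ranges overlapped, a conflict would be recorded
     instead.  */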
  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block);
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK (rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
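/* For instance (an illustrative sketch, not from the sources): given
   variables A (32 bytes), B (24) and C (16), where only A and B have
   overlapping live ranges, the sorted order is A, B, C; A unions C
   (no conflict), so A and C share one 32-byte slot while B gets its
   own, for a total of 56 bytes instead of 72.  */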
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
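      /* OFFSET & -OFFSET isolates the lowest set bit, i.e. the largest
         power of two dividing OFFSET; converted to bits this bounds the
         alignment the slot is actually placed with.  */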
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert(!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY       1
#define SPCT_HAS_SMALL_CHAR_ARRAY       2
#define SPCT_HAS_ARRAY                  4
#define SPCT_HAS_AGGREGATE              8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
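/* To summarize the phases as implemented above: with -fstack-protector-all
   (flag_stack_protect == 2), decls containing only character arrays get
   phase 1 and other array-containing decls phase 2; otherwise only decls
   with a large character array are segregated, into phase 1.  */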
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  struct pointer_map_t *ssa_name_decls;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  ssa_name_decls = pointer_map_create ();
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
          if (!*slot)
            *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
          replace_ssa_name_symbol (var, (tree) *slot);
        }

      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }
  pointer_map_destroy (ssa_name_decls);
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }
  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);
    }

  fini_vars_expansion ();
  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);
  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
        set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }
  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  if (current_loops && bb->loop_father)
    add_bb_to_loop (new_bb, bb->loop_father);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple stmt)
{
  struct tm_restart_node dummy;
  void **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
      tree list = n->label_or_list;
      rtx insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
        for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
          {
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
            expand_debug_expr (dtemp);
          }
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);

  mark_transaction_restart_calls (stmt);
}
2017 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2018 STMT that doesn't require special handling for outgoing edges. That
2019 is no tailcalls and no GIMPLE_COND. */
2022 expand_gimple_stmt_1 (gimple stmt)
2026 set_curr_insn_location (gimple_location (stmt));
2028 switch (gimple_code (stmt))
2031 op0 = gimple_goto_dest (stmt);
2032 if (TREE_CODE (op0) == LABEL_DECL)
2035 expand_computed_goto (op0);
2038 expand_label (gimple_label_label (stmt));
2041 case GIMPLE_PREDICT:
2047 expand_asm_stmt (stmt);
2050 expand_call_stmt (stmt);
2054 op0 = gimple_return_retval (stmt);
2056 if (op0 && op0 != error_mark_node)
2058 tree result = DECL_RESULT (current_function_decl);
2060 /* If we are not returning the current function's RESULT_DECL,
2061 build an assignment to it. */
2064 /* I believe that a function's RESULT_DECL is unique. */
2065 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2067 /* ??? We'd like to use simply expand_assignment here,
2068 but this fails if the value is of BLKmode but the return
2069 decl is a register. expand_return has special handling
2070 for this combination, which eventually should move
2071 to common code. See comments there. Until then, let's
2072 build a modify expression :-/ */
2073 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2078 expand_null_return ();
2080 expand_return (op0);
2085 tree lhs = gimple_assign_lhs (stmt);
2087 /* Tree expand used to fiddle with |= and &= of two bitfield
2088 COMPONENT_REFs here. This can't happen with gimple, the LHS
2089 of binary assigns must be a gimple reg. */
2091 if (TREE_CODE (lhs) != SSA_NAME
2092 || get_gimple_rhs_class (gimple_expr_code (stmt))
2093 == GIMPLE_SINGLE_RHS)
2095 tree rhs = gimple_assign_rhs1 (stmt);
2096 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2097 == GIMPLE_SINGLE_RHS);
2098 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2099 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2100 if (TREE_CLOBBER_P (rhs))
/* This is a clobber to mark the going out of scope for
   this LHS.  */
2105 expand_assignment (lhs, rhs,
2106 gimple_assign_nontemporal_move_p (stmt));
2111 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2112 struct separate_ops ops;
2113 bool promoted = false;
2115 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2116 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2119 ops.code = gimple_assign_rhs_code (stmt);
2120 ops.type = TREE_TYPE (lhs);
2121 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2123 case GIMPLE_TERNARY_RHS:
2124 ops.op2 = gimple_assign_rhs3 (stmt);
2126 case GIMPLE_BINARY_RHS:
2127 ops.op1 = gimple_assign_rhs2 (stmt);
2129 case GIMPLE_UNARY_RHS:
2130 ops.op0 = gimple_assign_rhs1 (stmt);
2135 ops.location = gimple_location (stmt);
/* If we want to use a nontemporal store, force the value into a
   register first.  If we store into a promoted register,
   don't directly expand to target.  */
2140 temp = nontemporal || promoted ? NULL_RTX : target;
2141 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2148 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2149 /* If TEMP is a VOIDmode constant, use convert_modes to make
2150 sure that we properly convert it. */
2151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2153 temp = convert_modes (GET_MODE (target),
2154 TYPE_MODE (ops.type),
2156 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2157 GET_MODE (target), temp, unsignedp);
2160 convert_move (SUBREG_REG (target), temp, unsignedp);
2162 else if (nontemporal && emit_storent_insn (target, temp))
2166 temp = force_operand (temp, target);
2168 emit_move_insn (target, temp);
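	/* A hypothetical instance of the promoted case above: on a target
	   that promotes SImode values to DImode registers, the LHS may
	   expand to (subreg/s:SI (reg:DI 100) 0) with SUBREG_PROMOTED_VAR_P
	   set; the value is then moved with convert_move into the DImode
	   SUBREG_REG instead of being stored through the SImode subreg
	   directly.  */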
2179 /* Expand one gimple statement STMT and return the last RTL instruction
2180 before any of the newly generated ones.
2182 In addition to generating the necessary RTL instructions this also
2183 sets REG_EH_REGION notes if necessary and sets the current source
2184 location for diagnostics. */
2187 expand_gimple_stmt (gimple stmt)
2189 location_t saved_location = input_location;
2190 rtx last = get_last_insn ();
2195 /* We need to save and restore the current source location so that errors
2196 discovered during expansion are emitted with the right location. But
2197 it would be better if the diagnostic routines used the source location
2198 embedded in the tree nodes rather than globals. */
2199 if (gimple_has_location (stmt))
2200 input_location = gimple_location (stmt);
2202 expand_gimple_stmt_1 (stmt);
2204 /* Free any temporaries used to evaluate this statement. */
2207 input_location = saved_location;
2209 /* Mark all insns that may trap. */
2210 lp_nr = lookup_stmt_eh_lp (stmt);
2214 for (insn = next_real_insn (last); insn;
2215 insn = next_real_insn (insn))
2217 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2218 /* If we want exceptions for non-call insns, any
2219 may_trap_p instruction may throw. */
2220 && GET_CODE (PATTERN (insn)) != CLOBBER
2221 && GET_CODE (PATTERN (insn)) != USE
2222 && insn_could_throw_p (insn))
2223 make_reg_eh_region_note (insn, 0, lp_nr);
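  /* For example (illustrative): if STMT is a division that may trap
     and lives in an EH region whose landing pad number is 1, every
     may_trap_p insn expanded from it gets a REG_EH_REGION note with
     value 1, keeping the RTL exception edges consistent with the
     GIMPLE ones.  */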
2230 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2231 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2232 generated a tail call (something that might be denied by the ABI
2233 rules governing the call; see calls.c).
2235 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2236 can still reach the rest of BB. The case here is __builtin_sqrt,
2237 where the NaN result goes through the external function (with a
2238 tailcall) and the normal result happens via a sqrt instruction. */
2241 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2249 last2 = last = expand_gimple_stmt (stmt);
2251 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2252 if (CALL_P (last) && SIBLING_CALL_P (last))
2255 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2257 *can_fallthru = true;
2261 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2262 Any instructions emitted here are about to be deleted. */
2263 do_pending_stack_adjust ();
2265 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
/* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
   EH or abnormal edges, we shouldn't have created a tail call in
   the first place.  So it seems to me we should just be removing
   all edges here, or redirecting the existing fallthru edge to
   the exit block.  */
2275 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2277 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2279 if (e->dest != EXIT_BLOCK_PTR)
2281 e->dest->count -= e->count;
2282 e->dest->frequency -= EDGE_FREQUENCY (e);
2283 if (e->dest->count < 0)
2285 if (e->dest->frequency < 0)
2286 e->dest->frequency = 0;
2289 probability += e->probability;
2296 /* This is somewhat ugly: the call_expr expander often emits instructions
2297 after the sibcall (to perform the function return). These confuse the
2298 find_many_sub_basic_blocks code, so we need to get rid of these. */
2299 last = NEXT_INSN (last);
2300 gcc_assert (BARRIER_P (last));
2302 *can_fallthru = false;
2303 while (NEXT_INSN (last))
/* For instance, the sqrt builtin expander may emit an if whose
   then-arm contains the sibcall and whose else-arm starts with a
   label.  */
2307 if (LABEL_P (NEXT_INSN (last)))
2309 *can_fallthru = true;
2312 delete_insn (NEXT_INSN (last));
2315 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2316 e->probability += probability;
2319 update_bb_for_insn (bb);
2321 if (NEXT_INSN (last))
2323 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2326 if (BARRIER_P (last))
2327 BB_END (bb) = PREV_INSN (last);
2330 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2335 /* Return the difference between the floor and the truncated result of
2336 a signed division by OP1 with remainder MOD. */
2338 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2340 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2341 return gen_rtx_IF_THEN_ELSE
2342 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2343 gen_rtx_IF_THEN_ELSE
2344 (mode, gen_rtx_LT (BImode,
2345 gen_rtx_DIV (mode, op1, mod),
2347 constm1_rtx, const0_rtx),
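/* A worked example of this adjustment: for -7 / 2 the truncated
   quotient is -3 with remainder -1.  Since mod != 0 and
   op1 / mod = 2 / -1 < 0, the adjustment is -1, giving
   -3 + -1 = -4 = floor (-7 / 2).  */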
2351 /* Return the difference between the ceil and the truncated result of
2352 a signed division by OP1 with remainder MOD. */
2354 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2356 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2357 return gen_rtx_IF_THEN_ELSE
2358 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2359 gen_rtx_IF_THEN_ELSE
2360 (mode, gen_rtx_GT (BImode,
2361 gen_rtx_DIV (mode, op1, mod),
2363 const1_rtx, const0_rtx),
2367 /* Return the difference between the ceil and the truncated result of
2368 an unsigned division by OP1 with remainder MOD. */
2370 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2372 /* (mod != 0 ? 1 : 0) */
2373 return gen_rtx_IF_THEN_ELSE
2374 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2375 const1_rtx, const0_rtx);
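/* A worked example: for unsigned 7 / 2 the truncated quotient is 3
   with remainder 1.  Since mod != 0 the adjustment is 1, giving
   3 + 1 = 4 = ceil (7 / 2).  */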
2378 /* Return the difference between the rounded and the truncated result
2379 of a signed division by OP1 with remainder MOD. Halfway cases are
2380 rounded away from zero, rather than to the nearest even number. */
2382 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2384 /* (abs (mod) >= abs (op1) - abs (mod)
? (op1 / mod > 0 ? 1 : -1)
	: 0) */
2387 return gen_rtx_IF_THEN_ELSE
2388 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2389 gen_rtx_MINUS (mode,
2390 gen_rtx_ABS (mode, op1),
2391 gen_rtx_ABS (mode, mod))),
2392 gen_rtx_IF_THEN_ELSE
2393 (mode, gen_rtx_GT (BImode,
2394 gen_rtx_DIV (mode, op1, mod),
2396 const1_rtx, constm1_rtx),
2400 /* Return the difference between the rounded and the truncated result
of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */
2405 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2407 /* (mod >= op1 - mod ? 1 : 0) */
2408 return gen_rtx_IF_THEN_ELSE
2409 (mode, gen_rtx_GE (BImode, mod,
2410 gen_rtx_MINUS (mode, op1, mod)),
2411 const1_rtx, const0_rtx);
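/* A worked example: for unsigned 7 / 2 we have mod = 1 and
   op1 - mod = 1, so mod >= op1 - mod holds and the adjustment is 1:
   3 + 1 = 4, i.e. 3.5 rounded away from zero.  For 7 / 3,
   mod = 1 < op1 - mod = 2, so the truncated quotient 2 is already
   the rounded result.  */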
/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   insns.  */
2418 convert_debug_memory_address (enum machine_mode mode, rtx x,
2421 enum machine_mode xmode = GET_MODE (x);
2423 #ifndef POINTERS_EXTEND_UNSIGNED
2424 gcc_assert (mode == Pmode
2425 || mode == targetm.addr_space.address_mode (as));
2426 gcc_assert (xmode == mode || xmode == VOIDmode);
2430 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2432 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2435 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2436 x = simplify_gen_subreg (mode, x, xmode,
2437 subreg_lowpart_offset
2439 else if (POINTERS_EXTEND_UNSIGNED > 0)
2440 x = gen_rtx_ZERO_EXTEND (mode, x);
2441 else if (!POINTERS_EXTEND_UNSIGNED)
2442 x = gen_rtx_SIGN_EXTEND (mode, x);
2445 switch (GET_CODE (x))
2448 if ((SUBREG_PROMOTED_VAR_P (x)
2449 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2450 || (GET_CODE (SUBREG_REG (x)) == PLUS
2451 && REG_P (XEXP (SUBREG_REG (x), 0))
2452 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2453 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2454 && GET_MODE (SUBREG_REG (x)) == mode)
2455 return SUBREG_REG (x);
2458 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2459 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2462 temp = shallow_copy_rtx (x);
2463 PUT_MODE (temp, mode);
2466 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2468 temp = gen_rtx_CONST (mode, temp);
2472 if (CONST_INT_P (XEXP (x, 1)))
2474 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2476 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
/* Don't know how to express ptr_extend as an operation in debug info.  */
2485 #endif /* POINTERS_EXTEND_UNSIGNED */
2490 /* Return an RTX equivalent to the value of the parameter DECL. */
2493 expand_debug_parm_decl (tree decl)
2495 rtx incoming = DECL_INCOMING_RTL (decl);
2498 && GET_MODE (incoming) != BLKmode
2499 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2500 || (MEM_P (incoming)
2501 && REG_P (XEXP (incoming, 0))
2502 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2504 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2506 #ifdef HAVE_window_save
2507 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2508 If the target machine has an explicit window save instruction, the
2509 actual entry value is the corresponding OUTGOING_REGNO instead. */
2510 if (REG_P (incoming)
2511 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2513 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2514 OUTGOING_REGNO (REGNO (incoming)), 0);
2515 else if (MEM_P (incoming))
2517 rtx reg = XEXP (incoming, 0);
2518 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2520 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2521 incoming = replace_equiv_address_nv (incoming, reg);
2526 ENTRY_VALUE_EXP (rtl) = incoming;
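      /* The resulting rtl might look like (entry_value:DI (reg:DI 5 di))
	 for a parameter arriving in a hard register (the register is
	 illustrative); var-tracking later renders such expressions as
	 DW_OP_GNU_entry_value in the debug info.  */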
2531 && GET_MODE (incoming) != BLKmode
2532 && !TREE_ADDRESSABLE (decl)
2534 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2535 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2536 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2537 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2543 /* Return an RTX equivalent to the value of the tree expression EXP. */
2546 expand_debug_expr (tree exp)
2548 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2549 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2550 enum machine_mode inner_mode = VOIDmode;
2551 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2554 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2556 case tcc_expression:
2557 switch (TREE_CODE (exp))
2561 case WIDEN_MULT_PLUS_EXPR:
2562 case WIDEN_MULT_MINUS_EXPR:
2566 case TRUTH_ANDIF_EXPR:
2567 case TRUTH_ORIF_EXPR:
2568 case TRUTH_AND_EXPR:
2570 case TRUTH_XOR_EXPR:
2573 case TRUTH_NOT_EXPR:
2582 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2589 case tcc_comparison:
2590 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2597 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2598 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2608 case tcc_exceptional:
2609 case tcc_declaration:
2615 switch (TREE_CODE (exp))
2618 if (!lookup_constant_def (exp))
2620 if (strlen (TREE_STRING_POINTER (exp)) + 1
2621 != (size_t) TREE_STRING_LENGTH (exp))
2623 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2624 op0 = gen_rtx_MEM (BLKmode, op0);
2625 set_mem_attributes (op0, exp, 0);
2628 /* Fall through... */
2633 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2637 gcc_assert (COMPLEX_MODE_P (mode));
2638 op0 = expand_debug_expr (TREE_REALPART (exp));
2639 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2640 return gen_rtx_CONCAT (mode, op0, op1);
2642 case DEBUG_EXPR_DECL:
2643 op0 = DECL_RTL_IF_SET (exp);
2648 op0 = gen_rtx_DEBUG_EXPR (mode);
2649 DEBUG_EXPR_TREE_DECL (op0) = exp;
2650 SET_DECL_RTL (exp, op0);
2660 op0 = DECL_RTL_IF_SET (exp);
2662 /* This decl was probably optimized away. */
2665 if (TREE_CODE (exp) != VAR_DECL
2666 || DECL_EXTERNAL (exp)
2667 || !TREE_STATIC (exp)
2669 || DECL_HARD_REGISTER (exp)
2670 || DECL_IN_CONSTANT_POOL (exp)
2671 || mode == VOIDmode)
2674 op0 = make_decl_rtl_for_debug (exp);
2676 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2677 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2681 op0 = copy_rtx (op0);
2683 if (GET_MODE (op0) == BLKmode
/* If op0 is not BLKmode, but MODE is BLKmode, the
   adjust_address_nv below would ICE.  While it is likely a FE bug,
   try to be robust here.  See PR43166.  */
2688 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2690 gcc_assert (MEM_P (op0));
2691 op0 = adjust_address_nv (op0, mode, 0);
2702 inner_mode = GET_MODE (op0);
2704 if (mode == inner_mode)
2707 if (inner_mode == VOIDmode)
2709 if (TREE_CODE (exp) == SSA_NAME)
2710 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2712 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2713 if (mode == inner_mode)
2717 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2719 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2720 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2721 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2722 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2724 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2726 else if (FLOAT_MODE_P (mode))
2728 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2729 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2730 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2732 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2734 else if (FLOAT_MODE_P (inner_mode))
2737 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2739 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2741 else if (CONSTANT_P (op0)
2742 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2743 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2744 subreg_lowpart_offset (mode,
2746 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2747 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2749 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2751 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
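      /* In other words (an illustrative sketch): widening an SImode debug
	 value to DImode merely wraps it in (sign_extend:DI ...) or
	 (zero_extend:DI ...) via simplify_gen_unary; no insns are emitted,
	 since a debug expression only has to describe a value, not
	 compute it.  */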
2757 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2759 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2760 TREE_OPERAND (exp, 0),
2761 TREE_OPERAND (exp, 1));
2763 return expand_debug_expr (newexp);
2767 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2768 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2772 if (TREE_CODE (exp) == MEM_REF)
2774 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2775 || (GET_CODE (op0) == PLUS
2776 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2777 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2778 Instead just use get_inner_reference. */
2781 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2782 if (!op1 || !CONST_INT_P (op1))
2785 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2788 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2789 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2791 as = ADDR_SPACE_GENERIC;
2793 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2795 if (op0 == NULL_RTX)
2798 op0 = gen_rtx_MEM (mode, op0);
2799 set_mem_attributes (op0, exp, 0);
2800 if (TREE_CODE (exp) == MEM_REF
2801 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2802 set_mem_expr (op0, NULL_TREE);
2803 set_mem_addr_space (op0, as);
2807 case TARGET_MEM_REF:
2808 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2809 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2812 op0 = expand_debug_expr
2813 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2817 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2818 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2820 as = ADDR_SPACE_GENERIC;
2822 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2824 if (op0 == NULL_RTX)
2827 op0 = gen_rtx_MEM (mode, op0);
2829 set_mem_attributes (op0, exp, 0);
2830 set_mem_addr_space (op0, as);
2836 case ARRAY_RANGE_REF:
2841 case VIEW_CONVERT_EXPR:
2843 enum machine_mode mode1;
2844 HOST_WIDE_INT bitsize, bitpos;
2847 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2848 &mode1, &unsignedp, &volatilep, false);
2854 orig_op0 = op0 = expand_debug_expr (tem);
2861 enum machine_mode addrmode, offmode;
2866 op0 = XEXP (op0, 0);
2867 addrmode = GET_MODE (op0);
2868 if (addrmode == VOIDmode)
2871 op1 = expand_debug_expr (offset);
2875 offmode = GET_MODE (op1);
2876 if (offmode == VOIDmode)
2877 offmode = TYPE_MODE (TREE_TYPE (offset));
2879 if (addrmode != offmode)
2880 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2881 subreg_lowpart_offset (addrmode,
/* Don't use offset_address here, we don't need a
   recognizable address, and we don't want to generate
   code.  */
2887 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2893 if (mode1 == VOIDmode)
2895 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2896 if (bitpos >= BITS_PER_UNIT)
2898 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2899 bitpos %= BITS_PER_UNIT;
2901 else if (bitpos < 0)
2904 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2905 op0 = adjust_address_nv (op0, mode1, units);
2906 bitpos += units * BITS_PER_UNIT;
2908 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2909 op0 = adjust_address_nv (op0, mode, 0);
2910 else if (GET_MODE (op0) != mode1)
2911 op0 = adjust_address_nv (op0, mode1, 0);
2913 op0 = copy_rtx (op0);
2914 if (op0 == orig_op0)
2915 op0 = shallow_copy_rtx (op0);
2916 set_mem_attributes (op0, exp, 0);
2919 if (bitpos == 0 && mode == GET_MODE (op0))
2925 if (GET_MODE (op0) == BLKmode)
2928 if ((bitpos % BITS_PER_UNIT) == 0
2929 && bitsize == GET_MODE_BITSIZE (mode1))
2931 enum machine_mode opmode = GET_MODE (op0);
2933 if (opmode == VOIDmode)
2934 opmode = TYPE_MODE (TREE_TYPE (tem));
/* This condition may hold if we're expanding the address
   right past the end of an array that turned out not to
   be addressable (i.e., the address was only computed in
   debug stmts).  The gen_subreg below would rightfully
   crash, and the address doesn't really exist, so just
   fail.  */
2942 if (bitpos >= GET_MODE_BITSIZE (opmode))
2945 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2946 return simplify_gen_subreg (mode, op0, opmode,
2947 bitpos / BITS_PER_UNIT);
2950 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2951 && TYPE_UNSIGNED (TREE_TYPE (exp))
2953 : ZERO_EXTRACT, mode,
2954 GET_MODE (op0) != VOIDmode
2956 : TYPE_MODE (TREE_TYPE (tem)),
2957 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2961 return simplify_gen_unary (ABS, mode, op0, mode);
2964 return simplify_gen_unary (NEG, mode, op0, mode);
2967 return simplify_gen_unary (NOT, mode, op0, mode);
2970 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2972 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2975 case FIX_TRUNC_EXPR:
2976 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2979 case POINTER_PLUS_EXPR:
/* For the rare target where pointers are not the same size as
   size_t, we need to check for mis-matched modes and correct
   the offset.  */
2984 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2985 && GET_MODE (op0) != GET_MODE (op1))
2987 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2988 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2991 /* We always sign-extend, regardless of the signedness of
2992 the operand, because the operand is always unsigned
2993 here even if the original C expression is signed. */
2994 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2999 return simplify_gen_binary (PLUS, mode, op0, op1);
3002 return simplify_gen_binary (MINUS, mode, op0, op1);
3005 return simplify_gen_binary (MULT, mode, op0, op1);
3008 case TRUNC_DIV_EXPR:
3009 case EXACT_DIV_EXPR:
3011 return simplify_gen_binary (UDIV, mode, op0, op1);
3013 return simplify_gen_binary (DIV, mode, op0, op1);
3015 case TRUNC_MOD_EXPR:
3016 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3018 case FLOOR_DIV_EXPR:
3020 return simplify_gen_binary (UDIV, mode, op0, op1);
3023 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3024 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3025 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3026 return simplify_gen_binary (PLUS, mode, div, adj);
3029 case FLOOR_MOD_EXPR:
3031 return simplify_gen_binary (UMOD, mode, op0, op1);
3034 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3035 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3036 adj = simplify_gen_unary (NEG, mode,
3037 simplify_gen_binary (MULT, mode, adj, op1),
3039 return simplify_gen_binary (PLUS, mode, mod, adj);
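      /* A worked example of the signed floor-mod case above: for -7 mod 2
	 the truncated remainder is -1 and floor_sdiv_adjust yields -1, so
	 adj = -(-1 * 2) = 2 and -1 + 2 = 1, the mathematical floor-mod
	 result.  */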
3045 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3046 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3047 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3048 return simplify_gen_binary (PLUS, mode, div, adj);
3052 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3053 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3054 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3055 return simplify_gen_binary (PLUS, mode, div, adj);
3061 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3062 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3063 adj = simplify_gen_unary (NEG, mode,
3064 simplify_gen_binary (MULT, mode, adj, op1),
3066 return simplify_gen_binary (PLUS, mode, mod, adj);
3070 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3071 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3072 adj = simplify_gen_unary (NEG, mode,
3073 simplify_gen_binary (MULT, mode, adj, op1),
3075 return simplify_gen_binary (PLUS, mode, mod, adj);
3078 case ROUND_DIV_EXPR:
3081 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3082 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3083 rtx adj = round_udiv_adjust (mode, mod, op1);
3084 return simplify_gen_binary (PLUS, mode, div, adj);
3088 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3089 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3090 rtx adj = round_sdiv_adjust (mode, mod, op1);
3091 return simplify_gen_binary (PLUS, mode, div, adj);
3094 case ROUND_MOD_EXPR:
3097 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3098 rtx adj = round_udiv_adjust (mode, mod, op1);
3099 adj = simplify_gen_unary (NEG, mode,
3100 simplify_gen_binary (MULT, mode, adj, op1),
3102 return simplify_gen_binary (PLUS, mode, mod, adj);
3106 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3107 rtx adj = round_sdiv_adjust (mode, mod, op1);
3108 adj = simplify_gen_unary (NEG, mode,
3109 simplify_gen_binary (MULT, mode, adj, op1),
3111 return simplify_gen_binary (PLUS, mode, mod, adj);
3115 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3119 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3121 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3124 return simplify_gen_binary (ROTATE, mode, op0, op1);
3127 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3130 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3133 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3136 case TRUTH_AND_EXPR:
3137 return simplify_gen_binary (AND, mode, op0, op1);
3141 return simplify_gen_binary (IOR, mode, op0, op1);
3144 case TRUTH_XOR_EXPR:
3145 return simplify_gen_binary (XOR, mode, op0, op1);
3147 case TRUTH_ANDIF_EXPR:
3148 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3150 case TRUTH_ORIF_EXPR:
3151 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3153 case TRUTH_NOT_EXPR:
3154 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3157 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3161 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3165 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3169 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3173 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3176 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3178 case UNORDERED_EXPR:
3179 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3182 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3185 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3188 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3191 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3194 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3197 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3200 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3203 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3206 gcc_assert (COMPLEX_MODE_P (mode));
3207 if (GET_MODE (op0) == VOIDmode)
3208 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3209 if (GET_MODE (op1) == VOIDmode)
3210 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3211 return gen_rtx_CONCAT (mode, op0, op1);
3214 if (GET_CODE (op0) == CONCAT)
3215 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3216 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3218 GET_MODE_INNER (mode)));
3221 enum machine_mode imode = GET_MODE_INNER (mode);
3226 re = adjust_address_nv (op0, imode, 0);
3227 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3231 enum machine_mode ifmode = int_mode_for_mode (mode);
3232 enum machine_mode ihmode = int_mode_for_mode (imode);
3234 if (ifmode == BLKmode || ihmode == BLKmode)
3236 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3239 re = gen_rtx_SUBREG (ifmode, re, 0);
3240 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3241 if (imode != ihmode)
3242 re = gen_rtx_SUBREG (imode, re, 0);
3243 im = copy_rtx (op0);
3245 im = gen_rtx_SUBREG (ifmode, im, 0);
3246 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3247 if (imode != ihmode)
3248 im = gen_rtx_SUBREG (imode, im, 0);
3250 im = gen_rtx_NEG (imode, im);
3251 return gen_rtx_CONCAT (mode, re, im);
3255 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3256 if (!op0 || !MEM_P (op0))
3258 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3259 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3260 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3261 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3262 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3263 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3265 if (handled_component_p (TREE_OPERAND (exp, 0)))
3267 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3269 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3270 &bitoffset, &bitsize, &maxsize);
3271 if ((TREE_CODE (decl) == VAR_DECL
3272 || TREE_CODE (decl) == PARM_DECL
3273 || TREE_CODE (decl) == RESULT_DECL)
3274 && (!TREE_ADDRESSABLE (decl)
3275 || target_for_debug_bind (decl))
3276 && (bitoffset % BITS_PER_UNIT) == 0
3278 && bitsize == maxsize)
3280 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3281 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3288 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3289 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3297 op0 = gen_rtx_CONCATN
3298 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3300 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3302 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3305 XVECEXP (op0, 0, i) = op1;
3312 if (TREE_CLOBBER_P (exp))
3314 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3319 op0 = gen_rtx_CONCATN
3320 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3322 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3324 op1 = expand_debug_expr (val);
3327 XVECEXP (op0, 0, i) = op1;
3330 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3332 op1 = expand_debug_expr
3333 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3338 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3339 XVECEXP (op0, 0, i) = op1;
3345 goto flag_unsupported;
3348 /* ??? Maybe handle some builtins? */
3353 gimple g = get_gimple_for_ssa_name (exp);
3356 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3362 int part = var_to_partition (SA.map, exp);
3364 if (part == NO_PARTITION)
/* If this is a reference to an incoming value of a parameter
   that is never used in the code, use the PARM_DECL's
   DECL_RTL if set.  */
3370 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3371 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3373 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3376 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3383 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3385 op0 = copy_rtx (SA.partition_to_pseudo[part]);
/* Vector operations.  For most of these codes there are no
   corresponding RTL codes.  */
3394 case REALIGN_LOAD_EXPR:
3395 case REDUC_MAX_EXPR:
3396 case REDUC_MIN_EXPR:
3397 case REDUC_PLUS_EXPR:
3399 case VEC_LSHIFT_EXPR:
3400 case VEC_PACK_FIX_TRUNC_EXPR:
3401 case VEC_PACK_SAT_EXPR:
3402 case VEC_PACK_TRUNC_EXPR:
3403 case VEC_RSHIFT_EXPR:
3404 case VEC_UNPACK_FLOAT_HI_EXPR:
3405 case VEC_UNPACK_FLOAT_LO_EXPR:
3406 case VEC_UNPACK_HI_EXPR:
3407 case VEC_UNPACK_LO_EXPR:
3408 case VEC_WIDEN_MULT_HI_EXPR:
3409 case VEC_WIDEN_MULT_LO_EXPR:
3410 case VEC_WIDEN_MULT_EVEN_EXPR:
3411 case VEC_WIDEN_MULT_ODD_EXPR:
3412 case VEC_WIDEN_LSHIFT_HI_EXPR:
3413 case VEC_WIDEN_LSHIFT_LO_EXPR:
3418 case ADDR_SPACE_CONVERT_EXPR:
3419 case FIXED_CONVERT_EXPR:
3421 case WITH_SIZE_EXPR:
3425 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3426 && SCALAR_INT_MODE_P (mode))
3429 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3431 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3434 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3436 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3438 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3439 return simplify_gen_binary (PLUS, mode, op0, op2);
3443 case WIDEN_MULT_EXPR:
3444 case WIDEN_MULT_PLUS_EXPR:
3445 case WIDEN_MULT_MINUS_EXPR:
3446 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3447 && SCALAR_INT_MODE_P (mode))
3449 inner_mode = GET_MODE (op0);
3450 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3451 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3453 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3454 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3455 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3457 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3458 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3459 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3461 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3462 return simplify_gen_binary (PLUS, mode, op0, op2);
3464 return simplify_gen_binary (MINUS, mode, op2, op0);
3468 case MULT_HIGHPART_EXPR:
3469 /* ??? Similar to the above. */
3472 case WIDEN_SUM_EXPR:
3473 case WIDEN_LSHIFT_EXPR:
3474 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3475 && SCALAR_INT_MODE_P (mode))
3478 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3480 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3482 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3483 ? ASHIFT : PLUS, mode, op0, op1);
3488 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3492 #ifdef ENABLE_CHECKING
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */
3505 expand_debug_source_expr (tree exp)
3508 enum machine_mode mode = VOIDmode, inner_mode;
3510 switch (TREE_CODE (exp))
3514 mode = DECL_MODE (exp);
3515 op0 = expand_debug_parm_decl (exp);
/* See if this isn't an argument that has been completely
   optimized out.  */
3520 if (!DECL_RTL_SET_P (exp)
3521 && !DECL_INCOMING_RTL (exp)
3522 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3525 if (DECL_ABSTRACT_ORIGIN (exp))
3526 aexp = DECL_ABSTRACT_ORIGIN (exp);
3527 if (DECL_CONTEXT (aexp)
3528 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3530 VEC(tree, gc) **debug_args;
3533 #ifdef ENABLE_CHECKING
3535 for (parm = DECL_ARGUMENTS (current_function_decl);
3536 parm; parm = DECL_CHAIN (parm))
3537 gcc_assert (parm != exp
3538 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3540 debug_args = decl_debug_args_lookup (current_function_decl);
3541 if (debug_args != NULL)
3543 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3546 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3556 if (op0 == NULL_RTX)
3559 inner_mode = GET_MODE (op0);
3560 if (mode == inner_mode)
3563 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3565 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3566 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3567 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3568 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3570 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3572 else if (FLOAT_MODE_P (mode))
3574 else if (FLOAT_MODE_P (inner_mode))
3576 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3577 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3579 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3581 else if (CONSTANT_P (op0)
3582 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3583 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3584 subreg_lowpart_offset (mode, inner_mode));
3585 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3586 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3588 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3593 /* Expand the _LOCs in debug insns. We run this after expanding all
3594 regular insns, so that any variables referenced in the function
3595 will have their DECL_RTLs set. */
3598 expand_debug_locations (void)
3601 rtx last = get_last_insn ();
3602 int save_strict_alias = flag_strict_aliasing;
/* New alias sets while setting up memory attributes cause
   -fcompare-debug failures, even though they don't bring about any
   codegen changes.  */
3607 flag_strict_aliasing = 0;
3609 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3610 if (DEBUG_INSN_P (insn))
3612 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3614 enum machine_mode mode;
3616 if (value == NULL_TREE)
3620 if (INSN_VAR_LOCATION_STATUS (insn)
3621 == VAR_INIT_STATUS_UNINITIALIZED)
3622 val = expand_debug_source_expr (value);
3624 val = expand_debug_expr (value);
3625 gcc_assert (last == get_last_insn ());
3629 val = gen_rtx_UNKNOWN_VAR_LOC ();
3632 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3634 gcc_assert (mode == GET_MODE (val)
3635 || (GET_MODE (val) == VOIDmode
3636 && (CONST_INT_P (val)
3637 || GET_CODE (val) == CONST_FIXED
3638 || CONST_DOUBLE_AS_INT_P (val)
3639 || GET_CODE (val) == LABEL_REF)));
3642 INSN_VAR_LOCATION_LOC (insn) = val;
3645 flag_strict_aliasing = save_strict_alias;
3648 /* Expand basic block BB from GIMPLE trees to RTL. */
3651 expand_gimple_basic_block (basic_block bb)
3653 gimple_stmt_iterator gsi;
3662 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3665 /* Note that since we are now transitioning from GIMPLE to RTL, we
3666 cannot use the gsi_*_bb() routines because they expect the basic
3667 block to be in GIMPLE, instead of RTL. Therefore, we need to
3668 access the BB sequence directly. */
3669 stmts = bb_seq (bb);
3670 bb->il.gimple.seq = NULL;
3671 bb->il.gimple.phi_nodes = NULL;
3672 rtl_profile_for_bb (bb);
3673 init_rtl_bb_info (bb);
3674 bb->flags |= BB_RTL;
/* Remove the RETURN_EXPR if we may fall through to the exit
   instead.  */
3678 gsi = gsi_last (stmts);
3679 if (!gsi_end_p (gsi)
3680 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3682 gimple ret_stmt = gsi_stmt (gsi);
3684 gcc_assert (single_succ_p (bb));
3685 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3687 if (bb->next_bb == EXIT_BLOCK_PTR
3688 && !gimple_return_retval (ret_stmt))
3690 gsi_remove (&gsi, false);
3691 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3695 gsi = gsi_start (stmts);
3696 if (!gsi_end_p (gsi))
3698 stmt = gsi_stmt (gsi);
3699 if (gimple_code (stmt) != GIMPLE_LABEL)
3703 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3707 last = get_last_insn ();
3711 expand_gimple_stmt (stmt);
3716 emit_label ((rtx) *elt);
/* Java emits line number notes at the top of labels.
   ??? Make this go away once line number notes are obsoleted.  */
3720 BB_HEAD (bb) = NEXT_INSN (last);
3721 if (NOTE_P (BB_HEAD (bb)))
3722 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3723 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3725 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3728 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3730 NOTE_BASIC_BLOCK (note) = bb;
3732 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3736 stmt = gsi_stmt (gsi);
3738 /* If this statement is a non-debug one, and we generate debug
3739 insns, then this one might be the last real use of a TERed
3740 SSA_NAME, but where there are still some debug uses further
3741 down. Expanding the current SSA name in such further debug
3742 uses by their RHS might lead to wrong debug info, as coalescing
3743 might make the operands of such RHS be placed into the same
3744 pseudo as something else. Like so:
3745 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3749 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
the write to a_2 would actually have clobbered the place which
would hold a_0.
3754 So, instead of that, we recognize the situation, and generate
3755 debug temporaries at the last real use of TERed SSA names:
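	 (an illustrative sketch of the generated sequence:)

	   a_1 = a_0 + 1;
	   #DEBUG D#1 => a_1
	   use (a_1);
	   a_2 = ...;
	   #DEBUG ... => D#1

	 so later debug uses refer to the debug temporary D#1 rather
	 than to a_1's (possibly reused) pseudo.  */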
3762 if (MAY_HAVE_DEBUG_INSNS
3764 && !is_gimple_debug (stmt))
3770 location_t sloc = curr_insn_location ();
3772 /* Look for SSA names that have their last use here (TERed
3773 names always have only one real use). */
3774 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3775 if ((def = get_gimple_for_ssa_name (op)))
3777 imm_use_iterator imm_iter;
3778 use_operand_p use_p;
3779 bool have_debug_uses = false;
3781 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3783 if (gimple_debug_bind_p (USE_STMT (use_p)))
3785 have_debug_uses = true;
3790 if (have_debug_uses)
/* OP is a TERed SSA name, with DEF its defining
   statement, and where OP is used in further debug
   instructions.  Generate a debug temporary, and
   replace all uses of OP in debug insns with that
   temporary.  */
3798 tree value = gimple_assign_rhs_to_tree (def);
3799 tree vexpr = make_node (DEBUG_EXPR_DECL);
3801 enum machine_mode mode;
3803 set_curr_insn_location (gimple_location (def));
3805 DECL_ARTIFICIAL (vexpr) = 1;
3806 TREE_TYPE (vexpr) = TREE_TYPE (value);
3808 mode = DECL_MODE (value);
3810 mode = TYPE_MODE (TREE_TYPE (value));
3811 DECL_MODE (vexpr) = mode;
3813 val = gen_rtx_VAR_LOCATION
3814 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3816 emit_debug_insn (val);
3818 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3820 if (!gimple_debug_bind_p (debugstmt))
3823 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3824 SET_USE (use_p, vexpr);
3826 update_stmt (debugstmt);
3830 set_curr_insn_location (sloc);
3833 currently_expanding_gimple_stmt = stmt;
3835 /* Expand this statement, then evaluate the resulting RTL and
3836 fixup the CFG accordingly. */
3837 if (gimple_code (stmt) == GIMPLE_COND)
3839 new_bb = expand_gimple_cond (bb, stmt);
3843 else if (gimple_debug_bind_p (stmt))
3845 location_t sloc = curr_insn_location ();
3846 gimple_stmt_iterator nsi = gsi;
3850 tree var = gimple_debug_bind_get_var (stmt);
3853 enum machine_mode mode;
3855 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3856 && TREE_CODE (var) != LABEL_DECL
3857 && !target_for_debug_bind (var))
3858 goto delink_debug_stmt;
3860 if (gimple_debug_bind_has_value_p (stmt))
3861 value = gimple_debug_bind_get_value (stmt);
3865 last = get_last_insn ();
3867 set_curr_insn_location (gimple_location (stmt));
3870 mode = DECL_MODE (var);
3872 mode = TYPE_MODE (TREE_TYPE (var));
3874 val = gen_rtx_VAR_LOCATION
3875 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3877 emit_debug_insn (val);
3879 if (dump_file && (dump_flags & TDF_DETAILS))
/* We can't dump the insn with a TREE where an RTX
   is expected.  */
3883 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3884 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3885 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3889 /* In order not to generate too many debug temporaries,
3890 we delink all uses of debug statements we already expanded.
3891 Therefore debug statements between definition and real
3892 use of TERed SSA names will continue to use the SSA name,
3893 and not be replaced with debug temps. */
3894 delink_stmt_imm_use (stmt);
3898 if (gsi_end_p (nsi))
3900 stmt = gsi_stmt (nsi);
3901 if (!gimple_debug_bind_p (stmt))
3905 set_curr_insn_location (sloc);
3907 else if (gimple_debug_source_bind_p (stmt))
3909 location_t sloc = curr_insn_location ();
3910 tree var = gimple_debug_source_bind_get_var (stmt);
3911 tree value = gimple_debug_source_bind_get_value (stmt);
3913 enum machine_mode mode;
3915 last = get_last_insn ();
3917 set_curr_insn_location (gimple_location (stmt));
3919 mode = DECL_MODE (var);
3921 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3922 VAR_INIT_STATUS_UNINITIALIZED);
3924 emit_debug_insn (val);
3926 if (dump_file && (dump_flags & TDF_DETAILS))
/* We can't dump the insn with a TREE where an RTX
   is expected.  */
3930 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3931 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3932 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3935 set_curr_insn_location (sloc);
3939 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3942 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3953 def_operand_p def_p;
3954 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3958 /* Ignore this stmt if it is in the list of
3959 replaceable expressions. */
3961 && bitmap_bit_p (SA.values,
3962 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3965 last = expand_gimple_stmt (stmt);
3966 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3971 currently_expanding_gimple_stmt = NULL;
3973 /* Expand implicit goto and convert goto_locus. */
3974 FOR_EACH_EDGE (e, ei, bb->succs)
3976 if (e->goto_locus != UNKNOWN_LOCATION)
3977 set_curr_insn_location (e->goto_locus);
3978 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3980 emit_jump (label_rtx_for_bb (e->dest));
3981 e->flags &= ~EDGE_FALLTHRU;
/* Expanded RTL can create a jump in the last instruction of a block.
   This later might be assumed to be a jump to the successor and break
   edge insertion.  We need to insert a dummy move to prevent this.
   PR41440.  */
3988 if (single_succ_p (bb)
3989 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3990 && (last = get_last_insn ())
3993 rtx dummy = gen_reg_rtx (SImode);
3994 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3997 do_pending_stack_adjust ();
3999 /* Find the block tail. The last insn in the block is the insn
4000 before a barrier and/or table jump insn. */
4001 last = get_last_insn ();
4002 if (BARRIER_P (last))
4003 last = PREV_INSN (last);
4004 if (JUMP_TABLE_DATA_P (last))
4005 last = PREV_INSN (PREV_INSN (last));
4008 update_bb_for_insn (bb);
4014 /* Create a basic block for initialization code. */
4017 construct_init_block (void)
4019 basic_block init_block, first_block;
4023 /* Multiple entry points not supported yet. */
4024 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4025 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4026 init_rtl_bb_info (EXIT_BLOCK_PTR);
4027 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4028 EXIT_BLOCK_PTR->flags |= BB_RTL;
4030 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
/* When the entry edge points to the first basic block, we don't need
   a jump; otherwise we have to jump to the proper target.  */
4034 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4036 tree label = gimple_block_label (e->dest);
4038 emit_jump (label_rtx (label));
4042 flags = EDGE_FALLTHRU;
4044 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4047 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4048 init_block->count = ENTRY_BLOCK_PTR->count;
4049 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4050 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4053 first_block = e->dest;
4054 redirect_edge_succ (e, init_block);
4055 e = make_edge (init_block, first_block, flags);
4058 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4059 e->probability = REG_BR_PROB_BASE;
4060 e->count = ENTRY_BLOCK_PTR->count;
4062 update_bb_for_insn (init_block);
4066 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4067 found in the block tree. */
4070 set_block_levels (tree block, int level)
4074 BLOCK_NUMBER (block) = level;
4075 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4076 block = BLOCK_CHAIN (block);
4080 /* Create a block containing landing pads and similar stuff. */
4083 construct_exit_block (void)
4085 rtx head = get_last_insn ();
4087 basic_block exit_block;
4091 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4093 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4095 /* Make sure the locus is set to the end of the function, so that
4096 epilogue line numbers and warnings are set properly. */
4097 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4098 input_location = cfun->function_end_locus;
4100 /* Generate rtl for function exit. */
4101 expand_function_end ();
4103 end = get_last_insn ();
/* While emitting the function end we could have moved the end of the
   last basic block.  */
4108 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4109 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4110 head = NEXT_INSN (head);
4111 exit_block = create_basic_block (NEXT_INSN (head), end,
4112 EXIT_BLOCK_PTR->prev_bb);
4113 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4114 exit_block->count = EXIT_BLOCK_PTR->count;
4115 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4116 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4119 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4121 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4122 if (!(e->flags & EDGE_ABNORMAL))
4123 redirect_edge_succ (e, exit_block);
4128 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4129 e->probability = REG_BR_PROB_BASE;
4130 e->count = EXIT_BLOCK_PTR->count;
4131 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4134 e->count -= e2->count;
4135 exit_block->count -= e2->count;
4136 exit_block->frequency -= EDGE_FREQUENCY (e2);
4140 if (exit_block->count < 0)
4141 exit_block->count = 0;
4142 if (exit_block->frequency < 0)
4143 exit_block->frequency = 0;
4144 update_bb_for_insn (exit_block);
4147 /* Helper function for discover_nonconstant_array_refs.
Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */
4152 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4153 void *data ATTRIBUTE_UNUSED)
4157 if (IS_TYPE_OR_DECL_P (t))
4159 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4161 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4162 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4163 && (!TREE_OPERAND (t, 2)
4164 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4165 || (TREE_CODE (t) == COMPONENT_REF
4166 && (!TREE_OPERAND (t,2)
4167 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4168 || TREE_CODE (t) == BIT_FIELD_REF
4169 || TREE_CODE (t) == REALPART_EXPR
4170 || TREE_CODE (t) == IMAGPART_EXPR
4171 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4172 || CONVERT_EXPR_P (t))
4173 t = TREE_OPERAND (t, 0);
4175 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4177 t = get_base_address (t);
4179 && DECL_MODE (t) != BLKmode)
4180 TREE_ADDRESSABLE (t) = 1;
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
4195 discover_nonconstant_array_refs (void)
4198 gimple_stmt_iterator gsi;
4201 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4203 gimple stmt = gsi_stmt (gsi);
4204 if (!is_gimple_debug (stmt))
4205 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
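  /* A hypothetical example of what this catches:

       int v[4];    (small enough to live in a single register)
       ... = v[i];  (i not known at compile time)

     Marking v as TREE_ADDRESSABLE forces it into stack memory, where
     the variable index can be expanded as an address computation.  */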
4209 /* This function sets crtl->args.internal_arg_pointer to a virtual
register if DRAP is needed.  The local register allocator will replace
4211 virtual_incoming_args_rtx with the virtual register. */
4214 expand_stack_alignment (void)
4217 unsigned int preferred_stack_boundary;
4219 if (! SUPPORTS_STACK_ALIGNMENT)
4222 if (cfun->calls_alloca
4223 || cfun->has_nonlocal_label
4224 || crtl->has_nonlocal_goto)
4225 crtl->need_drap = true;
4227 /* Call update_stack_boundary here again to update incoming stack
4228 boundary. It may set incoming stack alignment to a different
4229 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4230 use the minimum incoming stack alignment to check if it is OK
4231 to perform sibcall optimization since sibcall optimization will
4232 only align the outgoing stack to incoming stack boundary. */
4233 if (targetm.calls.update_stack_boundary)
4234 targetm.calls.update_stack_boundary ();
4236 /* The incoming stack frame has to be aligned at least at
4237 parm_stack_boundary. */
4238 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
/* Update crtl->stack_alignment_estimated and use it later to align
   the stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
   exceptions, since the callgraph doesn't collect incoming stack alignment
   in this case.  */
4244 if (cfun->can_throw_non_call_exceptions
4245 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4246 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4248 preferred_stack_boundary = crtl->preferred_stack_boundary;
4249 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4250 crtl->stack_alignment_estimated = preferred_stack_boundary;
4251 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4252 crtl->stack_alignment_needed = preferred_stack_boundary;
4254 gcc_assert (crtl->stack_alignment_needed
4255 <= crtl->stack_alignment_estimated);
4257 crtl->stack_realign_needed
4258 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4259 crtl->stack_realign_tried = crtl->stack_realign_needed;
4261 crtl->stack_realign_processed = true;
/* The target has to redefine TARGET_GET_DRAP_RTX to support stack
   realignment.  */
4265 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4266 drap_rtx = targetm.calls.get_drap_rtx ();
4268 /* stack_realign_drap and drap_rtx must match. */
4269 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4271 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4272 if (NULL != drap_rtx)
4274 crtl->args.internal_arg_pointer = drap_rtx;
/* Call fixup_tail_calls to clean up REG_EQUIV notes if DRAP is
   needed.  */
4278 fixup_tail_calls ();
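  /* A hypothetical situation requiring DRAP on a stack-realigning
     target: a function that calls alloca and declares a local whose
     alignment exceeds the incoming stack boundary, e.g.

       void f (unsigned n)
       {
	 float v[8] __attribute__ ((aligned (32)));
	 char *p = __builtin_alloca (n);
	 ...
       }

     The prologue must then realign the stack, and the DRAP register
     preserves access to the incoming arguments across the
     realignment.  */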
4282 /* Translate the intermediate representation contained in the CFG
4283 from GIMPLE trees to RTL.
4285 We do conversion per basic block and preserve/update the tree CFG.
4286 This implies we have to do some magic as the CFG can simultaneously
4287 consist of basic blocks containing RTL and GIMPLE trees. This can
confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */
4292 gimple_expand_cfg (void)
4294 basic_block bb, init_block;
4301 timevar_push (TV_OUT_OF_SSA);
4302 rewrite_out_of_ssa (&SA);
4303 timevar_pop (TV_OUT_OF_SSA);
4304 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
/* Make sure all values used by the optimization passes have sane
   defaults.  */
4310 /* Some backends want to know that we are expanding to RTL. */
4311 currently_expanding_to_rtl = 1;
4312 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4313 free_dominance_info (CDI_DOMINATORS);
4315 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4317 insn_locations_init ();
4318 if (!DECL_IS_BUILTIN (current_function_decl))
4320 /* Eventually, all FEs should explicitly set function_start_locus. */
4321 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4322 set_curr_insn_location
4323 (DECL_SOURCE_LOCATION (current_function_decl));
4325 set_curr_insn_location (cfun->function_start_locus);
4328 set_curr_insn_location (UNKNOWN_LOCATION);
4329 prologue_location = curr_insn_location ();
4331 #ifdef INSN_SCHEDULING
4332 init_sched_attrs ();
/* Make sure the first insn is a note even if we don't want linenums.
4336 This makes sure the first insn will never be deleted.
4337 Also, final expects a note to appear there. */
4338 emit_note (NOTE_INSN_DELETED);
4340 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4341 discover_nonconstant_array_refs ();
4343 targetm.expand_to_rtl_hook ();
4344 crtl->stack_alignment_needed = STACK_BOUNDARY;
4345 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4346 crtl->stack_alignment_estimated = 0;
4347 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4348 cfun->cfg->max_jumptable_ents = 0;
/* Resolve the function section.  Some targets, like ARM EABI, rely on
   knowledge of the function section at expansion time to predict
   distances of calls.  */
4352 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4354 /* Expand the variables recorded during gimple lowering. */
4355 timevar_push (TV_VAR_EXPAND);
4358 expand_used_vars ();
4360 var_seq = get_insns ();
4362 timevar_pop (TV_VAR_EXPAND);
4364 /* Honor stack protection warnings. */
4365 if (warn_stack_protect)
4367 if (cfun->calls_alloca)
4368 warning (OPT_Wstack_protector,
4369 "stack protector not protecting local variables: "
4370 "variable length buffer");
4371 if (has_short_buffer && !crtl->stack_protect_guard)
4372 warning (OPT_Wstack_protector,
4373 "stack protector not protecting function: "
4374 "all local arrays are less than %d bytes long",
4375 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4378 /* Set up parameters and prepare for return, for the function. */
4379 expand_function_start (current_function_decl);
4381 /* If we emitted any instructions for setting up the variables,
4382 emit them before the FUNCTION_START note. */
4385 emit_insn_before (var_seq, parm_birth_insn);
4387 /* In expand_function_end we'll insert the alloca save/restore
before parm_birth_insn.  We've just inserted an alloca call.
4389 Adjust the pointer to match. */
4390 parm_birth_insn = var_seq;
/* Now that we also have the parameter RTXs, copy them over to our
   partition representation.  */
4395 for (i = 0; i < SA.map->num_partitions; i++)
4397 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4399 if (TREE_CODE (var) != VAR_DECL
4400 && !SA.partition_to_pseudo[i])
4401 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4402 gcc_assert (SA.partition_to_pseudo[i]);
4404 /* If this decl was marked as living in multiple places, reset
4405 this now to NULL. */
4406 if (DECL_RTL_IF_SET (var) == pc_rtx)
4407 SET_DECL_RTL (var, NULL);
4409 /* Some RTL parts really want to look at DECL_RTL(x) when x
4410 was a decl marked in REG_ATTR or MEM_ATTR. We could use
SET_DECL_RTL here making this available, but that would mean
selecting one of the potentially many RTLs for one DECL.  Instead
of doing that we simply reset the MEM_EXPR of the RTL in question,
so that nobody can get at it and hence nobody can call DECL_RTL on it.  */
4415 if (!DECL_RTL_SET_P (var))
4417 if (MEM_P (SA.partition_to_pseudo[i]))
4418 set_mem_expr (SA.partition_to_pseudo[i], NULL);
/* If we have a class containing differently aligned pointers
   we need to merge those into the corresponding RTL pointer
   alignment.  */
4425 for (i = 1; i < num_ssa_names; i++)
4427 tree name = ssa_name (i);
/* We might have generated new SSA names in
   update_alias_info_with_stack_vars.  They will have NULL
   defining statements, and won't be part of the partitioning,
   so ignore those.  */
4436 || !SSA_NAME_DEF_STMT (name))
4438 part = var_to_partition (SA.map, name);
4439 if (part == NO_PARTITION)
4442 /* Adjust all partition members to get the underlying decl of
4443 the representative which we might have created in expand_one_var. */
4444 if (SSA_NAME_VAR (name) == NULL_TREE)
4446 tree leader = partition_to_var (SA.map, part);
4447 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4448 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4450 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4453 r = SA.partition_to_pseudo[part];
4455 mark_reg_pointer (r, get_pointer_alignment (name));
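  /* For instance, if pointers p_1 (known to be 64-bit aligned) and p_2
     (only 8-bit aligned) were coalesced into one partition, the shared
     pseudo may only claim the weaker guarantee: mark_reg_pointer keeps
     the smallest alignment it is ever given, so the register ends up
     marked as 8-bit aligned.  (A sketch of the intended semantics.)  */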
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();
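  /* On targets that lack .init/.ctors-style init sections, __main
     (supplied by libgcc) is what runs the static constructors, so e.g.

	int main (void) { return 0; }

     is expanded as if it began with a call to __main; targets with init
     sections omit the call entirely.  (Illustrative; the exact condition
     is target-dependent, see expand_main_function.)  */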
  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
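  /* The scheme, roughly: the prologue copies the guard value (from
     __stack_chk_guard or a TLS slot, depending on the target) into a
     canary slot placed next to the local buffers, and expand_function_end
     emits the matching check:

	prologue:   canary = __stack_chk_guard;
	epilogue:   if (canary != __stack_chk_guard)
		      __stack_chk_fail ();

     This is a sketch of the protocol, not the literal emitted RTL.  */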
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();
  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;
  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();
  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations ();

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;
  if (current_loops)
    loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  construct_exit_block ();
  insn_locations_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);
  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Avoid putting insns before parm_birth_insn.  */
	      if (e->src == ENTRY_BLOCK_PTR
		  && single_succ_p (ENTRY_BLOCK_PTR)
		  && bb == single_succ (ENTRY_BLOCK_PTR))
		{
		  rtx insns = e->insns.r;
		  e->insns.r = NULL_RTX;
		  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }
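  /* commit_one_edge_insertion may have to split an edge (creating a new
     basic block on it) when its source has several successors or its
     destination several predecessors; the ENTRY_BLOCK_PTR special case
     above instead appends the pending insns right after parm_birth_insn,
     so that nothing ends up ahead of the parameter setup code.  */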
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();
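  /* Expanding a single gimple statement may produce RTL that itself
     contains jumps and labels (e.g. a branchy libcall or builtin
     sequence), so one gimple block no longer maps to one RTL block.
     Passing a bitmap with every bit set makes find_many_sub_basic_blocks
     rescan all blocks and split them at the newly created control
     flow.  */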
  expand_stack_alignment ();
  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();
  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (cfun->eh->region_tree != NULL)
    finish_eh_generation ();
  /* Remove unreachable blocks, otherwise we cannot compute dominators,
     which are needed for loop state verification.  As a side effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();
  /* And finally unshare all RTL.  */
  unshare_all_rtl ();
  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;
  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
4617 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4618 /* And the pass manager will dump RTL for us. */
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }
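  /* For a GNU C nested function such as

	int f (void) { int x = 1; int g (void) { return x; } return g (); }

     emitting g marks f's assembler name as referenced, so the parent is
     output even when nothing else refers to it.  (Illustrative
     example.)  */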
  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);
  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);

  return 0;
}
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_ggc_collect			/* todo_flags_finish */
 }
};
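/* The pass manager (see passes.c) runs this descriptor at the boundary
   between the GIMPLE and RTL pipelines: the start TODOs are verified
   while the function is still in SSA form, gimple_expand_cfg performs
   the expansion, and every later pass sees the function as RTL
   (PROP_rtl).  */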