/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

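/* Illustrative example (hypothetical decls, not part of the original
   source): for an SSA name such as "i_3" whose SSA_NAME_VAR is the
   VAR_DECL "i", SSAVAR yields that VAR_DECL; for a plain VAR_DECL or
   PARM_DECL it is the identity:

     tree base = SSAVAR (i_3_name);    yields the VAR_DECL "i"
     tree same = SSAVAR (i_decl);      yields i_decl unchanged  */
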
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

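/* Worked example (hypothetical target values, for illustration only):
   with a PREFERRED_STACK_BOUNDARY of 128 bits and a
   STARTING_FRAME_OFFSET of 8, expand_used_vars computes

     align = 128 / BITS_PER_UNIT   =>  16
     off   = 8 % 16                =>  8
     frame_phase = 16 - 8          =>  8

   so allocated offsets are congruent to frame_phase modulo 16, which
   keeps frame_offset + frame_phase a multiple of the 16-byte
   boundary.  */
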
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't achieve given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

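/* Worked example (illustrative numbers only): with FRAME_GROWS_DOWNWARD
   and frame_phase == 0, starting from frame_offset == -4, a request
   for size 8 at align 8 computes

     new_frame_offset = -4 - 8   =>  -12
     new_frame_offset &= -8      =>  -16

   so the variable is placed at offset -16 and frame_offset becomes
   -16; the downward rounding supplies the alignment padding.  */
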
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

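/* Example of the resulting order (hypothetical variables): two 8-byte
   objects compare equal by size, so the id tie-break decides: an
   8-byte SSA name sorts before an 8-byte VAR_DECL of any DECL_UID,
   and both sort before any 16-byte object.  qsort may reorder equal
   elements, so this tie-break keeps the output stable across runs.  */
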
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

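/* Illustrative merge (hypothetical partitions): if A is the chain
   a1 -> a2 -> EOC and B is b1 -> b2 -> EOC, then after
   union_stack_vars (a1, b1, off) the chain reads

     a1 -> b1 -> b2 -> a2 -> EOC

   with OFF added to the offsets of b1 and b2 and their representative
   fields redirected to a1.  */
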
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;

          offset += jsize;
        }
    }

  update_alias_info_with_stack_vars ();
}

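/* Worked example of the alignment refinement above (hypothetical
   numbers): with offset == 4, jalign == 8 and isize == 16, toff
   becomes (4 + 7) & -8 == 8, consuming 4 bytes as padding; J is only
   placed if its size fits in the remaining 16 - 4 == 12 bytes.  */
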
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         the alignment isn't as important; we'll simply use the alignment
         that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

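/* The expression "offset & -offset" above isolates the lowest set bit,
   i.e. the largest power of two dividing OFFSET.  Illustrative: an
   offset of 24 (binary 11000) gives 24 & -24 == 8, so a variable at
   frame offset 24 is known to be 8-byte aligned.  */
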
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;

  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision has been made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained (sub-block) levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY       1
#define SPCT_HAS_SMALL_CHAR_ARRAY       2
#define SPCT_HAS_ARRAY                  4
#define SPCT_HAS_AGGREGATE              8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

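/* Illustrative classification (hypothetical decls, default
   --param ssp-buffer-size=8): "char buf[64]" classifies as a large
   character array, so stack_protect_decl_phase returns 1 and the
   buffer is laid out next to the guard; "int v" sets no SPCT_HAS_*
   bits and stays in phase 0, below the protected region.  With
   flag_stack_protect == 2 (-fstack-protector-all) other kinds of
   arrays additionally land in phase 2.  */
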
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and sum the estimated stack usage of the variables it declares,
   without actually expanding anything.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at contained (sub-block) levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the decl
   passed.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (tree decl)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  unsigned ix;
  tree old_cur_fun_decl = current_function_decl;
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (cfun, ix, var)
    {
      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to it.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}

/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple: the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
                case GIMPLE_TERNARY_RHS:
                  ops.op2 = gimple_assign_rhs3 (stmt);
                  /* Fallthru */
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (stmt);
                  /* Fallthru */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}

/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  int lp_nr = 0;
  rtx last;
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  currently_expanding_gimple_stmt = stmt;
  if (gimple_has_location (stmt))
    {
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));
    }

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  currently_expanding_gimple_stmt = NULL;

  return last;
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander may expand an if with a
         sibcall in the then-arm and a label for the else-arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}

2119 /* Return the difference between the floor and the truncated result of
2120 a signed division by OP1 with remainder MOD. */
2122 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2124 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2125 return gen_rtx_IF_THEN_ELSE
2126 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2127 gen_rtx_IF_THEN_ELSE
2128 (mode, gen_rtx_LT (BImode,
2129 gen_rtx_DIV (mode, op1, mod),
2131 constm1_rtx, const0_rtx),
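/* A worked example (illustrative): for -7 / 2 the truncated quotient
   is -3 with remainder MOD = -1.  MOD != 0 and OP1 / MOD = 2 / -1 < 0,
   so the adjustment is -1, and -3 + -1 = -4 = floor (-7 / 2).  */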
2135 /* Return the difference between the ceil and the truncated result of
2136 a signed division by OP1 with remainder MOD. */
2138 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2140 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2141 return gen_rtx_IF_THEN_ELSE
2142 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2143 gen_rtx_IF_THEN_ELSE
2144 (mode, gen_rtx_GT (BImode,
2145 gen_rtx_DIV (mode, op1, mod),
2147 const1_rtx, const0_rtx),
2151 /* Return the difference between the ceil and the truncated result of
2152 an unsigned division by OP1 with remainder MOD. */
2154 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2156 /* (mod != 0 ? 1 : 0) */
2157 return gen_rtx_IF_THEN_ELSE
2158 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2159 const1_rtx, const0_rtx);
2162 /* Return the difference between the rounded and the truncated result
2163 of a signed division by OP1 with remainder MOD. Halfway cases are
2164 rounded away from zero, rather than to the nearest even number. */
2166 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2168 /* (abs (mod) >= abs (op1) - abs (mod)
2169 ? (op1 / mod > 0 ? 1 : -1)
2171 return gen_rtx_IF_THEN_ELSE
2172 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2173 gen_rtx_MINUS (mode,
2174 gen_rtx_ABS (mode, op1),
2175 gen_rtx_ABS (mode, mod))),
2176 gen_rtx_IF_THEN_ELSE
2177 (mode, gen_rtx_GT (BImode,
2178 gen_rtx_DIV (mode, op1, mod),
2180 const1_rtx, constm1_rtx),
2184 /* Return the difference between the rounded and the truncated result
2185 of an unsigned division by OP1 with remainder MOD. Halfway cases
2186 are rounded away from zero, rather than to the nearest even number. */
2189 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2191 /* (mod >= op1 - mod ? 1 : 0) */
2192 return gen_rtx_IF_THEN_ELSE
2193 (mode, gen_rtx_GE (BImode, mod,
2194 gen_rtx_MINUS (mode, op1, mod)),
2195 const1_rtx, const0_rtx);
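/* Worked examples for the ceil and round adjustments (illustrative):
   for unsigned 7 / 2, MOD = 1 != 0, so ceil_udiv_adjust yields 1 and
   3 + 1 = 4 = ceil (7 / 2).  For the same division rounded, MOD = 1
   >= OP1 - MOD = 1, so round_udiv_adjust also yields 1, rounding the
   halfway case 3.5 away from zero to 4.  For signed -7 / 2 rounded,
   abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1 and OP1 / MOD = 2 / -1
   < 0, so round_sdiv_adjust yields -1, giving -3 + -1 = -4.  */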
2198 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting insns. */
2202 convert_debug_memory_address (enum machine_mode mode, rtx x)
2204 enum machine_mode xmode = GET_MODE (x);
2206 #ifndef POINTERS_EXTEND_UNSIGNED
2207 gcc_assert (mode == Pmode);
2208 gcc_assert (xmode == mode || xmode == VOIDmode);
2210 gcc_assert (mode == Pmode || mode == ptr_mode);
2212 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2215 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2216 x = simplify_gen_subreg (mode, x, xmode,
2217 subreg_lowpart_offset
2219 else if (POINTERS_EXTEND_UNSIGNED > 0)
2220 x = gen_rtx_ZERO_EXTEND (mode, x);
2221 else if (!POINTERS_EXTEND_UNSIGNED)
2222 x = gen_rtx_SIGN_EXTEND (mode, x);
2225 #endif /* POINTERS_EXTEND_UNSIGNED */
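/* For instance (a sketch, assuming a target with 32-bit ptr_mode,
   64-bit Pmode and POINTERS_EXTEND_UNSIGNED > 0), converting an
   SImode address X to Pmode produces (zero_extend:DI X), while the
   opposite direction takes the low SImode subreg of X.  */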
2230 /* Return an RTX equivalent to the value of the tree expression EXP. */
2234 expand_debug_expr (tree exp)
2236 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2237 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2238 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2241 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2243 case tcc_expression:
2244 switch (TREE_CODE (exp))
2248 case WIDEN_MULT_PLUS_EXPR:
2249 case WIDEN_MULT_MINUS_EXPR:
2252 case TRUTH_ANDIF_EXPR:
2253 case TRUTH_ORIF_EXPR:
2254 case TRUTH_AND_EXPR:
2256 case TRUTH_XOR_EXPR:
2259 case TRUTH_NOT_EXPR:
2268 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2275 case tcc_comparison:
2276 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2283 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2293 case tcc_exceptional:
2294 case tcc_declaration:
2300 switch (TREE_CODE (exp))
2303 if (!lookup_constant_def (exp))
2305 if (strlen (TREE_STRING_POINTER (exp)) + 1
2306 != (size_t) TREE_STRING_LENGTH (exp))
2308 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2309 op0 = gen_rtx_MEM (BLKmode, op0);
2310 set_mem_attributes (op0, exp, 0);
2313 /* Fall through... */
2318 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2322 gcc_assert (COMPLEX_MODE_P (mode));
2323 op0 = expand_debug_expr (TREE_REALPART (exp));
2324 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2325 return gen_rtx_CONCAT (mode, op0, op1);
2327 case DEBUG_EXPR_DECL:
2328 op0 = DECL_RTL_IF_SET (exp);
2333 op0 = gen_rtx_DEBUG_EXPR (mode);
2334 DEBUG_EXPR_TREE_DECL (op0) = exp;
2335 SET_DECL_RTL (exp, op0);
2345 op0 = DECL_RTL_IF_SET (exp);
2347 /* This decl was probably optimized away. */
2350 if (TREE_CODE (exp) != VAR_DECL
2351 || DECL_EXTERNAL (exp)
2352 || !TREE_STATIC (exp)
2354 || DECL_HARD_REGISTER (exp)
2355 || mode == VOIDmode)
2358 op0 = make_decl_rtl_for_debug (exp);
2360 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2361 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2365 op0 = copy_rtx (op0);
2367 if (GET_MODE (op0) == BLKmode
2368 /* If op0 is not BLKmode but MODE is, adjust_mode
2369 below would ICE. While it is likely a FE bug,
2370 try to be robust here. See PR43166. */
2372 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2374 gcc_assert (MEM_P (op0));
2375 op0 = adjust_address_nv (op0, mode, 0);
2386 enum machine_mode inner_mode = GET_MODE (op0);
2388 if (mode == inner_mode)
2391 if (inner_mode == VOIDmode)
2393 if (TREE_CODE (exp) == SSA_NAME)
2394 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2396 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2397 if (mode == inner_mode)
2401 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2403 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2404 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2405 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2406 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2408 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2410 else if (FLOAT_MODE_P (mode))
2412 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2413 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2414 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2416 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2418 else if (FLOAT_MODE_P (inner_mode))
2421 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2423 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2425 else if (CONSTANT_P (op0)
2426 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2427 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2428 subreg_lowpart_offset (mode,
2431 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2433 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2440 if (!integer_zerop (TREE_OPERAND (exp, 1)))
2444 case MISALIGNED_INDIRECT_REF:
2445 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2449 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2450 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2452 as = ADDR_SPACE_GENERIC;
2454 op0 = gen_rtx_MEM (mode, op0);
2456 set_mem_attributes (op0, exp, 0);
2457 set_mem_addr_space (op0, as);
2461 case TARGET_MEM_REF:
2462 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2465 op0 = expand_debug_expr
2466 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2470 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2472 op0 = gen_rtx_MEM (mode, op0);
2474 set_mem_attributes (op0, exp, 0);
2475 set_mem_addr_space (op0, as);
2480 case ARRAY_RANGE_REF:
2485 case VIEW_CONVERT_EXPR:
2487 enum machine_mode mode1;
2488 HOST_WIDE_INT bitsize, bitpos;
2491 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2492 &mode1, &unsignedp, &volatilep, false);
2498 orig_op0 = op0 = expand_debug_expr (tem);
2505 enum machine_mode addrmode, offmode;
2510 op0 = XEXP (op0, 0);
2511 addrmode = GET_MODE (op0);
2512 if (addrmode == VOIDmode)
2515 op1 = expand_debug_expr (offset);
2519 offmode = GET_MODE (op1);
2520 if (offmode == VOIDmode)
2521 offmode = TYPE_MODE (TREE_TYPE (offset));
2523 if (addrmode != offmode)
2524 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2525 subreg_lowpart_offset (addrmode,
2528 /* Don't use offset_address here; we don't need a
2529 recognizable address, and we don't want to generate code. */
2531 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2536 if (mode1 == VOIDmode)
2538 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2539 if (bitpos >= BITS_PER_UNIT)
2541 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2542 bitpos %= BITS_PER_UNIT;
2544 else if (bitpos < 0)
2547 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2548 op0 = adjust_address_nv (op0, mode1, units);
2549 bitpos += units * BITS_PER_UNIT;
2551 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2552 op0 = adjust_address_nv (op0, mode, 0);
2553 else if (GET_MODE (op0) != mode1)
2554 op0 = adjust_address_nv (op0, mode1, 0);
2556 op0 = copy_rtx (op0);
2557 if (op0 == orig_op0)
2558 op0 = shallow_copy_rtx (op0);
2559 set_mem_attributes (op0, exp, 0);
2562 if (bitpos == 0 && mode == GET_MODE (op0))
2568 if (GET_MODE (op0) == BLKmode)
2571 if ((bitpos % BITS_PER_UNIT) == 0
2572 && bitsize == GET_MODE_BITSIZE (mode1))
2574 enum machine_mode opmode = GET_MODE (op0);
2576 if (opmode == VOIDmode)
2579 /* This condition may hold if we're expanding the address
2580 right past the end of an array that turned out not to
2581 be addressable (i.e., the address was only computed in
2582 debug stmts). The gen_subreg below would rightfully
2583 crash, and the address doesn't really exist, so just drop it. */
2585 if (bitpos >= GET_MODE_BITSIZE (opmode))
2588 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2589 return simplify_gen_subreg (mode, op0, opmode,
2590 bitpos / BITS_PER_UNIT);
2593 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2594 && TYPE_UNSIGNED (TREE_TYPE (exp))
2596 : ZERO_EXTRACT, mode,
2597 GET_MODE (op0) != VOIDmode
2598 ? GET_MODE (op0) : mode1,
2599 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2603 return gen_rtx_ABS (mode, op0);
2606 return gen_rtx_NEG (mode, op0);
2609 return gen_rtx_NOT (mode, op0);
2613 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2615 return gen_rtx_FLOAT (mode, op0);
2617 case FIX_TRUNC_EXPR:
2619 return gen_rtx_UNSIGNED_FIX (mode, op0);
2621 return gen_rtx_FIX (mode, op0);
2623 case POINTER_PLUS_EXPR:
2624 /* For the rare target where pointers are not the same size as
2625 size_t, we need to check for mis-matched modes and correct the addition. */
2628 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2629 && GET_MODE (op0) != GET_MODE (op1))
2631 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2632 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2634 /* We always sign-extend, regardless of the signedness of
2635 the operand, because the operand is always unsigned
2636 here even if the original C expression is signed. */
2637 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
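/* E.g. (illustrative), with 64-bit pointers and a 32-bit sizetype,
   an offset of (size_t) -4 == 0xfffffffc must become (const_int -4)
   in the wider mode; sign-extension gives that, zero-extension
   would not.  */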
2641 return gen_rtx_PLUS (mode, op0, op1);
2644 return gen_rtx_MINUS (mode, op0, op1);
2647 return gen_rtx_MULT (mode, op0, op1);
2650 case TRUNC_DIV_EXPR:
2651 case EXACT_DIV_EXPR:
2653 return gen_rtx_UDIV (mode, op0, op1);
2655 return gen_rtx_DIV (mode, op0, op1);
2657 case TRUNC_MOD_EXPR:
2659 return gen_rtx_UMOD (mode, op0, op1);
2661 return gen_rtx_MOD (mode, op0, op1);
2663 case FLOOR_DIV_EXPR:
2665 return gen_rtx_UDIV (mode, op0, op1);
2668 rtx div = gen_rtx_DIV (mode, op0, op1);
2669 rtx mod = gen_rtx_MOD (mode, op0, op1);
2670 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2671 return gen_rtx_PLUS (mode, div, adj);
2674 case FLOOR_MOD_EXPR:
2676 return gen_rtx_UMOD (mode, op0, op1);
2679 rtx mod = gen_rtx_MOD (mode, op0, op1);
2680 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2681 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2682 return gen_rtx_PLUS (mode, mod, adj);
2688 rtx div = gen_rtx_UDIV (mode, op0, op1);
2689 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2690 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2691 return gen_rtx_PLUS (mode, div, adj);
2695 rtx div = gen_rtx_DIV (mode, op0, op1);
2696 rtx mod = gen_rtx_MOD (mode, op0, op1);
2697 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2698 return gen_rtx_PLUS (mode, div, adj);
2704 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2705 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2706 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2707 return gen_rtx_PLUS (mode, mod, adj);
2711 rtx mod = gen_rtx_MOD (mode, op0, op1);
2712 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2713 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2714 return gen_rtx_PLUS (mode, mod, adj);
2717 case ROUND_DIV_EXPR:
2720 rtx div = gen_rtx_UDIV (mode, op0, op1);
2721 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2722 rtx adj = round_udiv_adjust (mode, mod, op1);
2723 return gen_rtx_PLUS (mode, div, adj);
2727 rtx div = gen_rtx_DIV (mode, op0, op1);
2728 rtx mod = gen_rtx_MOD (mode, op0, op1);
2729 rtx adj = round_sdiv_adjust (mode, mod, op1);
2730 return gen_rtx_PLUS (mode, div, adj);
2733 case ROUND_MOD_EXPR:
2736 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2737 rtx adj = round_udiv_adjust (mode, mod, op1);
2738 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2739 return gen_rtx_PLUS (mode, mod, adj);
2743 rtx mod = gen_rtx_MOD (mode, op0, op1);
2744 rtx adj = round_sdiv_adjust (mode, mod, op1);
2745 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2746 return gen_rtx_PLUS (mode, mod, adj);
2750 return gen_rtx_ASHIFT (mode, op0, op1);
2754 return gen_rtx_LSHIFTRT (mode, op0, op1);
2756 return gen_rtx_ASHIFTRT (mode, op0, op1);
2759 return gen_rtx_ROTATE (mode, op0, op1);
2762 return gen_rtx_ROTATERT (mode, op0, op1);
2766 return gen_rtx_UMIN (mode, op0, op1);
2768 return gen_rtx_SMIN (mode, op0, op1);
2772 return gen_rtx_UMAX (mode, op0, op1);
2774 return gen_rtx_SMAX (mode, op0, op1);
2777 case TRUTH_AND_EXPR:
2778 return gen_rtx_AND (mode, op0, op1);
2782 return gen_rtx_IOR (mode, op0, op1);
2785 case TRUTH_XOR_EXPR:
2786 return gen_rtx_XOR (mode, op0, op1);
2788 case TRUTH_ANDIF_EXPR:
2789 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2791 case TRUTH_ORIF_EXPR:
2792 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2794 case TRUTH_NOT_EXPR:
2795 return gen_rtx_EQ (mode, op0, const0_rtx);
2799 return gen_rtx_LTU (mode, op0, op1);
2801 return gen_rtx_LT (mode, op0, op1);
2805 return gen_rtx_LEU (mode, op0, op1);
2807 return gen_rtx_LE (mode, op0, op1);
2811 return gen_rtx_GTU (mode, op0, op1);
2813 return gen_rtx_GT (mode, op0, op1);
2817 return gen_rtx_GEU (mode, op0, op1);
2819 return gen_rtx_GE (mode, op0, op1);
2822 return gen_rtx_EQ (mode, op0, op1);
2825 return gen_rtx_NE (mode, op0, op1);
2827 case UNORDERED_EXPR:
2828 return gen_rtx_UNORDERED (mode, op0, op1);
2831 return gen_rtx_ORDERED (mode, op0, op1);
2834 return gen_rtx_UNLT (mode, op0, op1);
2837 return gen_rtx_UNLE (mode, op0, op1);
2840 return gen_rtx_UNGT (mode, op0, op1);
2843 return gen_rtx_UNGE (mode, op0, op1);
2846 return gen_rtx_UNEQ (mode, op0, op1);
2849 return gen_rtx_LTGT (mode, op0, op1);
2852 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2855 gcc_assert (COMPLEX_MODE_P (mode));
2856 if (GET_MODE (op0) == VOIDmode)
2857 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2858 if (GET_MODE (op1) == VOIDmode)
2859 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2860 return gen_rtx_CONCAT (mode, op0, op1);
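/* E.g. (illustrative), a complex double built from 3.0 and 4.0
   becomes (concat:DC (const_double:DF 3.0) (const_double:DF 4.0)).  */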
2863 if (GET_CODE (op0) == CONCAT)
2864 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2865 gen_rtx_NEG (GET_MODE_INNER (mode),
2869 enum machine_mode imode = GET_MODE_INNER (mode);
2874 re = adjust_address_nv (op0, imode, 0);
2875 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2879 enum machine_mode ifmode = int_mode_for_mode (mode);
2880 enum machine_mode ihmode = int_mode_for_mode (imode);
2882 if (ifmode == BLKmode || ihmode == BLKmode)
2884 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2887 re = gen_rtx_SUBREG (ifmode, re, 0);
2888 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2889 if (imode != ihmode)
2890 re = gen_rtx_SUBREG (imode, re, 0);
2891 im = copy_rtx (op0);
2893 im = gen_rtx_SUBREG (ifmode, im, 0);
2894 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2895 if (imode != ihmode)
2896 im = gen_rtx_SUBREG (imode, im, 0);
2898 im = gen_rtx_NEG (imode, im);
2899 return gen_rtx_CONCAT (mode, re, im);
2903 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2904 if (!op0 || !MEM_P (op0))
2907 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2912 exp = build_constructor_from_list (TREE_TYPE (exp),
2913 TREE_VECTOR_CST_ELTS (exp));
2917 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2922 op0 = gen_rtx_CONCATN
2923 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2925 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2927 op1 = expand_debug_expr (val);
2930 XVECEXP (op0, 0, i) = op1;
2933 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2935 op1 = expand_debug_expr
2936 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2941 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2942 XVECEXP (op0, 0, i) = op1;
2948 goto flag_unsupported;
2951 /* ??? Maybe handle some builtins? */
2956 gimple g = get_gimple_for_ssa_name (exp);
2959 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2965 int part = var_to_partition (SA.map, exp);
2967 if (part == NO_PARTITION)
2970 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2972 op0 = SA.partition_to_pseudo[part];
2980 /* Vector stuff. For most of these tree codes there are no corresponding RTL codes. */
2981 case REALIGN_LOAD_EXPR:
2982 case REDUC_MAX_EXPR:
2983 case REDUC_MIN_EXPR:
2984 case REDUC_PLUS_EXPR:
2986 case VEC_EXTRACT_EVEN_EXPR:
2987 case VEC_EXTRACT_ODD_EXPR:
2988 case VEC_INTERLEAVE_HIGH_EXPR:
2989 case VEC_INTERLEAVE_LOW_EXPR:
2990 case VEC_LSHIFT_EXPR:
2991 case VEC_PACK_FIX_TRUNC_EXPR:
2992 case VEC_PACK_SAT_EXPR:
2993 case VEC_PACK_TRUNC_EXPR:
2994 case VEC_RSHIFT_EXPR:
2995 case VEC_UNPACK_FLOAT_HI_EXPR:
2996 case VEC_UNPACK_FLOAT_LO_EXPR:
2997 case VEC_UNPACK_HI_EXPR:
2998 case VEC_UNPACK_LO_EXPR:
2999 case VEC_WIDEN_MULT_HI_EXPR:
3000 case VEC_WIDEN_MULT_LO_EXPR:
3004 case ADDR_SPACE_CONVERT_EXPR:
3005 case FIXED_CONVERT_EXPR:
3007 case WITH_SIZE_EXPR:
3011 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3012 && SCALAR_INT_MODE_P (mode))
3014 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3015 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3017 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3018 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3019 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3021 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3022 op0 = gen_rtx_MULT (mode, op0, op1);
3023 return gen_rtx_PLUS (mode, op0, op2);
3027 case WIDEN_MULT_EXPR:
3028 case WIDEN_MULT_PLUS_EXPR:
3029 case WIDEN_MULT_MINUS_EXPR:
3030 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3031 && SCALAR_INT_MODE_P (mode))
3033 enum machine_mode inner_mode = GET_MODE (op0);
3034 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3035 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3037 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3038 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3039 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3041 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3042 op0 = gen_rtx_MULT (mode, op0, op1);
3043 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3045 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3046 return gen_rtx_PLUS (mode, op0, op2);
3048 return gen_rtx_MINUS (mode, op2, op0);
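/* For example (a sketch), a WIDEN_MULT_EXPR of two signed HImode
   values A and B producing an SImode result yields
   (mult:SI (sign_extend:SI A) (sign_extend:SI B)) here, mirroring
   how widening-multiply instructions describe the operation.  */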
3052 case WIDEN_SUM_EXPR:
3053 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3054 && SCALAR_INT_MODE_P (mode))
3056 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3057 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3059 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3060 return gen_rtx_PLUS (mode, op0, op1);
3066 #ifdef ENABLE_CHECKING
3075 /* Expand the _LOCs in debug insns. We run this after expanding all
3076 regular insns, so that any variables referenced in the function
3077 will have their DECL_RTLs set. */
3080 expand_debug_locations (void)
3083 rtx last = get_last_insn ();
3084 int save_strict_alias = flag_strict_aliasing;
3086 /* New alias sets while setting up memory attributes cause
3087 -fcompare-debug failures, even though they don't bring about any codegen changes. */
3089 flag_strict_aliasing = 0;
3091 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3092 if (DEBUG_INSN_P (insn))
3094 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3096 enum machine_mode mode;
3098 if (value == NULL_TREE)
3102 val = expand_debug_expr (value);
3103 gcc_assert (last == get_last_insn ());
3107 val = gen_rtx_UNKNOWN_VAR_LOC ();
3110 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3112 gcc_assert (mode == GET_MODE (val)
3113 || (GET_MODE (val) == VOIDmode
3114 && (CONST_INT_P (val)
3115 || GET_CODE (val) == CONST_FIXED
3116 || GET_CODE (val) == CONST_DOUBLE
3117 || GET_CODE (val) == LABEL_REF)));
3120 INSN_VAR_LOCATION_LOC (insn) = val;
3123 flag_strict_aliasing = save_strict_alias;
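/* As an illustration (not from the original sources): a debug insn
   binding x to the GIMPLE value a_1 + 1 gets its location expanded
   here to something like (plus:SI (reg:SI N) (const_int 1)); if
   expansion fails, the location becomes gen_rtx_UNKNOWN_VAR_LOC ()
   and the variable is reported as optimized out at this point.  */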
3126 /* Expand basic block BB from GIMPLE trees to RTL. */
3129 expand_gimple_basic_block (basic_block bb)
3131 gimple_stmt_iterator gsi;
3140 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3143 /* Note that since we are now transitioning from GIMPLE to RTL, we
3144 cannot use the gsi_*_bb() routines because they expect the basic
3145 block to be in GIMPLE, instead of RTL. Therefore, we need to
3146 access the BB sequence directly. */
3147 stmts = bb_seq (bb);
3148 bb->il.gimple = NULL;
3149 rtl_profile_for_bb (bb);
3150 init_rtl_bb_info (bb);
3151 bb->flags |= BB_RTL;
3153 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
3155 gsi = gsi_last (stmts);
3156 if (!gsi_end_p (gsi)
3157 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3159 gimple ret_stmt = gsi_stmt (gsi);
3161 gcc_assert (single_succ_p (bb));
3162 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3164 if (bb->next_bb == EXIT_BLOCK_PTR
3165 && !gimple_return_retval (ret_stmt))
3167 gsi_remove (&gsi, false);
3168 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3172 gsi = gsi_start (stmts);
3173 if (!gsi_end_p (gsi))
3175 stmt = gsi_stmt (gsi);
3176 if (gimple_code (stmt) != GIMPLE_LABEL)
3180 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3184 last = get_last_insn ();
3188 expand_gimple_stmt (stmt);
3193 emit_label ((rtx) *elt);
3195 /* Java emits line number notes in the top of labels.
3196 ??? Make this go away once line number notes are obsoleted. */
3197 BB_HEAD (bb) = NEXT_INSN (last);
3198 if (NOTE_P (BB_HEAD (bb)))
3199 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3200 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3202 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3205 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3207 NOTE_BASIC_BLOCK (note) = bb;
3209 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3213 stmt = gsi_stmt (gsi);
3215 /* If this statement is a non-debug one, and we generate debug
3216 insns, then this one might be the last real use of a TERed
3217 SSA_NAME, but where there are still some debug uses further
3218 down. Expanding the current SSA name in such further debug
3219 uses by their RHS might lead to wrong debug info, as coalescing
3220 might make the operands of such RHS be placed into the same
3221 pseudo as something else. Like so:
3222 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
use(a_1);
a_2 = ...
#DEBUG ... => a_1
3226 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3227 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3228 the write to a_2 would actually have clobbered the place which would be remembered.
3231 So, instead of that, we recognize the situation, and generate
3232 debug temporaries at the last real use of TERed SSA names:
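a_1 = a_0 + 1;
#DEBUG #D1 => a_1
use(a_1);
a_2 = ...
#DEBUG ... => #D1  */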
3239 if (MAY_HAVE_DEBUG_INSNS
3241 && !is_gimple_debug (stmt))
3247 location_t sloc = get_curr_insn_source_location ();
3248 tree sblock = get_curr_insn_block ();
3250 /* Look for SSA names that have their last use here (TERed
3251 names always have only one real use). */
3252 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3253 if ((def = get_gimple_for_ssa_name (op)))
3255 imm_use_iterator imm_iter;
3256 use_operand_p use_p;
3257 bool have_debug_uses = false;
3259 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3261 if (gimple_debug_bind_p (USE_STMT (use_p)))
3263 have_debug_uses = true;
3268 if (have_debug_uses)
3270 /* OP is a TERed SSA name, with DEF its defining
3271 statement, and where OP is used in further debug
3272 instructions. Generate a debug temporary, and
3273 replace all uses of OP in debug insns with that temporary. */
3276 tree value = gimple_assign_rhs_to_tree (def);
3277 tree vexpr = make_node (DEBUG_EXPR_DECL);
3279 enum machine_mode mode;
3281 set_curr_insn_source_location (gimple_location (def));
3282 set_curr_insn_block (gimple_block (def));
3284 DECL_ARTIFICIAL (vexpr) = 1;
3285 TREE_TYPE (vexpr) = TREE_TYPE (value);
3287 mode = DECL_MODE (value);
3289 mode = TYPE_MODE (TREE_TYPE (value));
3290 DECL_MODE (vexpr) = mode;
3292 val = gen_rtx_VAR_LOCATION
3293 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3295 val = emit_debug_insn (val);
3297 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3299 if (!gimple_debug_bind_p (debugstmt))
3302 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3303 SET_USE (use_p, vexpr);
3305 update_stmt (debugstmt);
3309 set_curr_insn_source_location (sloc);
3310 set_curr_insn_block (sblock);
3313 currently_expanding_gimple_stmt = stmt;
3315 /* Expand this statement, then evaluate the resulting RTL and
3316 fix up the CFG accordingly. */
3317 if (gimple_code (stmt) == GIMPLE_COND)
3319 new_bb = expand_gimple_cond (bb, stmt);
3323 else if (gimple_debug_bind_p (stmt))
3325 location_t sloc = get_curr_insn_source_location ();
3326 tree sblock = get_curr_insn_block ();
3327 gimple_stmt_iterator nsi = gsi;
3331 tree var = gimple_debug_bind_get_var (stmt);
3334 enum machine_mode mode;
3336 if (gimple_debug_bind_has_value_p (stmt))
3337 value = gimple_debug_bind_get_value (stmt);
3341 last = get_last_insn ();
3343 set_curr_insn_source_location (gimple_location (stmt));
3344 set_curr_insn_block (gimple_block (stmt));
3347 mode = DECL_MODE (var);
3349 mode = TYPE_MODE (TREE_TYPE (var));
3351 val = gen_rtx_VAR_LOCATION
3352 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3354 val = emit_debug_insn (val);
3356 if (dump_file && (dump_flags & TDF_DETAILS))
3358 /* We can't dump the insn with a TREE where an RTX is expected. */
3360 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3361 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3362 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3365 /* In order not to generate too many debug temporaries,
3366 we delink all uses in the debug statements we have already expanded.
3367 Therefore, debug statements between the definition and the real
3368 use of TERed SSA names will continue to use the SSA name,
3369 and will not be replaced with debug temps. */
3370 delink_stmt_imm_use (stmt);
3374 if (gsi_end_p (nsi))
3376 stmt = gsi_stmt (nsi);
3377 if (!gimple_debug_bind_p (stmt))
3381 set_curr_insn_source_location (sloc);
3382 set_curr_insn_block (sblock);
3386 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3389 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3400 def_operand_p def_p;
3401 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3405 /* Ignore this stmt if it is in the list of
3406 replaceable expressions. */
3408 && bitmap_bit_p (SA.values,
3409 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3412 last = expand_gimple_stmt (stmt);
3413 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3418 currently_expanding_gimple_stmt = NULL;
3420 /* Expand implicit goto and convert goto_locus. */
3421 FOR_EACH_EDGE (e, ei, bb->succs)
3423 if (e->goto_locus && e->goto_block)
3425 set_curr_insn_source_location (e->goto_locus);
3426 set_curr_insn_block (e->goto_block);
3427 e->goto_locus = curr_insn_locator ();
3429 e->goto_block = NULL;
3430 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3432 emit_jump (label_rtx_for_bb (e->dest));
3433 e->flags &= ~EDGE_FALLTHRU;
3437 /* Expanded RTL can create a jump in the last instruction of the block.
3438 This might later be assumed to be a jump to the successor and break edge insertion.
3439 We need to insert a dummy move to prevent this. PR41440. */
3440 if (single_succ_p (bb)
3441 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3442 && (last = get_last_insn ())
3445 rtx dummy = gen_reg_rtx (SImode);
3446 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3449 do_pending_stack_adjust ();
3451 /* Find the block tail. The last insn in the block is the insn
3452 before a barrier and/or table jump insn. */
3453 last = get_last_insn ();
3454 if (BARRIER_P (last))
3455 last = PREV_INSN (last);
3456 if (JUMP_TABLE_DATA_P (last))
3457 last = PREV_INSN (PREV_INSN (last));
3460 update_bb_for_insn (bb);
3466 /* Create a basic block for initialization code. */
3469 construct_init_block (void)
3471 basic_block init_block, first_block;
3475 /* Multiple entry points not supported yet. */
3476 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3477 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3478 init_rtl_bb_info (EXIT_BLOCK_PTR);
3479 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3480 EXIT_BLOCK_PTR->flags |= BB_RTL;
3482 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3484 /* When the entry edge points to the first basic block, we don't need a jump;
3485 otherwise we have to jump to the proper target. */
3486 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3488 tree label = gimple_block_label (e->dest);
3490 emit_jump (label_rtx (label));
3494 flags = EDGE_FALLTHRU;
3496 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3499 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3500 init_block->count = ENTRY_BLOCK_PTR->count;
3503 first_block = e->dest;
3504 redirect_edge_succ (e, init_block);
3505 e = make_edge (init_block, first_block, flags);
3508 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3509 e->probability = REG_BR_PROB_BASE;
3510 e->count = ENTRY_BLOCK_PTR->count;
3512 update_bb_for_insn (init_block);
3516 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3517 found in the block tree. */
3520 set_block_levels (tree block, int level)
3524 BLOCK_NUMBER (block) = level;
3525 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3526 block = BLOCK_CHAIN (block);
3530 /* Create a block containing landing pads and similar stuff. */
3533 construct_exit_block (void)
3535 rtx head = get_last_insn ();
3537 basic_block exit_block;
3541 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3543 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3545 /* Make sure the locus is set to the end of the function, so that
3546 epilogue line numbers and warnings are set properly. */
3547 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3548 input_location = cfun->function_end_locus;
3550 /* The following insns belong to the top scope. */
3551 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3553 /* Generate rtl for function exit. */
3554 expand_function_end ();
3556 end = get_last_insn ();
3559 /* While emitting the function end we could have moved the end of the last basic block. */
3561 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3562 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3563 head = NEXT_INSN (head);
3564 exit_block = create_basic_block (NEXT_INSN (head), end,
3565 EXIT_BLOCK_PTR->prev_bb);
3566 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3567 exit_block->count = EXIT_BLOCK_PTR->count;
3570 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3572 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3573 if (!(e->flags & EDGE_ABNORMAL))
3574 redirect_edge_succ (e, exit_block);
3579 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3580 e->probability = REG_BR_PROB_BASE;
3581 e->count = EXIT_BLOCK_PTR->count;
3582 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3585 e->count -= e2->count;
3586 exit_block->count -= e2->count;
3587 exit_block->frequency -= EDGE_FREQUENCY (e2);
3591 if (exit_block->count < 0)
3592 exit_block->count = 0;
3593 if (exit_block->frequency < 0)
3594 exit_block->frequency = 0;
3595 update_bb_for_insn (exit_block);
3598 /* Helper function for discover_nonconstant_array_refs.
3599 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
3603 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3604 void *data ATTRIBUTE_UNUSED)
3608 if (IS_TYPE_OR_DECL_P (t))
3610 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3612 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3613 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3614 && (!TREE_OPERAND (t, 2)
3615 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3616 || (TREE_CODE (t) == COMPONENT_REF
3617 && (!TREE_OPERAND (t,2)
3618 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3619 || TREE_CODE (t) == BIT_FIELD_REF
3620 || TREE_CODE (t) == REALPART_EXPR
3621 || TREE_CODE (t) == IMAGPART_EXPR
3622 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3623 || CONVERT_EXPR_P (t))
3624 t = TREE_OPERAND (t, 0);
3626 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3628 t = get_base_address (t);
3630 && DECL_MODE (t) != BLKmode)
3631 TREE_ADDRESSABLE (t) = 1;
3640 /* RTL expansion is not able to compile array references with variable
3641 offsets for arrays stored in a single register. Discover such
3642 expressions and mark variables as addressable to avoid this scenario. */
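/* For instance (an illustrative sketch), given

       v4si v;        // vector small enough to live in one register
       v[i] = 0;      // i not a compile-time constant

   the store would need a variable-position bit-field insert into a
   register, which RTL expansion cannot do, so V is marked
   TREE_ADDRESSABLE and is given a stack slot instead.  */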
3646 discover_nonconstant_array_refs (void)
3649 gimple_stmt_iterator gsi;
3652 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3654 gimple stmt = gsi_stmt (gsi);
3655 if (!is_gimple_debug (stmt))
3656 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3660 /* This function sets crtl->args.internal_arg_pointer to a virtual
3661 register if DRAP is needed. The local register allocator will replace
3662 virtual_incoming_args_rtx with the virtual register. */
3665 expand_stack_alignment (void)
3668 unsigned int preferred_stack_boundary;
3670 if (! SUPPORTS_STACK_ALIGNMENT)
3673 if (cfun->calls_alloca
3674 || cfun->has_nonlocal_label
3675 || crtl->has_nonlocal_goto)
3676 crtl->need_drap = true;
3678 /* Call update_stack_boundary here again to update incoming stack
3679 boundary. It may set incoming stack alignment to a different
3680 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3681 use the minimum incoming stack alignment to check if it is OK
3682 to perform sibcall optimization since sibcall optimization will
3683 only align the outgoing stack to incoming stack boundary. */
3684 if (targetm.calls.update_stack_boundary)
3685 targetm.calls.update_stack_boundary ();
3687 /* The incoming stack frame has to be aligned at least at
3688 parm_stack_boundary. */
3689 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3691 /* Update crtl->stack_alignment_estimated and use it later to align
3692 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3693 exceptions since callgraph doesn't collect incoming stack alignment in this case. */
3695 if (cfun->can_throw_non_call_exceptions
3696 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3697 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3699 preferred_stack_boundary = crtl->preferred_stack_boundary;
3700 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3701 crtl->stack_alignment_estimated = preferred_stack_boundary;
3702 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3703 crtl->stack_alignment_needed = preferred_stack_boundary;
3705 gcc_assert (crtl->stack_alignment_needed
3706 <= crtl->stack_alignment_estimated);
3708 crtl->stack_realign_needed
3709 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3710 crtl->stack_realign_tried = crtl->stack_realign_needed;
3712 crtl->stack_realign_processed = true;
3714 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack realignment. */
3716 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3717 drap_rtx = targetm.calls.get_drap_rtx ();
3719 /* stack_realign_drap and drap_rtx must match. */
3720 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3722 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3723 if (NULL != drap_rtx)
3725 crtl->args.internal_arg_pointer = drap_rtx;
3727 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is needed. */
3729 fixup_tail_calls ();
3733 /* Translate the intermediate representation contained in the CFG
3734 from GIMPLE trees to RTL.
3736 We do conversion per basic block and preserve/update the tree CFG.
3737 This implies we have to do some magic as the CFG can simultaneously
3738 consist of basic blocks containing RTL and GIMPLE trees. This can
3739 confuse the CFG hooks, so be careful not to manipulate the CFG during the expansion. */
3743 gimple_expand_cfg (void)
3745 basic_block bb, init_block;
3751 timevar_push (TV_OUT_OF_SSA);
3752 rewrite_out_of_ssa (&SA);
3753 timevar_pop (TV_OUT_OF_SSA);
3754 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3757 /* Some backends want to know that we are expanding to RTL. */
3758 currently_expanding_to_rtl = 1;
3760 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3762 insn_locators_alloc ();
3763 if (!DECL_IS_BUILTIN (current_function_decl))
3765 /* Eventually, all FEs should explicitly set function_start_locus. */
3766 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3767 set_curr_insn_source_location
3768 (DECL_SOURCE_LOCATION (current_function_decl));
3770 set_curr_insn_source_location (cfun->function_start_locus);
3772 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3773 prologue_locator = curr_insn_locator ();
3775 #ifdef INSN_SCHEDULING
3776 init_sched_attrs ();
3779 /* Make sure the first insn is a note even if we don't want linenums.
3780 This makes sure the first insn will never be deleted.
3781 Also, final expects a note to appear there. */
3782 emit_note (NOTE_INSN_DELETED);
3784 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3785 discover_nonconstant_array_refs ();
3787 targetm.expand_to_rtl_hook ();
3788 crtl->stack_alignment_needed = STACK_BOUNDARY;
3789 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3790 crtl->stack_alignment_estimated = 0;
3791 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3792 cfun->cfg->max_jumptable_ents = 0;
3795 /* Expand the variables recorded during gimple lowering. */
3796 timevar_push (TV_VAR_EXPAND);
3797 expand_used_vars ();
3798 timevar_pop (TV_VAR_EXPAND);
3800 /* Honor stack protection warnings. */
3801 if (warn_stack_protect)
3803 if (cfun->calls_alloca)
3804 warning (OPT_Wstack_protector,
3805 "stack protector not protecting local variables: "
3806 "variable length buffer");
3807 if (has_short_buffer && !crtl->stack_protect_guard)
3808 warning (OPT_Wstack_protector,
3809 "stack protector not protecting function: "
3810 "all local arrays are less than %d bytes long",
3811 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3814 /* Set up parameters and prepare for return, for the function. */
3815 expand_function_start (current_function_decl);
3817 /* Now that we also have the parameter RTXs, copy them over to our partition representation. */
3819 for (i = 0; i < SA.map->num_partitions; i++)
3821 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3823 if (TREE_CODE (var) != VAR_DECL
3824 && !SA.partition_to_pseudo[i])
3825 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3826 gcc_assert (SA.partition_to_pseudo[i]);
3828 /* If this decl was marked as living in multiple places, reset
3829 this now to NULL. */
3830 if (DECL_RTL_IF_SET (var) == pc_rtx)
3831 SET_DECL_RTL (var, NULL);
3833 /* Some RTL parts really want to look at DECL_RTL(x) when x
3834 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3835 SET_DECL_RTL here to make this available, but that would mean
3836 selecting one of the potentially many RTLs for one DECL. Instead
3837 of doing that we simply reset the MEM_EXPR of the RTL in question,
3838 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3839 if (!DECL_RTL_SET_P (var))
3841 if (MEM_P (SA.partition_to_pseudo[i]))
3842 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3846 /* If this function is `main', emit a call to `__main'
3847 to run global initializers, etc. */
3848 if (DECL_NAME (current_function_decl)
3849 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3850 && DECL_FILE_SCOPE_P (current_function_decl))
3851 expand_main_function ();
3853 /* Initialize the stack_protect_guard field. This must happen after the
3854 call to __main (if any) so that the external decl is initialized. */
3855 if (crtl->stack_protect_guard)
3856 stack_protect_prologue ();
3858 expand_phi_nodes (&SA);
3860 /* Register rtl specific functions for cfg. */
3861 rtl_register_cfg_hooks ();
3863 init_block = construct_init_block ();
3865 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3866 remaining edges later. */
3867 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3868 e->flags &= ~EDGE_EXECUTABLE;
3870 lab_rtx_for_bb = pointer_map_create ();
3871 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3872 bb = expand_gimple_basic_block (bb);
3874 if (MAY_HAVE_DEBUG_INSNS)
3875 expand_debug_locations ();
3877 execute_free_datastructures ();
3878 timevar_push (TV_OUT_OF_SSA);
3879 finish_out_of_ssa (&SA);
3880 timevar_pop (TV_OUT_OF_SSA);
3882 timevar_push (TV_POST_EXPAND);
3883 /* We are no longer in SSA form. */
3884 cfun->gimple_df->in_ssa_p = false;
3886 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3887 conservatively to true until they are all profile aware. */
3888 pointer_map_destroy (lab_rtx_for_bb);
3891 construct_exit_block ();
3892 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3893 insn_locators_finalize ();
3895 /* Zap the tree EH table. */
3896 set_eh_throw_stmt_table (cfun, NULL);
3898 rebuild_jump_labels (get_insns ());
3900 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3904 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3907 commit_one_edge_insertion (e);
3913 /* We're done expanding trees to RTL. */
3914 currently_expanding_to_rtl = 0;
3916 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3920 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3922 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3923 e->flags &= ~EDGE_EXECUTABLE;
3925 /* At the moment not all abnormal edges match the RTL
3926 representation. It is safe to remove them here as
3927 find_many_sub_basic_blocks will rediscover them.
3928 In the future we should get this fixed properly. */
3929 if ((e->flags & EDGE_ABNORMAL)
3930 && !(e->flags & EDGE_SIBCALL))
3937 blocks = sbitmap_alloc (last_basic_block);
3938 sbitmap_ones (blocks);
3939 find_many_sub_basic_blocks (blocks);
3940 sbitmap_free (blocks);
3941 purge_all_dead_edges ();
3945 expand_stack_alignment ();
3947 #ifdef ENABLE_CHECKING
3948 verify_flow_info ();
3951 /* There's no need to defer outputting this function any more; we
3952 know we want to output it. */
3953 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3955 /* Now that we're done expanding trees to RTL, we shouldn't have any
3956 more CONCATs anywhere. */
3957 generating_concat_p = 0;
3962 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3963 /* And the pass manager will dump RTL for us. */
3966 /* If we're emitting a nested function, make sure its parent gets
3967 emitted as well. Doing otherwise confuses debug info. */
3970 for (parent = DECL_CONTEXT (current_function_decl);
3971 parent != NULL_TREE;
3972 parent = get_containing_scope (parent))
3973 if (TREE_CODE (parent) == FUNCTION_DECL)
3974 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3977 /* We are now committed to emitting code for this function. Do any
3978 preparation, such as emitting abstract debug info for the inline function
3979 before it gets mangled by optimization. */
3980 if (cgraph_function_possibly_inlined_p (current_function_decl))
3981 (*debug_hooks->outlining_inline_function) (current_function_decl);
3983 TREE_ASM_WRITTEN (current_function_decl) = 1;
3985 /* After expanding, the return labels are no longer needed. */
3986 return_label = NULL;
3987 naked_return_label = NULL;
3988 /* Tag the blocks with a depth number so that change_scope can find
3989 the common parent easily. */
3990 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3991 default_rtl_profile ();
3992 timevar_pop (TV_POST_EXPAND);
3996 struct rtl_opt_pass pass_expand =
4000 "expand", /* name */
4002 gimple_expand_cfg, /* execute */
4005 0, /* static_pass_number */
4006 TV_EXPAND, /* tv_id */
4007 PROP_ssa | PROP_gimple_leh | PROP_cfg
4008 | PROP_gimple_lcx, /* properties_required */
4009 PROP_rtl, /* properties_provided */
4010 PROP_ssa | PROP_trees, /* properties_destroyed */
4011 TODO_verify_ssa | TODO_verify_flow
4012 | TODO_verify_stmts, /* todo_flags_start */
4014 | TODO_ggc_collect /* todo_flags_finish */