/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

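/* Illustrative note (not from the original source): for a statement
   like "x_1 = y_2 + z_3" the expression code PLUS_EXPR classifies as
   GIMPLE_BINARY_RHS, so the tree rebuilt above is effectively
     build2 (PLUS_EXPR, TREE_TYPE (x_1), y_2, z_3);
   while a plain copy "x_1 = y_2" is GIMPLE_SINGLE_RHS and reuses the
   operand tree directly, copying it first only if the location or
   block would otherwise be modified in place.  */
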
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

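/* Hedged example of the -O0 bookkeeping above (names hypothetical):
   if SSA names i_1 and i_2 both have base VAR_DECL "i" and are
   assigned the same pseudo R, DECL_RTL (i) is recorded as R.  If
   their partitions are given different places, DECL_RTL (i) becomes
   pc_rtx, meaning "location not known here"; with optimization on,
   var-tracking later recovers the per-range locations.  */
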
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

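/* Worked example (illustrative numbers): with a preferred boundary of
   16 bytes and STARTING_FRAME_OFFSET == 4, expand_used_vars computes
   off = 4 and frame_phase = 12, so frame offsets congruent to 4 mod 16
   are treated as aligned by alloc_stack_frame_space below.  */
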
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

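/* Worked example (illustrative, assuming FRAME_GROWS_DOWNWARD and
   frame_phase == 0): with frame_offset == -16, a request for
   size == 12 at align == 8 computes -16 - 12 == -28, rounds down to
   -32 with the mask, and returns -32; frame_offset becomes -32, with
   4 bytes of the slot left as alignment padding.  */
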
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict, ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size, use the ids of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add, we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we've got a nice block that is easy to lay out
   within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

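/* Sketch of the union step (hypothetical indices): if partition A is
   {v0 at offset 0} and partition B is {v3 at offset 0},
   union_stack_vars (A, B, 8) rewrites v3's offset to 8, points its
   representative at A, and splices B's list into A's chain, so the
   merged partition lays out v0 at offset 0 and v3 at offset 8 within
   A's footprint.  */
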
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;

          offset += jsize;
        }
    }

  update_alias_info_with_stack_vars ();
}

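/* Worked example of the binpacking loop (hypothetical variables):
   suppose A (16 bytes) conflicts with neither B (8 bytes) nor
   C (4 bytes), but B and C conflict with each other.  Processing A,
   the inner loop scans the smaller sorted entries downwards: B is
   unioned into A at offset 0, leaving 8 bytes; C cannot share B's
   bytes (their conflict was recorded earlier), so the cursor places
   it at offset 8.  All three variables fit in A's 16-byte footprint
   instead of consuming 28 bytes of frame.  */
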
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align, max_align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      max_align = MAX (crtl->max_used_stack_slot_alignment,
                       PREFERRED_STACK_BOUNDARY);
      if (align == 0 || align > max_align)
        align = max_align;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

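/* Note on the computation above (illustrative): "offset & -offset"
   extracts the largest power of two dividing the phase-adjusted
   offset; e.g. an offset of 24 gives 8, so the slot is known to be
   8-byte aligned (64 bits after the BITS_PER_UNIT scaling).  An
   offset of 0 divides everything, hence the max_align clamp.  */
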
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision has been made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

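/* Classification example (hypothetical decls, assuming the default
   --param ssp-buffer-size=8): "char buf[64]" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY and gets phase 1;
   "int a[4]" is only SPCT_HAS_ARRAY, so it gets phase 2 under
   -fstack-protector-all (flag_stack_protect == 2) and phase 0
   otherwise; scalars classify to phase 0 and are not segregated.  */
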
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   accounting for variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the decl
   passed.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (tree decl)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  unsigned ix;
  tree old_cur_fun_decl = current_function_decl;
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (cfun, ix, var)
    {
      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}

/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
                case GIMPLE_TERNARY_RHS:
                  ops.op2 = gimple_assign_rhs3 (stmt);
                  /* Fallthru.  */
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (stmt);
                  /* Fallthru.  */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}

/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  int lp_nr = 0;
  rtx last = NULL;
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (gimple_has_location (stmt))
    {
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));
    }

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}

2021 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2022 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2023 generated a tail call (something that might be denied by the ABI
2024 rules governing the call; see calls.c).
2026 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2027 can still reach the rest of BB. The case here is __builtin_sqrt,
2028 where the NaN result goes through the external function (with a
2029 tailcall) and the normal result happens via a sqrt instruction. */
2032 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2040 last2 = last = expand_gimple_stmt (stmt);
2042 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2043 if (CALL_P (last) && SIBLING_CALL_P (last))
2046 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2048 *can_fallthru = true;
2052 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2053 Any instructions emitted here are about to be deleted. */
2054 do_pending_stack_adjust ();
2056 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2057 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2058 EH or abnormal edges, we shouldn't have created a tail call in
2059 the first place. So it seems to me we should just be removing
2060 all edges here, or redirecting the existing fallthru edge to
2066 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2068 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2070 if (e->dest != EXIT_BLOCK_PTR)
2072 e->dest->count -= e->count;
2073 e->dest->frequency -= EDGE_FREQUENCY (e);
2074 if (e->dest->count < 0)
2076 if (e->dest->frequency < 0)
2077 e->dest->frequency = 0;
2080 probability += e->probability;
2087 /* This is somewhat ugly: the call_expr expander often emits instructions
2088 after the sibcall (to perform the function return). These confuse the
2089 find_many_sub_basic_blocks code, so we need to get rid of these. */
2090 last = NEXT_INSN (last);
2091 gcc_assert (BARRIER_P (last));
2093 *can_fallthru = false;
2094 while (NEXT_INSN (last))
2096 /* For instance, the sqrt builtin expander expands an if with a
2097 sibcall in the then-arm and a label for the else-arm. */
2098 if (LABEL_P (NEXT_INSN (last)))
2100 *can_fallthru = true;
2103 delete_insn (NEXT_INSN (last));
2106 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2107 e->probability += probability;
2110 update_bb_for_insn (bb);
2112 if (NEXT_INSN (last))
2114 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2117 if (BARRIER_P (last))
2118 BB_END (bb) = PREV_INSN (last);
2121 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2126 /* Return the difference between the floor and the truncated result of
2127 a signed division by OP1 with remainder MOD. */
2129 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2131 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2132 return gen_rtx_IF_THEN_ELSE
2133 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2134 gen_rtx_IF_THEN_ELSE
2135 (mode, gen_rtx_LT (BImode,
2136 gen_rtx_DIV (mode, op1, mod),
2138 constm1_rtx, const0_rtx),
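/* A plain-C cross-check of the adjustment above (editor's sketch; it relies
   only on C99 division truncating toward zero, i.e. TRUNC_DIV_EXPR):

     int floor_div (int op0, int op1)
     {
       int div = op0 / op1;
       int mod = op0 % op1;
       int adj = (mod != 0 && op1 / mod < 0) ? -1 : 0;
       return div + adj;
     }

   E.g. floor_div (-7, 2) yields -4 while -7 / 2 truncates to -3. Since
   |mod| < |op1|, op1 / mod is nonzero whenever mod is, and it is negative
   exactly when dividend and divisor have opposite signs. */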
2142 /* Return the difference between the ceil and the truncated result of
2143 a signed division by OP1 with remainder MOD. */
2145 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2147 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2148 return gen_rtx_IF_THEN_ELSE
2149 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2150 gen_rtx_IF_THEN_ELSE
2151 (mode, gen_rtx_GT (BImode,
2152 gen_rtx_DIV (mode, op1, mod),
2154 const1_rtx, const0_rtx),
2158 /* Return the difference between the ceil and the truncated result of
2159 an unsigned division by OP1 with remainder MOD. */
2161 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2163 /* (mod != 0 ? 1 : 0) */
2164 return gen_rtx_IF_THEN_ELSE
2165 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2166 const1_rtx, const0_rtx);
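/* Cross-check (editor's sketch): the CEIL adjustments add 1 exactly when
   truncation rounded the quotient toward zero from above: ceil_sdiv of
   7 by 2 is 7/2 + 1 = 4 (mod = 1 and op1 / mod > 0), and ceil_udiv of
   7 by 2 is 3 + 1 = 4 simply because mod != 0. */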
2169 /* Return the difference between the rounded and the truncated result
2170 of a signed division by OP1 with remainder MOD. Halfway cases are
2171 rounded away from zero, rather than to the nearest even number. */
2173 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2175 /* (abs (mod) >= abs (op1) - abs (mod)
2176 ? (op1 / mod > 0 ? 1 : -1) : 0) */
2178 return gen_rtx_IF_THEN_ELSE
2179 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2180 gen_rtx_MINUS (mode,
2181 gen_rtx_ABS (mode, op1),
2182 gen_rtx_ABS (mode, mod))),
2183 gen_rtx_IF_THEN_ELSE
2184 (mode, gen_rtx_GT (BImode,
2185 gen_rtx_DIV (mode, op1, mod),
2187 const1_rtx, constm1_rtx),
2191 /* Return the difference between the rounded and the truncated result
2192 of an unsigned division by OP1 with remainder MOD. Halfway cases
2193 are rounded away from zero, rather than to the nearest even number. */
2196 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2198 /* (mod >= op1 - mod ? 1 : 0) */
2199 return gen_rtx_IF_THEN_ELSE
2200 (mode, gen_rtx_GE (BImode, mod,
2201 gen_rtx_MINUS (mode, op1, mod)),
2202 const1_rtx, const0_rtx);
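/* Cross-check (editor's sketch): the ROUND adjustments fire when the
   remainder is at least half the divisor in magnitude. Signed: 7 by 3
   stays 2 (|1| < |3| - |1|), while 5 by 2 becomes 2 + 1 = 3
   (|1| >= |2| - |1|, the halfway case rounding away from zero).
   Unsigned: 5 by 2 is likewise 2 + 1 = 3 since mod >= op1 - mod. */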
2205 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting insns. */
2209 convert_debug_memory_address (enum machine_mode mode, rtx x)
2211 enum machine_mode xmode = GET_MODE (x);
2213 #ifndef POINTERS_EXTEND_UNSIGNED
2214 gcc_assert (mode == Pmode);
2215 gcc_assert (xmode == mode || xmode == VOIDmode);
2217 gcc_assert (mode == Pmode || mode == ptr_mode);
2219 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2222 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2223 x = simplify_gen_subreg (mode, x, xmode,
2224 subreg_lowpart_offset
2226 else if (POINTERS_EXTEND_UNSIGNED > 0)
2227 x = gen_rtx_ZERO_EXTEND (mode, x);
2228 else if (!POINTERS_EXTEND_UNSIGNED)
2229 x = gen_rtx_SIGN_EXTEND (mode, x);
2232 #endif /* POINTERS_EXTEND_UNSIGNED */
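/* For instance, on a hypothetical target with a 64-bit Pmode and a 32-bit
   ptr_mode, a ptr_mode address is widened above with ZERO_EXTEND when
   POINTERS_EXTEND_UNSIGNED is positive and with SIGN_EXTEND when it is
   zero, while narrowing takes the lowpart subreg. No insns are emitted
   either way; the result only feeds debug expressions. */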
2237 /* Return an RTX equivalent to the value of the tree expression EXP. */
2241 expand_debug_expr (tree exp)
2243 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2244 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2245 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2248 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2250 case tcc_expression:
2251 switch (TREE_CODE (exp))
2255 case WIDEN_MULT_PLUS_EXPR:
2256 case WIDEN_MULT_MINUS_EXPR:
2259 case TRUTH_ANDIF_EXPR:
2260 case TRUTH_ORIF_EXPR:
2261 case TRUTH_AND_EXPR:
2263 case TRUTH_XOR_EXPR:
2266 case TRUTH_NOT_EXPR:
2275 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2282 case tcc_comparison:
2283 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2290 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2300 case tcc_exceptional:
2301 case tcc_declaration:
2307 switch (TREE_CODE (exp))
2310 if (!lookup_constant_def (exp))
2312 if (strlen (TREE_STRING_POINTER (exp)) + 1
2313 != (size_t) TREE_STRING_LENGTH (exp))
2315 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2316 op0 = gen_rtx_MEM (BLKmode, op0);
2317 set_mem_attributes (op0, exp, 0);
2320 /* Fall through... */
2325 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2329 gcc_assert (COMPLEX_MODE_P (mode));
2330 op0 = expand_debug_expr (TREE_REALPART (exp));
2331 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2332 return gen_rtx_CONCAT (mode, op0, op1);
2334 case DEBUG_EXPR_DECL:
2335 op0 = DECL_RTL_IF_SET (exp);
2340 op0 = gen_rtx_DEBUG_EXPR (mode);
2341 DEBUG_EXPR_TREE_DECL (op0) = exp;
2342 SET_DECL_RTL (exp, op0);
2352 op0 = DECL_RTL_IF_SET (exp);
2354 /* This decl was probably optimized away. */
2357 if (TREE_CODE (exp) != VAR_DECL
2358 || DECL_EXTERNAL (exp)
2359 || !TREE_STATIC (exp)
2361 || DECL_HARD_REGISTER (exp)
2362 || mode == VOIDmode)
2365 op0 = make_decl_rtl_for_debug (exp);
2367 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2368 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2372 op0 = copy_rtx (op0);
2374 if (GET_MODE (op0) == BLKmode
2375 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2376 below would ICE. While it is likely an FE bug,
2377 try to be robust here. See PR43166. */
2379 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2381 gcc_assert (MEM_P (op0));
2382 op0 = adjust_address_nv (op0, mode, 0);
2393 enum machine_mode inner_mode = GET_MODE (op0);
2395 if (mode == inner_mode)
2398 if (inner_mode == VOIDmode)
2400 if (TREE_CODE (exp) == SSA_NAME)
2401 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2403 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2404 if (mode == inner_mode)
2408 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2410 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2411 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2412 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2413 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2415 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2417 else if (FLOAT_MODE_P (mode))
2419 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2420 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2421 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2423 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2425 else if (FLOAT_MODE_P (inner_mode))
2428 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2430 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2432 else if (CONSTANT_P (op0)
2433 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2434 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2435 subreg_lowpart_offset (mode,
2437 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2438 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2440 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2442 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2449 if (!integer_zerop (TREE_OPERAND (exp, 1)))
2453 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2457 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2458 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2460 as = ADDR_SPACE_GENERIC;
2462 op0 = gen_rtx_MEM (mode, op0);
2464 set_mem_attributes (op0, exp, 0);
2465 set_mem_addr_space (op0, as);
2469 case TARGET_MEM_REF:
2470 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2471 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2474 op0 = expand_debug_expr
2475 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2479 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2481 op0 = gen_rtx_MEM (mode, op0);
2483 set_mem_attributes (op0, exp, 0);
2484 set_mem_addr_space (op0, as);
2489 case ARRAY_RANGE_REF:
2494 case VIEW_CONVERT_EXPR:
2496 enum machine_mode mode1;
2497 HOST_WIDE_INT bitsize, bitpos;
2500 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2501 &mode1, &unsignedp, &volatilep, false);
2507 orig_op0 = op0 = expand_debug_expr (tem);
2514 enum machine_mode addrmode, offmode;
2519 op0 = XEXP (op0, 0);
2520 addrmode = GET_MODE (op0);
2521 if (addrmode == VOIDmode)
2524 op1 = expand_debug_expr (offset);
2528 offmode = GET_MODE (op1);
2529 if (offmode == VOIDmode)
2530 offmode = TYPE_MODE (TREE_TYPE (offset));
2532 if (addrmode != offmode)
2533 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2534 subreg_lowpart_offset (addrmode,
2537 /* Don't use offset_address here, we don't need a
2538 recognizable address, and we don't want to generate too many insns. */
2540 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2545 if (mode1 == VOIDmode)
2547 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2548 if (bitpos >= BITS_PER_UNIT)
2550 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2551 bitpos %= BITS_PER_UNIT;
2553 else if (bitpos < 0)
2556 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2557 op0 = adjust_address_nv (op0, mode1, units);
2558 bitpos += units * BITS_PER_UNIT;
2560 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2561 op0 = adjust_address_nv (op0, mode, 0);
2562 else if (GET_MODE (op0) != mode1)
2563 op0 = adjust_address_nv (op0, mode1, 0);
2565 op0 = copy_rtx (op0);
2566 if (op0 == orig_op0)
2567 op0 = shallow_copy_rtx (op0);
2568 set_mem_attributes (op0, exp, 0);
2571 if (bitpos == 0 && mode == GET_MODE (op0))
2577 if (GET_MODE (op0) == BLKmode)
2580 if ((bitpos % BITS_PER_UNIT) == 0
2581 && bitsize == GET_MODE_BITSIZE (mode1))
2583 enum machine_mode opmode = GET_MODE (op0);
2585 if (opmode == VOIDmode)
2588 /* This condition may hold if we're expanding the address
2589 right past the end of an array that turned out not to
2590 be addressable (i.e., the address was only computed in
2591 debug stmts). The gen_subreg below would rightfully
2592 crash, and the address doesn't really exist, so just drop it. */
2594 if (bitpos >= GET_MODE_BITSIZE (opmode))
2597 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2598 return simplify_gen_subreg (mode, op0, opmode,
2599 bitpos / BITS_PER_UNIT);
2602 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2603 && TYPE_UNSIGNED (TREE_TYPE (exp))
2605 : ZERO_EXTRACT, mode,
2606 GET_MODE (op0) != VOIDmode
2607 ? GET_MODE (op0) : mode1,
2608 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2612 return gen_rtx_ABS (mode, op0);
2615 return gen_rtx_NEG (mode, op0);
2618 return gen_rtx_NOT (mode, op0);
2622 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2624 return gen_rtx_FLOAT (mode, op0);
2626 case FIX_TRUNC_EXPR:
2628 return gen_rtx_UNSIGNED_FIX (mode, op0);
2630 return gen_rtx_FIX (mode, op0);
2632 case POINTER_PLUS_EXPR:
2633 /* For the rare target where pointers are not the same size as
2634 size_t, we need to check for mismatched modes and correct the addend. */
2637 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2638 && GET_MODE (op0) != GET_MODE (op1))
2640 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2641 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2643 /* We always sign-extend, regardless of the signedness of
2644 the operand, because the operand is always unsigned
2645 here even if the original C expression is signed. */
2646 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2650 return gen_rtx_PLUS (mode, op0, op1);
2653 return gen_rtx_MINUS (mode, op0, op1);
2656 return gen_rtx_MULT (mode, op0, op1);
2659 case TRUNC_DIV_EXPR:
2660 case EXACT_DIV_EXPR:
2662 return gen_rtx_UDIV (mode, op0, op1);
2664 return gen_rtx_DIV (mode, op0, op1);
2666 case TRUNC_MOD_EXPR:
2668 return gen_rtx_UMOD (mode, op0, op1);
2670 return gen_rtx_MOD (mode, op0, op1);
2672 case FLOOR_DIV_EXPR:
2674 return gen_rtx_UDIV (mode, op0, op1);
2677 rtx div = gen_rtx_DIV (mode, op0, op1);
2678 rtx mod = gen_rtx_MOD (mode, op0, op1);
2679 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2680 return gen_rtx_PLUS (mode, div, adj);
2683 case FLOOR_MOD_EXPR:
2685 return gen_rtx_UMOD (mode, op0, op1);
2688 rtx mod = gen_rtx_MOD (mode, op0, op1);
2689 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2690 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2691 return gen_rtx_PLUS (mode, mod, adj);
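/* Cross-check (editor's sketch): the identity used above is
   floor_mod = mod - floor_adj * op1. E.g. for op0 = -7, op1 = 2 we have
   mod = -1 and floor_adj = -1, hence -1 - (-1 * 2) = 1, matching
   -7 - 2 * floor (-7 / 2.0) = -7 - 2 * -4 = 1. */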
2697 rtx div = gen_rtx_UDIV (mode, op0, op1);
2698 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2699 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2700 return gen_rtx_PLUS (mode, div, adj);
2704 rtx div = gen_rtx_DIV (mode, op0, op1);
2705 rtx mod = gen_rtx_MOD (mode, op0, op1);
2706 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2707 return gen_rtx_PLUS (mode, div, adj);
2713 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2714 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2715 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2716 return gen_rtx_PLUS (mode, mod, adj);
2720 rtx mod = gen_rtx_MOD (mode, op0, op1);
2721 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2722 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2723 return gen_rtx_PLUS (mode, mod, adj);
2726 case ROUND_DIV_EXPR:
2729 rtx div = gen_rtx_UDIV (mode, op0, op1);
2730 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2731 rtx adj = round_udiv_adjust (mode, mod, op1);
2732 return gen_rtx_PLUS (mode, div, adj);
2736 rtx div = gen_rtx_DIV (mode, op0, op1);
2737 rtx mod = gen_rtx_MOD (mode, op0, op1);
2738 rtx adj = round_sdiv_adjust (mode, mod, op1);
2739 return gen_rtx_PLUS (mode, div, adj);
2742 case ROUND_MOD_EXPR:
2745 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2746 rtx adj = round_udiv_adjust (mode, mod, op1);
2747 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2748 return gen_rtx_PLUS (mode, mod, adj);
2752 rtx mod = gen_rtx_MOD (mode, op0, op1);
2753 rtx adj = round_sdiv_adjust (mode, mod, op1);
2754 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2755 return gen_rtx_PLUS (mode, mod, adj);
2759 return gen_rtx_ASHIFT (mode, op0, op1);
2763 return gen_rtx_LSHIFTRT (mode, op0, op1);
2765 return gen_rtx_ASHIFTRT (mode, op0, op1);
2768 return gen_rtx_ROTATE (mode, op0, op1);
2771 return gen_rtx_ROTATERT (mode, op0, op1);
2775 return gen_rtx_UMIN (mode, op0, op1);
2777 return gen_rtx_SMIN (mode, op0, op1);
2781 return gen_rtx_UMAX (mode, op0, op1);
2783 return gen_rtx_SMAX (mode, op0, op1);
2786 case TRUTH_AND_EXPR:
2787 return gen_rtx_AND (mode, op0, op1);
2791 return gen_rtx_IOR (mode, op0, op1);
2794 case TRUTH_XOR_EXPR:
2795 return gen_rtx_XOR (mode, op0, op1);
2797 case TRUTH_ANDIF_EXPR:
2798 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2800 case TRUTH_ORIF_EXPR:
2801 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2803 case TRUTH_NOT_EXPR:
2804 return gen_rtx_EQ (mode, op0, const0_rtx);
2808 return gen_rtx_LTU (mode, op0, op1);
2810 return gen_rtx_LT (mode, op0, op1);
2814 return gen_rtx_LEU (mode, op0, op1);
2816 return gen_rtx_LE (mode, op0, op1);
2820 return gen_rtx_GTU (mode, op0, op1);
2822 return gen_rtx_GT (mode, op0, op1);
2826 return gen_rtx_GEU (mode, op0, op1);
2828 return gen_rtx_GE (mode, op0, op1);
2831 return gen_rtx_EQ (mode, op0, op1);
2834 return gen_rtx_NE (mode, op0, op1);
2836 case UNORDERED_EXPR:
2837 return gen_rtx_UNORDERED (mode, op0, op1);
2840 return gen_rtx_ORDERED (mode, op0, op1);
2843 return gen_rtx_UNLT (mode, op0, op1);
2846 return gen_rtx_UNLE (mode, op0, op1);
2849 return gen_rtx_UNGT (mode, op0, op1);
2852 return gen_rtx_UNGE (mode, op0, op1);
2855 return gen_rtx_UNEQ (mode, op0, op1);
2858 return gen_rtx_LTGT (mode, op0, op1);
2861 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2864 gcc_assert (COMPLEX_MODE_P (mode));
2865 if (GET_MODE (op0) == VOIDmode)
2866 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2867 if (GET_MODE (op1) == VOIDmode)
2868 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2869 return gen_rtx_CONCAT (mode, op0, op1);
2872 if (GET_CODE (op0) == CONCAT)
2873 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2874 gen_rtx_NEG (GET_MODE_INNER (mode),
2878 enum machine_mode imode = GET_MODE_INNER (mode);
2883 re = adjust_address_nv (op0, imode, 0);
2884 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2888 enum machine_mode ifmode = int_mode_for_mode (mode);
2889 enum machine_mode ihmode = int_mode_for_mode (imode);
2891 if (ifmode == BLKmode || ihmode == BLKmode)
2893 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2896 re = gen_rtx_SUBREG (ifmode, re, 0);
2897 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2898 if (imode != ihmode)
2899 re = gen_rtx_SUBREG (imode, re, 0);
2900 im = copy_rtx (op0);
2902 im = gen_rtx_SUBREG (ifmode, im, 0);
2903 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2904 if (imode != ihmode)
2905 im = gen_rtx_SUBREG (imode, im, 0);
2907 im = gen_rtx_NEG (imode, im);
2908 return gen_rtx_CONCAT (mode, re, im);
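/* E.g. (illustrative): a complex-float value held in a single register is
   taken apart above by ZERO_EXTRACTing the two halves of the integer mode
   covering the whole value; CONJ_EXPR then wraps the imaginary half in a
   NEG before reassembling the CONCAT. */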
2912 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2913 if (!op0 || !MEM_P (op0))
2915 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
2916 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
2917 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
2918 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
2919 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
2921 if (handled_component_p (TREE_OPERAND (exp, 0)))
2923 HOST_WIDE_INT bitoffset, bitsize, maxsize;
2925 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
2926 &bitoffset, &bitsize, &maxsize);
2927 if ((TREE_CODE (decl) == VAR_DECL
2928 || TREE_CODE (decl) == PARM_DECL
2929 || TREE_CODE (decl) == RESULT_DECL)
2930 && !TREE_ADDRESSABLE (decl)
2931 && (bitoffset % BITS_PER_UNIT) == 0
2933 && bitsize == maxsize)
2934 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
2935 bitoffset / BITS_PER_UNIT);
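/* E.g. (illustrative): for the address of s.f, where S is a
   non-addressable local struct, the code above yields
   DEBUG_IMPLICIT_PTR (s) plus the byte offset of F, instead of forcing S
   into memory just for the sake of debug info. */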
2941 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2946 exp = build_constructor_from_list (TREE_TYPE (exp),
2947 TREE_VECTOR_CST_ELTS (exp));
2951 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2956 op0 = gen_rtx_CONCATN
2957 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2959 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2961 op1 = expand_debug_expr (val);
2964 XVECEXP (op0, 0, i) = op1;
2967 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2969 op1 = expand_debug_expr
2970 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2975 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2976 XVECEXP (op0, 0, i) = op1;
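/* E.g. (illustrative): a four-lane vector CONSTRUCTOR listing only two
   values gets lanes 0 and 1 from the constructor and lanes 2 and 3 from
   the zero expanded once above. */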
2982 goto flag_unsupported;
2985 /* ??? Maybe handle some builtins? */
2990 gimple g = get_gimple_for_ssa_name (exp);
2993 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2999 int part = var_to_partition (SA.map, exp);
3001 if (part == NO_PARTITION)
3004 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3006 op0 = SA.partition_to_pseudo[part];
3014 /* Vector stuff. For most of these tree codes we don't have corresponding rtl codes. */
3015 case REALIGN_LOAD_EXPR:
3016 case REDUC_MAX_EXPR:
3017 case REDUC_MIN_EXPR:
3018 case REDUC_PLUS_EXPR:
3020 case VEC_EXTRACT_EVEN_EXPR:
3021 case VEC_EXTRACT_ODD_EXPR:
3022 case VEC_INTERLEAVE_HIGH_EXPR:
3023 case VEC_INTERLEAVE_LOW_EXPR:
3024 case VEC_LSHIFT_EXPR:
3025 case VEC_PACK_FIX_TRUNC_EXPR:
3026 case VEC_PACK_SAT_EXPR:
3027 case VEC_PACK_TRUNC_EXPR:
3028 case VEC_RSHIFT_EXPR:
3029 case VEC_UNPACK_FLOAT_HI_EXPR:
3030 case VEC_UNPACK_FLOAT_LO_EXPR:
3031 case VEC_UNPACK_HI_EXPR:
3032 case VEC_UNPACK_LO_EXPR:
3033 case VEC_WIDEN_MULT_HI_EXPR:
3034 case VEC_WIDEN_MULT_LO_EXPR:
3038 case ADDR_SPACE_CONVERT_EXPR:
3039 case FIXED_CONVERT_EXPR:
3041 case WITH_SIZE_EXPR:
3045 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3046 && SCALAR_INT_MODE_P (mode))
3048 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3049 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3051 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3052 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3053 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3055 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3056 op0 = gen_rtx_MULT (mode, op0, op1);
3057 return gen_rtx_PLUS (mode, op0, op2);
3061 case WIDEN_MULT_EXPR:
3062 case WIDEN_MULT_PLUS_EXPR:
3063 case WIDEN_MULT_MINUS_EXPR:
3064 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3065 && SCALAR_INT_MODE_P (mode))
3067 enum machine_mode inner_mode = GET_MODE (op0);
3068 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3069 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3071 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3072 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3073 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3075 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3076 op0 = gen_rtx_MULT (mode, op0, op1);
3077 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3079 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3080 return gen_rtx_PLUS (mode, op0, op2);
3082 return gen_rtx_MINUS (mode, op2, op0);
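/* E.g. (illustrative): for short A and B widened into an int result,
   WIDEN_MULT_EXPR becomes (mult:SI (sign_extend:SI A) (sign_extend:SI B)),
   and the PLUS/MINUS variants fold the accumulator OP2 into the result. */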
3086 case WIDEN_SUM_EXPR:
3087 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3088 && SCALAR_INT_MODE_P (mode))
3090 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3091 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3093 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3094 return gen_rtx_PLUS (mode, op0, op1);
3100 #ifdef ENABLE_CHECKING
3109 /* Expand the _LOCs in debug insns. We run this after expanding all
3110 regular insns, so that any variables referenced in the function
3111 will have their DECL_RTLs set. */
3114 expand_debug_locations (void)
3117 rtx last = get_last_insn ();
3118 int save_strict_alias = flag_strict_aliasing;
3120 /* New alias sets while setting up memory attributes cause
3121 -fcompare-debug failures, even though they don't bring about any codegen changes. */
3123 flag_strict_aliasing = 0;
3125 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3126 if (DEBUG_INSN_P (insn))
3128 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3130 enum machine_mode mode;
3132 if (value == NULL_TREE)
3136 val = expand_debug_expr (value);
3137 gcc_assert (last == get_last_insn ());
3141 val = gen_rtx_UNKNOWN_VAR_LOC ();
3144 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3146 gcc_assert (mode == GET_MODE (val)
3147 || (GET_MODE (val) == VOIDmode
3148 && (CONST_INT_P (val)
3149 || GET_CODE (val) == CONST_FIXED
3150 || GET_CODE (val) == CONST_DOUBLE
3151 || GET_CODE (val) == LABEL_REF)));
3154 INSN_VAR_LOCATION_LOC (insn) = val;
3157 flag_strict_aliasing = save_strict_alias;
3160 /* Expand basic block BB from GIMPLE trees to RTL. */
3163 expand_gimple_basic_block (basic_block bb)
3165 gimple_stmt_iterator gsi;
3174 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3177 /* Note that since we are now transitioning from GIMPLE to RTL, we
3178 cannot use the gsi_*_bb() routines because they expect the basic
3179 block to be in GIMPLE, instead of RTL. Therefore, we need to
3180 access the BB sequence directly. */
3181 stmts = bb_seq (bb);
3182 bb->il.gimple = NULL;
3183 rtl_profile_for_bb (bb);
3184 init_rtl_bb_info (bb);
3185 bb->flags |= BB_RTL;
3187 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
3189 gsi = gsi_last (stmts);
3190 if (!gsi_end_p (gsi)
3191 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3193 gimple ret_stmt = gsi_stmt (gsi);
3195 gcc_assert (single_succ_p (bb));
3196 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3198 if (bb->next_bb == EXIT_BLOCK_PTR
3199 && !gimple_return_retval (ret_stmt))
3201 gsi_remove (&gsi, false);
3202 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3206 gsi = gsi_start (stmts);
3207 if (!gsi_end_p (gsi))
3209 stmt = gsi_stmt (gsi);
3210 if (gimple_code (stmt) != GIMPLE_LABEL)
3214 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3218 last = get_last_insn ();
3222 expand_gimple_stmt (stmt);
3227 emit_label ((rtx) *elt);
3229 /* Java emits line number notes at the top of labels.
3230 ??? Make this go away once line number notes are obsoleted. */
3231 BB_HEAD (bb) = NEXT_INSN (last);
3232 if (NOTE_P (BB_HEAD (bb)))
3233 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3234 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3236 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3239 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3241 NOTE_BASIC_BLOCK (note) = bb;
3243 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3247 stmt = gsi_stmt (gsi);
3249 /* If this statement is a non-debug one, and we generate debug
3250 insns, then this one might be the last real use of a TERed
3251 SSA_NAME, but where there are still some debug uses further
3252 down. Expanding the current SSA name in such further debug
3253 uses by its RHS might lead to wrong debug info, as coalescing
3254 might make the operands of such RHS be placed into the same
3255 pseudo as something else. Like so:
3256 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3260 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3261 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3262 the write to a_2 would actually have clobbered the place which would have held a_0.
3265 So, instead of that, we recognize the situation, and generate
3266 debug temporaries at the last real use of TERed SSA names:
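a_1 = a_0 + 1;
#DEBUG #D1 => a_1
use(a_1);
a_2 = ...
#DEBUG ... => #D1

(Schematic; the debug temporary #D1 stays valid even after a_0 and a_2
are coalesced.) */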
3273 if (MAY_HAVE_DEBUG_INSNS
3275 && !is_gimple_debug (stmt))
3281 location_t sloc = get_curr_insn_source_location ();
3282 tree sblock = get_curr_insn_block ();
3284 /* Look for SSA names that have their last use here (TERed
3285 names always have only one real use). */
3286 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3287 if ((def = get_gimple_for_ssa_name (op)))
3289 imm_use_iterator imm_iter;
3290 use_operand_p use_p;
3291 bool have_debug_uses = false;
3293 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3295 if (gimple_debug_bind_p (USE_STMT (use_p)))
3297 have_debug_uses = true;
3302 if (have_debug_uses)
3304 /* OP is a TERed SSA name, with DEF its defining
3305 statement, and where OP is used in further debug
3306 instructions. Generate a debug temporary, and
3307 replace all uses of OP in debug insns with that temporary. */
3310 tree value = gimple_assign_rhs_to_tree (def);
3311 tree vexpr = make_node (DEBUG_EXPR_DECL);
3313 enum machine_mode mode;
3315 set_curr_insn_source_location (gimple_location (def));
3316 set_curr_insn_block (gimple_block (def));
3318 DECL_ARTIFICIAL (vexpr) = 1;
3319 TREE_TYPE (vexpr) = TREE_TYPE (value);
3321 mode = DECL_MODE (value);
3323 mode = TYPE_MODE (TREE_TYPE (value));
3324 DECL_MODE (vexpr) = mode;
3326 val = gen_rtx_VAR_LOCATION
3327 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3329 val = emit_debug_insn (val);
3331 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3333 if (!gimple_debug_bind_p (debugstmt))
3336 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3337 SET_USE (use_p, vexpr);
3339 update_stmt (debugstmt);
3343 set_curr_insn_source_location (sloc);
3344 set_curr_insn_block (sblock);
3347 currently_expanding_gimple_stmt = stmt;
3349 /* Expand this statement, then evaluate the resulting RTL and
3350 fixup the CFG accordingly. */
3351 if (gimple_code (stmt) == GIMPLE_COND)
3353 new_bb = expand_gimple_cond (bb, stmt);
3357 else if (gimple_debug_bind_p (stmt))
3359 location_t sloc = get_curr_insn_source_location ();
3360 tree sblock = get_curr_insn_block ();
3361 gimple_stmt_iterator nsi = gsi;
3365 tree var = gimple_debug_bind_get_var (stmt);
3368 enum machine_mode mode;
3370 if (gimple_debug_bind_has_value_p (stmt))
3371 value = gimple_debug_bind_get_value (stmt);
3375 last = get_last_insn ();
3377 set_curr_insn_source_location (gimple_location (stmt));
3378 set_curr_insn_block (gimple_block (stmt));
3381 mode = DECL_MODE (var);
3383 mode = TYPE_MODE (TREE_TYPE (var));
3385 val = gen_rtx_VAR_LOCATION
3386 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3388 val = emit_debug_insn (val);
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3392 /* We can't dump the insn with a TREE where an RTX is expected. */
3394 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3395 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3396 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3399 /* In order not to generate too many debug temporaries,
3400 we delink all uses of debug statements we already expanded.
3401 Therefore debug statements between definition and real
3402 use of TERed SSA names will continue to use the SSA name,
3403 and not be replaced with debug temps. */
3404 delink_stmt_imm_use (stmt);
3408 if (gsi_end_p (nsi))
3410 stmt = gsi_stmt (nsi);
3411 if (!gimple_debug_bind_p (stmt))
3415 set_curr_insn_source_location (sloc);
3416 set_curr_insn_block (sblock);
3420 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3423 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3434 def_operand_p def_p;
3435 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3439 /* Ignore this stmt if it is in the list of
3440 replaceable expressions. */
3442 && bitmap_bit_p (SA.values,
3443 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3446 last = expand_gimple_stmt (stmt);
3447 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3452 currently_expanding_gimple_stmt = NULL;
3454 /* Expand implicit goto and convert goto_locus. */
3455 FOR_EACH_EDGE (e, ei, bb->succs)
3457 if (e->goto_locus && e->goto_block)
3459 set_curr_insn_source_location (e->goto_locus);
3460 set_curr_insn_block (e->goto_block);
3461 e->goto_locus = curr_insn_locator ();
3463 e->goto_block = NULL;
3464 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3466 emit_jump (label_rtx_for_bb (e->dest));
3467 e->flags &= ~EDGE_FALLTHRU;
3471 /* Expanded RTL can create a jump in the last instruction of the block.
3472 This might later be assumed to be a jump to the successor and break edge insertion.
3473 We need to insert a dummy move to prevent this. PR41440. */
3474 if (single_succ_p (bb)
3475 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3476 && (last = get_last_insn ())
3479 rtx dummy = gen_reg_rtx (SImode);
3480 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3483 do_pending_stack_adjust ();
3485 /* Find the block tail. The last insn in the block is the insn
3486 before a barrier and/or table jump insn. */
3487 last = get_last_insn ();
3488 if (BARRIER_P (last))
3489 last = PREV_INSN (last);
3490 if (JUMP_TABLE_DATA_P (last))
3491 last = PREV_INSN (PREV_INSN (last));
3494 update_bb_for_insn (bb);
3500 /* Create a basic block for initialization code. */
3503 construct_init_block (void)
3505 basic_block init_block, first_block;
3509 /* Multiple entry points not supported yet. */
3510 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3511 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3512 init_rtl_bb_info (EXIT_BLOCK_PTR);
3513 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3514 EXIT_BLOCK_PTR->flags |= BB_RTL;
3516 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3518 /* When the entry edge points to the first basic block, we don't need a jump;
3519 otherwise we have to jump to the proper target. */
3520 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3522 tree label = gimple_block_label (e->dest);
3524 emit_jump (label_rtx (label));
3528 flags = EDGE_FALLTHRU;
3530 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3533 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3534 init_block->count = ENTRY_BLOCK_PTR->count;
3537 first_block = e->dest;
3538 redirect_edge_succ (e, init_block);
3539 e = make_edge (init_block, first_block, flags);
3542 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3543 e->probability = REG_BR_PROB_BASE;
3544 e->count = ENTRY_BLOCK_PTR->count;
3546 update_bb_for_insn (init_block);
3550 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3551 found in the block tree. */
3554 set_block_levels (tree block, int level)
3558 BLOCK_NUMBER (block) = level;
3559 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3560 block = BLOCK_CHAIN (block);
3564 /* Create a block containing landing pads and similar stuff. */
3567 construct_exit_block (void)
3569 rtx head = get_last_insn ();
3571 basic_block exit_block;
3575 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3577 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3579 /* Make sure the locus is set to the end of the function, so that
3580 epilogue line numbers and warnings are set properly. */
3581 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3582 input_location = cfun->function_end_locus;
3584 /* The following insns belong to the top scope. */
3585 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3587 /* Generate rtl for function exit. */
3588 expand_function_end ();
3590 end = get_last_insn ();
3593 /* While emitting the function end we could have moved the end of the last basic block. */
3595 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3596 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3597 head = NEXT_INSN (head);
3598 exit_block = create_basic_block (NEXT_INSN (head), end,
3599 EXIT_BLOCK_PTR->prev_bb);
3600 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3601 exit_block->count = EXIT_BLOCK_PTR->count;
3604 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3606 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3607 if (!(e->flags & EDGE_ABNORMAL))
3608 redirect_edge_succ (e, exit_block);
3613 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3614 e->probability = REG_BR_PROB_BASE;
3615 e->count = EXIT_BLOCK_PTR->count;
3616 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3619 e->count -= e2->count;
3620 exit_block->count -= e2->count;
3621 exit_block->frequency -= EDGE_FREQUENCY (e2);
3625 if (exit_block->count < 0)
3626 exit_block->count = 0;
3627 if (exit_block->frequency < 0)
3628 exit_block->frequency = 0;
3629 update_bb_for_insn (exit_block);
3632 /* Helper function for discover_nonconstant_array_refs.
3633 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
3637 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3638 void *data ATTRIBUTE_UNUSED)
3642 if (IS_TYPE_OR_DECL_P (t))
3644 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3646 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3647 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3648 && (!TREE_OPERAND (t, 2)
3649 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3650 || (TREE_CODE (t) == COMPONENT_REF
3651 && (!TREE_OPERAND (t,2)
3652 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3653 || TREE_CODE (t) == BIT_FIELD_REF
3654 || TREE_CODE (t) == REALPART_EXPR
3655 || TREE_CODE (t) == IMAGPART_EXPR
3656 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3657 || CONVERT_EXPR_P (t))
3658 t = TREE_OPERAND (t, 0);
3660 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3662 t = get_base_address (t);
3664 && DECL_MODE (t) != BLKmode)
3665 TREE_ADDRESSABLE (t) = 1;
3674 /* RTL expansion is not able to compile array references with variable
3675 offsets for arrays stored in a single register. Discover such
3676 expressions and mark variables as addressable to avoid this scenario. */
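/* For instance (illustrative sketch):

     int f (int i)
     {
       int a[2] = { 1, 2 };
       return a[i];
     }

   With a constant index A could stay in a single register; the variable
   index makes us mark A addressable so that it gets a stack slot. */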
3680 discover_nonconstant_array_refs (void)
3683 gimple_stmt_iterator gsi;
3686 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3688 gimple stmt = gsi_stmt (gsi);
3689 if (!is_gimple_debug (stmt))
3690 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3694 /* This function sets crtl->args.internal_arg_pointer to a virtual
3695 register if DRAP is needed. The local register allocator will replace
3696 virtual_incoming_args_rtx with the virtual register. */
3699 expand_stack_alignment (void)
3702 unsigned int preferred_stack_boundary;
3704 if (! SUPPORTS_STACK_ALIGNMENT)
3707 if (cfun->calls_alloca
3708 || cfun->has_nonlocal_label
3709 || crtl->has_nonlocal_goto)
3710 crtl->need_drap = true;
3712 /* Call update_stack_boundary here again to update incoming stack
3713 boundary. It may set incoming stack alignment to a different
3714 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3715 use the minimum incoming stack alignment to check if it is OK
3716 to perform sibcall optimization since sibcall optimization will
3717 only align the outgoing stack to incoming stack boundary. */
3718 if (targetm.calls.update_stack_boundary)
3719 targetm.calls.update_stack_boundary ();
3721 /* The incoming stack frame has to be aligned at least at
3722 parm_stack_boundary. */
3723 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3725 /* Update crtl->stack_alignment_estimated and use it later to align
3726 the stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3727 exceptions since callgraph doesn't collect incoming stack alignment in this case. */
3729 if (cfun->can_throw_non_call_exceptions
3730 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3731 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3733 preferred_stack_boundary = crtl->preferred_stack_boundary;
3734 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3735 crtl->stack_alignment_estimated = preferred_stack_boundary;
3736 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3737 crtl->stack_alignment_needed = preferred_stack_boundary;
3739 gcc_assert (crtl->stack_alignment_needed
3740 <= crtl->stack_alignment_estimated);
3742 crtl->stack_realign_needed
3743 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3744 crtl->stack_realign_tried = crtl->stack_realign_needed;
3746 crtl->stack_realign_processed = true;
3748 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack realign. */
3750 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3751 drap_rtx = targetm.calls.get_drap_rtx ();
3753 /* stack_realign_drap and drap_rtx must match. */
3754 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3756 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3757 if (NULL != drap_rtx)
3759 crtl->args.internal_arg_pointer = drap_rtx;
3761 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is needed. */
3763 fixup_tail_calls ();
3767 /* Translate the intermediate representation contained in the CFG
3768 from GIMPLE trees to RTL.
3770 We do conversion per basic block and preserve/update the tree CFG.
3771 This implies we have to do some magic as the CFG can simultaneously
3772 consist of basic blocks containing RTL and GIMPLE trees. This can
3773 confuse the CFG hooks, so be careful not to manipulate the CFG during the expansion. */
3777 gimple_expand_cfg (void)
3779 basic_block bb, init_block;
3785 timevar_push (TV_OUT_OF_SSA);
3786 rewrite_out_of_ssa (&SA);
3787 timevar_pop (TV_OUT_OF_SSA);
3788 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3791 /* Some backends want to know that we are expanding to RTL. */
3792 currently_expanding_to_rtl = 1;
3794 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3796 insn_locators_alloc ();
3797 if (!DECL_IS_BUILTIN (current_function_decl))
3799 /* Eventually, all FEs should explicitly set function_start_locus. */
3800 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3801 set_curr_insn_source_location
3802 (DECL_SOURCE_LOCATION (current_function_decl));
3804 set_curr_insn_source_location (cfun->function_start_locus);
3806 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3807 prologue_locator = curr_insn_locator ();
3809 #ifdef INSN_SCHEDULING
3810 init_sched_attrs ();
3813 /* Make sure the first insn is a note, even if we don't want line numbers.
3814 This makes sure the first insn will never be deleted.
3815 Also, final expects a note to appear there. */
3816 emit_note (NOTE_INSN_DELETED);
3818 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3819 discover_nonconstant_array_refs ();
3821 targetm.expand_to_rtl_hook ();
3822 crtl->stack_alignment_needed = STACK_BOUNDARY;
3823 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3824 crtl->stack_alignment_estimated = 0;
3825 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3826 cfun->cfg->max_jumptable_ents = 0;
3829 /* Expand the variables recorded during gimple lowering. */
3830 timevar_push (TV_VAR_EXPAND);
3831 expand_used_vars ();
3832 timevar_pop (TV_VAR_EXPAND);
3834 /* Honor stack protection warnings. */
3835 if (warn_stack_protect)
3837 if (cfun->calls_alloca)
3838 warning (OPT_Wstack_protector,
3839 "stack protector not protecting local variables: "
3840 "variable length buffer");
3841 if (has_short_buffer && !crtl->stack_protect_guard)
3842 warning (OPT_Wstack_protector,
3843 "stack protector not protecting function: "
3844 "all local arrays are less than %d bytes long",
3845 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3848 /* Set up parameters and prepare for return, for the function. */
3849 expand_function_start (current_function_decl);
3851 /* Now that we also have the parameter RTXs, copy them over to our partition representation. */
3853 for (i = 0; i < SA.map->num_partitions; i++)
3855 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3857 if (TREE_CODE (var) != VAR_DECL
3858 && !SA.partition_to_pseudo[i])
3859 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3860 gcc_assert (SA.partition_to_pseudo[i]);
3862 /* If this decl was marked as living in multiple places, reset
3863 this now to NULL. */
3864 if (DECL_RTL_IF_SET (var) == pc_rtx)
3865 SET_DECL_RTL (var, NULL);
3867 /* Some RTL parts really want to look at DECL_RTL(x) when x
3868 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3869 SET_DECL_RTL here, making this available, but that would mean
3870 selecting one of the potentially many RTLs for one DECL. Instead
3871 of doing that we simply reset the MEM_EXPR of the RTL in question,
3872 so that nobody can get at it and hence nobody can call DECL_RTL on it. */
3873 if (!DECL_RTL_SET_P (var))
3875 if (MEM_P (SA.partition_to_pseudo[i]))
3876 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3880 /* If this function is `main', emit a call to `__main'
3881 to run global initializers, etc. */
3882 if (DECL_NAME (current_function_decl)
3883 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3884 && DECL_FILE_SCOPE_P (current_function_decl))
3885 expand_main_function ();
3887 /* Initialize the stack_protect_guard field. This must happen after the
3888 call to __main (if any) so that the external decl is initialized. */
3889 if (crtl->stack_protect_guard)
3890 stack_protect_prologue ();
3892 expand_phi_nodes (&SA);
3894 /* Register rtl specific functions for cfg. */
3895 rtl_register_cfg_hooks ();
3897 init_block = construct_init_block ();
3899 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
3900 remaining edges later. */
3901 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3902 e->flags &= ~EDGE_EXECUTABLE;
3904 lab_rtx_for_bb = pointer_map_create ();
3905 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3906 bb = expand_gimple_basic_block (bb);
3908 if (MAY_HAVE_DEBUG_INSNS)
3909 expand_debug_locations ();
3911 execute_free_datastructures ();
3912 timevar_push (TV_OUT_OF_SSA);
3913 finish_out_of_ssa (&SA);
3914 timevar_pop (TV_OUT_OF_SSA);
3916 timevar_push (TV_POST_EXPAND);
3917 /* We are no longer in SSA form. */
3918 cfun->gimple_df->in_ssa_p = false;
3920 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3921 conservatively to true until they are all profile aware. */
3922 pointer_map_destroy (lab_rtx_for_bb);
3925 construct_exit_block ();
3926 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3927 insn_locators_finalize ();
3929 /* Zap the tree EH table. */
3930 set_eh_throw_stmt_table (cfun, NULL);
3932 rebuild_jump_labels (get_insns ());
3934 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3938 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3941 commit_one_edge_insertion (e);
3947 /* We're done expanding trees to RTL. */
3948 currently_expanding_to_rtl = 0;
3950 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3954 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3956 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3957 e->flags &= ~EDGE_EXECUTABLE;
3959 /* At the moment not all abnormal edges match the RTL
3960 representation. It is safe to remove them here as
3961 find_many_sub_basic_blocks will rediscover them.
3962 In the future we should get this fixed properly. */
3963 if ((e->flags & EDGE_ABNORMAL)
3964 && !(e->flags & EDGE_SIBCALL))
3971 blocks = sbitmap_alloc (last_basic_block);
3972 sbitmap_ones (blocks);
3973 find_many_sub_basic_blocks (blocks);
3974 sbitmap_free (blocks);
3975 purge_all_dead_edges ();
3979 expand_stack_alignment ();
3981 #ifdef ENABLE_CHECKING
3982 verify_flow_info ();
3985 /* There's no need to defer outputting this function any more; we
3986 know we want to output it. */
3987 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3989 /* Now that we're done expanding trees to RTL, we shouldn't have any
3990 more CONCATs anywhere. */
3991 generating_concat_p = 0;
3996 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3997 /* And the pass manager will dump RTL for us. */
4000 /* If we're emitting a nested function, make sure its parent gets
4001 emitted as well. Doing otherwise confuses debug info. */
4004 for (parent = DECL_CONTEXT (current_function_decl);
4005 parent != NULL_TREE;
4006 parent = get_containing_scope (parent))
4007 if (TREE_CODE (parent) == FUNCTION_DECL)
4008 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4011 /* We are now committed to emitting code for this function. Do any
4012 preparation, such as emitting abstract debug info for the inline function
4013 before it gets mangled by optimization. */
4014 if (cgraph_function_possibly_inlined_p (current_function_decl))
4015 (*debug_hooks->outlining_inline_function) (current_function_decl);
4017 TREE_ASM_WRITTEN (current_function_decl) = 1;
4019 /* After expanding, the return labels are no longer needed. */
4020 return_label = NULL;
4021 naked_return_label = NULL;
4022 /* Tag the blocks with a depth number so that change_scope can find
4023 the common parent easily. */
4024 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4025 default_rtl_profile ();
4026 timevar_pop (TV_POST_EXPAND);
4030 struct rtl_opt_pass pass_expand =
4034 "expand", /* name */
4036 gimple_expand_cfg, /* execute */
4039 0, /* static_pass_number */
4040 TV_EXPAND, /* tv_id */
4041 PROP_ssa | PROP_gimple_leh | PROP_cfg
4042 | PROP_gimple_lcx, /* properties_required */
4043 PROP_rtl, /* properties_provided */
4044 PROP_ssa | PROP_trees, /* properties_destroyed */
4045 TODO_verify_ssa | TODO_verify_flow
4046 | TODO_verify_stmts, /* todo_flags_start */
4048 | TODO_ggc_collect /* todo_flags_finish */