/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
/* This variable holds information helping the rewriting of SSA trees
   to RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
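/* Illustration (not from the original source): the three RHS classes
   correspond to tree arities.  For "x = a + b" the class is
   GIMPLE_BINARY_RHS and the rebuilt tree is build2 (PLUS_EXPR, type, a, b);
   for "x = -a" it is GIMPLE_UNARY_RHS and build1 (NEGATE_EXPR, type, a);
   for "x = a" it is GIMPLE_SINGLE_RHS and the RHS operand is returned
   directly (copied first if its location must change).  A hypothetical
   caller sketch, guarded out:  */
#if 0
  tree rhs = gimple_assign_rhs_to_tree (stmt);
  rtx val = expand_normal (rhs);
#endif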
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
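/* Worked example (illustrative only): if PREFERRED_STACK_BOUNDARY is
   128 bits (16 bytes) and STARTING_FRAME_OFFSET is 8, then in
   expand_used_vars below off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8,
   so frame offsets of -8, 8, 24, ... are the ones that satisfy
   (frame_offset + frame_phase) % 16 == 0.  */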
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
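/* Worked example (illustrative only): the masking above assumes ALIGN
   is a power of two, so that -align is an all-ones mask above the low
   log2(align) bits.  With FRAME_GROWS_DOWNWARD, size = 12, align = 8,
   frame_phase = 0 and frame_offset = -20:
     -20 - 12 = -32;  -32 & -8 = -32
   so the new variable occupies [-32, -20) and OFFSET is -32.  For an
   upward-growing frame at 20: 20 + 7 = 27; 27 & -8 = 24, giving the
   range [24, 36).  */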
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size
    = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
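/* Illustrative sketch (guarded out, not part of the pass): conflicts
   are recorded symmetrically in both bitmaps, so either query
   direction succeeds after a single call.  */
#if 0
  add_stack_var_conflict (3, 7);
  gcc_assert (stack_var_conflict_p (3, 7) && stack_var_conflict_p (7, 3));
#endif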
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;
  return false;
}
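/* Examples (illustrative only) of what the walk above returns:

     union u { short s; int i; };      -> true
     struct a { union u x; };          -> true  (field of union type)
     struct b { struct a y[4]; };      -> true  (array of such structs)
     struct c { int i; double d; };    -> false  */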
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  return uida < uidb ? -1 : uida > uidb ? 1 : 0;
}
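/* Illustration (not from the original source): qsort is not stable, so
   size ties are broken with ids to keep the order deterministic.  E.g.
   for sizes {4, 8, 8} belonging to hypothetical decls
   {var_b, ssa_5, var_a}, var_b sorts first, and of the two size-8
   entries the SSA name ssa_5 precedes the plain decl var_a, because
   SSA names compare before non-SSA names.  */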
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
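/* Illustration (hypothetical): merging partition B = {b1 -> b2} into
   A = {a1} with OFFSET 16 yields the chain a1 -> b1 -> b2, all with
   representative a1; 16 is added to b1's and b2's offsets, and A's
   alignment is raised to max (alignb(A), alignb(B)).  */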
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size (ascending).
	For each object A, scanning from the largest candidate downward:
	  Look for the largest non-conflicting object B with size <= S,
	  UNION (A, B), place B at the current offset within A, and
	  reduce the remaining space S accordingly.  */

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;

          offset += jsize;
        }
    }

  update_alias_info_with_stack_vars ();
}
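/* Worked example (illustrative): take A (16 bytes), B (8) and C (8),
   where neither B nor C conflicts with A.  After the ascending sort the
   loop above packs B into A's footprint at offset 0 and C at offset 8,
   so the whole partition needs only A's 16 bytes of frame space:
   variables that do not conflict are never live at the same time and
   may therefore overlap in memory.  */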
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
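/* Illustration (not from the original source): "offset & -offset"
   isolates the lowest set bit of OFFSET, i.e. the largest power of two
   that provably divides it.  E.g. offset 24 (binary 11000) gives 8, so
   a variable at frame offset 24 is known to be 8-byte aligned.  */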
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;
  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
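/* Illustration (hypothetical decls): at -O0 a 4-byte scalar yields
   false here and is expanded immediately, while a 64-byte buffer
   yields true and joins the packing problem; with -fstack-protector
   both are deferred so that buffers can be re-ordered to the top of
   the frame.  */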
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;
          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
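/* Examples (illustrative, assuming the default --param ssp-buffer-size=8):

     char buf[4]    -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     char buf[64]   -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     int  a[16]     -> SPCT_HAS_ARRAY
     struct { char name[32]; }
		    -> SPCT_HAS_AGGREGATE | SPCT_HAS_LARGE_CHAR_ARRAY
		       | SPCT_HAS_ARRAY  */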
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;
/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  int lp_nr = 0;
  rtx last = NULL;
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (gimple_has_location (stmt))
    {
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));
    }

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands an if with
         a sibcall in the then-branch and a label for the else-branch.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
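/* Worked example (illustrative): for -7 / 2 truncation gives -3 with
   remainder mod = -1.  Since mod != 0 and op1 / mod = 2 / -1 = -2 < 0,
   the adjustment is -1, and -3 + (-1) = -4 = floor (-7 / 2).  */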
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */
static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */
static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
2115 /* Return the difference between the rounded and the truncated result
2116 of a signed division by OP1 with remainder MOD. Halfway cases are
2117 rounded away from zero, rather than to the nearest even number. */
2119 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2121 /* (abs (mod) >= abs (op1) - abs (mod)
2122 ? (op1 / mod > 0 ? 1 : -1)
2123 : 0) */
2124 return gen_rtx_IF_THEN_ELSE
2125 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2126 gen_rtx_MINUS (mode,
2127 gen_rtx_ABS (mode, op1),
2128 gen_rtx_ABS (mode, mod))),
2129 gen_rtx_IF_THEN_ELSE
2130 (mode, gen_rtx_GT (BImode,
2131 gen_rtx_DIV (mode, op1, mod),
2133 const1_rtx, constm1_rtx),
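/* Worked example (illustrative): for 8 / 3 the truncated quotient is
   2 with MOD = 2; abs (MOD) = 2 >= abs (OP1) - abs (MOD) = 1 and
   OP1 / MOD = 3 / 2 > 0, so the adjustment is 1 and the rounded
   result is 3.  The halfway case 3 / 2 likewise satisfies 1 >= 2 - 1
   and rounds away from zero to 2.  */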
2137 /* Return the difference between the rounded and the truncated result
2138 of an unsigned division by OP1 with remainder MOD. Halfway cases
2139 are rounded away from zero, rather than to the nearest even
2140 number. */
2142 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2144 /* (mod >= op1 - mod ? 1 : 0) */
2145 return gen_rtx_IF_THEN_ELSE
2146 (mode, gen_rtx_GE (BImode, mod,
2147 gen_rtx_MINUS (mode, op1, mod)),
2148 const1_rtx, const0_rtx);
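/* Worked example (illustrative): for unsigned 8 / 3, MOD = 2 and
   OP1 - MOD = 1, so MOD >= OP1 - MOD holds and the truncated
   quotient 2 is bumped to the rounded result 3.  */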
2151 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2152 new instructions. */
2155 convert_debug_memory_address (enum machine_mode mode, rtx x)
2157 enum machine_mode xmode = GET_MODE (x);
2159 #ifndef POINTERS_EXTEND_UNSIGNED
2160 gcc_assert (mode == Pmode);
2161 gcc_assert (xmode == mode || xmode == VOIDmode);
2163 gcc_assert (mode == Pmode || mode == ptr_mode);
2165 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2168 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2169 x = simplify_gen_subreg (mode, x, xmode,
2170 subreg_lowpart_offset
2172 else if (POINTERS_EXTEND_UNSIGNED > 0)
2173 x = gen_rtx_ZERO_EXTEND (mode, x);
2174 else if (!POINTERS_EXTEND_UNSIGNED)
2175 x = gen_rtx_SIGN_EXTEND (mode, x);
2178 #endif /* POINTERS_EXTEND_UNSIGNED */
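/* For illustration (assuming a target where Pmode is wider than
   ptr_mode): narrowing X takes its low-part subreg, while widening
   wraps X in a ZERO_EXTEND when POINTERS_EXTEND_UNSIGNED is positive
   and in a SIGN_EXTEND when it is zero, mirroring the cases above.  */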
2183 /* Return an RTX equivalent to the value of the tree expression
2184 EXP. */
2187 expand_debug_expr (tree exp)
2189 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2190 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2191 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2193 enum machine_mode address_mode;
2195 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2197 case tcc_expression:
2198 switch (TREE_CODE (exp))
2203 case TRUTH_ANDIF_EXPR:
2204 case TRUTH_ORIF_EXPR:
2205 case TRUTH_AND_EXPR:
2207 case TRUTH_XOR_EXPR:
2210 case TRUTH_NOT_EXPR:
2219 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2226 case tcc_comparison:
2227 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2234 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2244 case tcc_exceptional:
2245 case tcc_declaration:
2251 switch (TREE_CODE (exp))
2254 if (!lookup_constant_def (exp))
2256 if (strlen (TREE_STRING_POINTER (exp)) + 1
2257 != (size_t) TREE_STRING_LENGTH (exp))
2259 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2260 op0 = gen_rtx_MEM (BLKmode, op0);
2261 set_mem_attributes (op0, exp, 0);
2264 /* Fall through... */
2269 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2273 gcc_assert (COMPLEX_MODE_P (mode));
2274 op0 = expand_debug_expr (TREE_REALPART (exp));
2275 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2276 return gen_rtx_CONCAT (mode, op0, op1);
2278 case DEBUG_EXPR_DECL:
2279 op0 = DECL_RTL_IF_SET (exp);
2284 op0 = gen_rtx_DEBUG_EXPR (mode);
2285 DEBUG_EXPR_TREE_DECL (op0) = exp;
2286 SET_DECL_RTL (exp, op0);
2296 op0 = DECL_RTL_IF_SET (exp);
2298 /* This decl was probably optimized away. */
2301 if (TREE_CODE (exp) != VAR_DECL
2302 || DECL_EXTERNAL (exp)
2303 || !TREE_STATIC (exp)
2305 || DECL_HARD_REGISTER (exp)
2306 || mode == VOIDmode)
2309 op0 = DECL_RTL (exp);
2310 SET_DECL_RTL (exp, NULL);
2312 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2313 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2317 op0 = copy_rtx (op0);
2319 if (GET_MODE (op0) == BLKmode
2320 /* If op0 is not BLKmode but MODE is, the mode adjustment
2321 below would ICE. While it is likely a front-end bug,
2322 try to be robust here. See PR43166. */
2325 gcc_assert (MEM_P (op0));
2326 op0 = adjust_address_nv (op0, mode, 0);
2337 enum machine_mode inner_mode = GET_MODE (op0);
2339 if (mode == inner_mode)
2342 if (inner_mode == VOIDmode)
2344 if (TREE_CODE (exp) == SSA_NAME)
2345 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2347 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2348 if (mode == inner_mode)
2352 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2354 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2355 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2356 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2357 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2359 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2361 else if (FLOAT_MODE_P (mode))
2363 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2364 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2365 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2367 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2369 else if (FLOAT_MODE_P (inner_mode))
2372 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2374 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2376 else if (CONSTANT_P (op0)
2377 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2378 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2379 subreg_lowpart_offset (mode,
2382 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2384 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2390 case ALIGN_INDIRECT_REF:
2391 case MISALIGNED_INDIRECT_REF:
2392 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2396 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2398 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2399 address_mode = targetm.addr_space.address_mode (as);
2403 as = ADDR_SPACE_GENERIC;
2404 address_mode = Pmode;
2407 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2409 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2410 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2413 op0 = gen_rtx_MEM (mode, op0);
2415 set_mem_attributes (op0, exp, 0);
2416 set_mem_addr_space (op0, as);
2420 case TARGET_MEM_REF:
2421 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2424 op0 = expand_debug_expr
2425 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2429 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2431 op0 = gen_rtx_MEM (mode, op0);
2433 set_mem_attributes (op0, exp, 0);
2434 set_mem_addr_space (op0, as);
2439 case ARRAY_RANGE_REF:
2444 case VIEW_CONVERT_EXPR:
2446 enum machine_mode mode1;
2447 HOST_WIDE_INT bitsize, bitpos;
2450 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2451 &mode1, &unsignedp, &volatilep, false);
2457 orig_op0 = op0 = expand_debug_expr (tem);
2464 enum machine_mode addrmode, offmode;
2466 gcc_assert (MEM_P (op0));
2468 op0 = XEXP (op0, 0);
2469 addrmode = GET_MODE (op0);
2470 if (addrmode == VOIDmode)
2473 op1 = expand_debug_expr (offset);
2477 offmode = GET_MODE (op1);
2478 if (offmode == VOIDmode)
2479 offmode = TYPE_MODE (TREE_TYPE (offset));
2481 if (addrmode != offmode)
2482 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2483 subreg_lowpart_offset (addrmode,
2486 /* Don't use offset_address here, we don't need a
2487 recognizable address, and we don't want to generate
2488 code. */
2489 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2494 if (mode1 == VOIDmode)
2496 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2497 if (bitpos >= BITS_PER_UNIT)
2499 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2500 bitpos %= BITS_PER_UNIT;
2502 else if (bitpos < 0)
2505 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2506 op0 = adjust_address_nv (op0, mode1, -units); /* Step the address down so BITPOS becomes nonnegative. */
2507 bitpos += units * BITS_PER_UNIT;
2509 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2510 op0 = adjust_address_nv (op0, mode, 0);
2511 else if (GET_MODE (op0) != mode1)
2512 op0 = adjust_address_nv (op0, mode1, 0);
2514 op0 = copy_rtx (op0);
2515 if (op0 == orig_op0)
2516 op0 = shallow_copy_rtx (op0);
2517 set_mem_attributes (op0, exp, 0);
2520 if (bitpos == 0 && mode == GET_MODE (op0))
2526 if ((bitpos % BITS_PER_UNIT) == 0
2527 && bitsize == GET_MODE_BITSIZE (mode1))
2529 enum machine_mode opmode = GET_MODE (op0);
2531 gcc_assert (opmode != BLKmode);
2533 if (opmode == VOIDmode)
2536 /* This condition may hold if we're expanding the address
2537 right past the end of an array that turned out not to
2538 be addressable (i.e., the address was only computed in
2539 debug stmts). The gen_subreg below would rightfully
2540 crash, and the address doesn't really exist, so just
2541 drop it. */
2542 if (bitpos >= GET_MODE_BITSIZE (opmode))
2545 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2546 return simplify_gen_subreg (mode, op0, opmode,
2547 bitpos / BITS_PER_UNIT);
2550 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2551 && TYPE_UNSIGNED (TREE_TYPE (exp))
2553 : ZERO_EXTRACT, mode,
2554 GET_MODE (op0) != VOIDmode
2555 ? GET_MODE (op0) : mode1,
2556 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2560 return gen_rtx_ABS (mode, op0);
2563 return gen_rtx_NEG (mode, op0);
2566 return gen_rtx_NOT (mode, op0);
2570 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2572 return gen_rtx_FLOAT (mode, op0);
2574 case FIX_TRUNC_EXPR:
2576 return gen_rtx_UNSIGNED_FIX (mode, op0);
2578 return gen_rtx_FIX (mode, op0);
2580 case POINTER_PLUS_EXPR:
2582 return gen_rtx_PLUS (mode, op0, op1);
2585 return gen_rtx_MINUS (mode, op0, op1);
2588 return gen_rtx_MULT (mode, op0, op1);
2591 case TRUNC_DIV_EXPR:
2592 case EXACT_DIV_EXPR:
2594 return gen_rtx_UDIV (mode, op0, op1);
2596 return gen_rtx_DIV (mode, op0, op1);
2598 case TRUNC_MOD_EXPR:
2600 return gen_rtx_UMOD (mode, op0, op1);
2602 return gen_rtx_MOD (mode, op0, op1);
2604 case FLOOR_DIV_EXPR:
2606 return gen_rtx_UDIV (mode, op0, op1);
2609 rtx div = gen_rtx_DIV (mode, op0, op1);
2610 rtx mod = gen_rtx_MOD (mode, op0, op1);
2611 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2612 return gen_rtx_PLUS (mode, div, adj);
2615 case FLOOR_MOD_EXPR:
2617 return gen_rtx_UMOD (mode, op0, op1);
2620 rtx mod = gen_rtx_MOD (mode, op0, op1);
2621 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2622 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2623 return gen_rtx_PLUS (mode, mod, adj);
2629 rtx div = gen_rtx_UDIV (mode, op0, op1);
2630 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2631 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2632 return gen_rtx_PLUS (mode, div, adj);
2636 rtx div = gen_rtx_DIV (mode, op0, op1);
2637 rtx mod = gen_rtx_MOD (mode, op0, op1);
2638 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2639 return gen_rtx_PLUS (mode, div, adj);
2645 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2646 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2647 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2648 return gen_rtx_PLUS (mode, mod, adj);
2652 rtx mod = gen_rtx_MOD (mode, op0, op1);
2653 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2654 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2655 return gen_rtx_PLUS (mode, mod, adj);
2658 case ROUND_DIV_EXPR:
2661 rtx div = gen_rtx_UDIV (mode, op0, op1);
2662 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2663 rtx adj = round_udiv_adjust (mode, mod, op1);
2664 return gen_rtx_PLUS (mode, div, adj);
2668 rtx div = gen_rtx_DIV (mode, op0, op1);
2669 rtx mod = gen_rtx_MOD (mode, op0, op1);
2670 rtx adj = round_sdiv_adjust (mode, mod, op1);
2671 return gen_rtx_PLUS (mode, div, adj);
2674 case ROUND_MOD_EXPR:
2677 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2678 rtx adj = round_udiv_adjust (mode, mod, op1);
2679 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2680 return gen_rtx_PLUS (mode, mod, adj);
2684 rtx mod = gen_rtx_MOD (mode, op0, op1);
2685 rtx adj = round_sdiv_adjust (mode, mod, op1);
2686 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2687 return gen_rtx_PLUS (mode, mod, adj);
2691 return gen_rtx_ASHIFT (mode, op0, op1);
2695 return gen_rtx_LSHIFTRT (mode, op0, op1);
2697 return gen_rtx_ASHIFTRT (mode, op0, op1);
2700 return gen_rtx_ROTATE (mode, op0, op1);
2703 return gen_rtx_ROTATERT (mode, op0, op1);
2707 return gen_rtx_UMIN (mode, op0, op1);
2709 return gen_rtx_SMIN (mode, op0, op1);
2713 return gen_rtx_UMAX (mode, op0, op1);
2715 return gen_rtx_SMAX (mode, op0, op1);
2718 case TRUTH_AND_EXPR:
2719 return gen_rtx_AND (mode, op0, op1);
2723 return gen_rtx_IOR (mode, op0, op1);
2726 case TRUTH_XOR_EXPR:
2727 return gen_rtx_XOR (mode, op0, op1);
2729 case TRUTH_ANDIF_EXPR:
2730 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2732 case TRUTH_ORIF_EXPR:
2733 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2735 case TRUTH_NOT_EXPR:
2736 return gen_rtx_EQ (mode, op0, const0_rtx);
2740 return gen_rtx_LTU (mode, op0, op1);
2742 return gen_rtx_LT (mode, op0, op1);
2746 return gen_rtx_LEU (mode, op0, op1);
2748 return gen_rtx_LE (mode, op0, op1);
2752 return gen_rtx_GTU (mode, op0, op1);
2754 return gen_rtx_GT (mode, op0, op1);
2758 return gen_rtx_GEU (mode, op0, op1);
2760 return gen_rtx_GE (mode, op0, op1);
2763 return gen_rtx_EQ (mode, op0, op1);
2766 return gen_rtx_NE (mode, op0, op1);
2768 case UNORDERED_EXPR:
2769 return gen_rtx_UNORDERED (mode, op0, op1);
2772 return gen_rtx_ORDERED (mode, op0, op1);
2775 return gen_rtx_UNLT (mode, op0, op1);
2778 return gen_rtx_UNLE (mode, op0, op1);
2781 return gen_rtx_UNGT (mode, op0, op1);
2784 return gen_rtx_UNGE (mode, op0, op1);
2787 return gen_rtx_UNEQ (mode, op0, op1);
2790 return gen_rtx_LTGT (mode, op0, op1);
2793 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2796 gcc_assert (COMPLEX_MODE_P (mode));
2797 if (GET_MODE (op0) == VOIDmode)
2798 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2799 if (GET_MODE (op1) == VOIDmode)
2800 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2801 return gen_rtx_CONCAT (mode, op0, op1);
2804 if (GET_CODE (op0) == CONCAT)
2805 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2806 gen_rtx_NEG (GET_MODE_INNER (mode),
2810 enum machine_mode imode = GET_MODE_INNER (mode);
2815 re = adjust_address_nv (op0, imode, 0);
2816 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2820 enum machine_mode ifmode = int_mode_for_mode (mode);
2821 enum machine_mode ihmode = int_mode_for_mode (imode);
2823 if (ifmode == BLKmode || ihmode == BLKmode)
2825 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2828 re = gen_rtx_SUBREG (ifmode, re, 0);
2829 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2830 if (imode != ihmode)
2831 re = gen_rtx_SUBREG (imode, re, 0);
2832 im = copy_rtx (op0);
2834 im = gen_rtx_SUBREG (ifmode, im, 0);
2835 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2836 if (imode != ihmode)
2837 im = gen_rtx_SUBREG (imode, im, 0);
2839 im = gen_rtx_NEG (imode, im);
2840 return gen_rtx_CONCAT (mode, re, im);
2844 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2845 if (!op0 || !MEM_P (op0))
2848 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2853 exp = build_constructor_from_list (TREE_TYPE (exp),
2854 TREE_VECTOR_CST_ELTS (exp));
2858 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2863 op0 = gen_rtx_CONCATN
2864 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2866 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2868 op1 = expand_debug_expr (val);
2871 XVECEXP (op0, 0, i) = op1;
2874 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2876 op1 = expand_debug_expr
2877 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2882 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2883 XVECEXP (op0, 0, i) = op1;
2889 goto flag_unsupported;
2892 /* ??? Maybe handle some builtins? */
2897 gimple g = get_gimple_for_ssa_name (exp);
2900 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2906 int part = var_to_partition (SA.map, exp);
2908 if (part == NO_PARTITION)
2911 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2913 op0 = SA.partition_to_pseudo[part];
2923 #ifdef ENABLE_CHECKING
2932 /* Expand the _LOCs in debug insns. We run this after expanding all
2933 regular insns, so that any variables referenced in the function
2934 will have their DECL_RTLs set. */
2937 expand_debug_locations (void)
2940 rtx last = get_last_insn ();
2941 int save_strict_alias = flag_strict_aliasing;
2943 /* New alias sets while setting up memory attributes cause
2944 -fcompare-debug failures, even though they don't bring about any
2945 codegen changes. */
2946 flag_strict_aliasing = 0;
2948 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2949 if (DEBUG_INSN_P (insn))
2951 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
2953 enum machine_mode mode;
2955 if (value == NULL_TREE)
2959 val = expand_debug_expr (value);
2960 gcc_assert (last == get_last_insn ());
2964 val = gen_rtx_UNKNOWN_VAR_LOC ();
2967 mode = GET_MODE (INSN_VAR_LOCATION (insn));
2969 gcc_assert (mode == GET_MODE (val)
2970 || (GET_MODE (val) == VOIDmode
2971 && (CONST_INT_P (val)
2972 || GET_CODE (val) == CONST_FIXED
2973 || GET_CODE (val) == CONST_DOUBLE
2974 || GET_CODE (val) == LABEL_REF)));
2977 INSN_VAR_LOCATION_LOC (insn) = val;
2980 flag_strict_aliasing = save_strict_alias;
2983 /* Expand basic block BB from GIMPLE trees to RTL. */
2986 expand_gimple_basic_block (basic_block bb)
2988 gimple_stmt_iterator gsi;
2997 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3000 /* Note that since we are now transitioning from GIMPLE to RTL, we
3001 cannot use the gsi_*_bb() routines because they expect the basic
3002 block to be in GIMPLE, instead of RTL. Therefore, we need to
3003 access the BB sequence directly. */
3004 stmts = bb_seq (bb);
3005 bb->il.gimple = NULL;
3006 rtl_profile_for_bb (bb);
3007 init_rtl_bb_info (bb);
3008 bb->flags |= BB_RTL;
3010 /* Remove the GIMPLE_RETURN if we may fall through to the exit
3011 instead. */
3012 gsi = gsi_last (stmts);
3013 if (!gsi_end_p (gsi)
3014 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3016 gimple ret_stmt = gsi_stmt (gsi);
3018 gcc_assert (single_succ_p (bb));
3019 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3021 if (bb->next_bb == EXIT_BLOCK_PTR
3022 && !gimple_return_retval (ret_stmt))
3024 gsi_remove (&gsi, false);
3025 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3029 gsi = gsi_start (stmts);
3030 if (!gsi_end_p (gsi))
3032 stmt = gsi_stmt (gsi);
3033 if (gimple_code (stmt) != GIMPLE_LABEL)
3037 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3041 last = get_last_insn ();
3045 expand_gimple_stmt (stmt);
3050 emit_label ((rtx) *elt);
3052 /* Java emits line number notes at the top of labels.
3053 ??? Make this go away once line number notes are obsoleted. */
3054 BB_HEAD (bb) = NEXT_INSN (last);
3055 if (NOTE_P (BB_HEAD (bb)))
3056 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3057 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3059 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3062 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3064 NOTE_BASIC_BLOCK (note) = bb;
3066 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3070 stmt = gsi_stmt (gsi);
3072 /* If this statement is a non-debug one, and we generate debug
3073 insns, then this one might be the last real use of a TERed
3074 SSA_NAME, but where there are still some debug uses further
3075 down. Expanding the current SSA name in such further debug
3076 uses by their RHS might lead to wrong debug info, as coalescing
3077 might make the operands of such RHS be placed into the same
3078 pseudo as something else. Like so:
3079 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3080 use(a_1);
3081 a_2 = ...
3082 #DEBUG ... => a_1
3083 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3084 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3085 the write to a_2 would actually have clobbered the place which
3086 would have held a_0.
3088 So, instead of that, we recognize the situation, and generate
3089 debug temporaries at the last real use of TERed SSA names:
3090 a_1 = a_0 + 1;
3091 #DEBUG #D1 => a_1
3092 use(a_1);
3093 a_2 = ...
3094 #DEBUG ... => #D1 */
3096 if (MAY_HAVE_DEBUG_INSNS
3098 && !is_gimple_debug (stmt))
3104 location_t sloc = get_curr_insn_source_location ();
3105 tree sblock = get_curr_insn_block ();
3107 /* Look for SSA names that have their last use here (TERed
3108 names always have only one real use). */
3109 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3110 if ((def = get_gimple_for_ssa_name (op)))
3112 imm_use_iterator imm_iter;
3113 use_operand_p use_p;
3114 bool have_debug_uses = false;
3116 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3118 if (gimple_debug_bind_p (USE_STMT (use_p)))
3120 have_debug_uses = true;
3125 if (have_debug_uses)
3127 /* OP is a TERed SSA name, with DEF its defining
3128 statement, and where OP is used in further debug
3129 instructions. Generate a debug temporary, and
3130 replace all uses of OP in debug insns with that
3133 tree value = gimple_assign_rhs_to_tree (def);
3134 tree vexpr = make_node (DEBUG_EXPR_DECL);
3136 enum machine_mode mode;
3138 set_curr_insn_source_location (gimple_location (def));
3139 set_curr_insn_block (gimple_block (def));
3141 DECL_ARTIFICIAL (vexpr) = 1;
3142 TREE_TYPE (vexpr) = TREE_TYPE (value);
3144 mode = DECL_MODE (value);
3146 mode = TYPE_MODE (TREE_TYPE (value));
3147 DECL_MODE (vexpr) = mode;
3149 val = gen_rtx_VAR_LOCATION
3150 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3152 val = emit_debug_insn (val);
3154 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3156 if (!gimple_debug_bind_p (debugstmt))
3159 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3160 SET_USE (use_p, vexpr);
3162 update_stmt (debugstmt);
3166 set_curr_insn_source_location (sloc);
3167 set_curr_insn_block (sblock);
3170 currently_expanding_gimple_stmt = stmt;
3172 /* Expand this statement, then evaluate the resulting RTL and
3173 fix up the CFG accordingly. */
3174 if (gimple_code (stmt) == GIMPLE_COND)
3176 new_bb = expand_gimple_cond (bb, stmt);
3180 else if (gimple_debug_bind_p (stmt))
3182 location_t sloc = get_curr_insn_source_location ();
3183 tree sblock = get_curr_insn_block ();
3184 gimple_stmt_iterator nsi = gsi;
3188 tree var = gimple_debug_bind_get_var (stmt);
3191 enum machine_mode mode;
3193 if (gimple_debug_bind_has_value_p (stmt))
3194 value = gimple_debug_bind_get_value (stmt);
3198 last = get_last_insn ();
3200 set_curr_insn_source_location (gimple_location (stmt));
3201 set_curr_insn_block (gimple_block (stmt));
3204 mode = DECL_MODE (var);
3206 mode = TYPE_MODE (TREE_TYPE (var));
3208 val = gen_rtx_VAR_LOCATION
3209 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3211 val = emit_debug_insn (val);
3213 if (dump_file && (dump_flags & TDF_DETAILS))
3215 /* We can't dump the insn with a TREE where an RTX
3216 is expected. */
3217 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3218 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3219 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3222 /* In order not to generate too many debug temporaries,
3223 we delink all uses of debug statements we already expanded.
3224 Therefore debug statements between definition and real
3225 use of TERed SSA names will continue to use the SSA name,
3226 and not be replaced with debug temps. */
3227 delink_stmt_imm_use (stmt);
3231 if (gsi_end_p (nsi))
3233 stmt = gsi_stmt (nsi);
3234 if (!gimple_debug_bind_p (stmt))
3238 set_curr_insn_source_location (sloc);
3239 set_curr_insn_block (sblock);
3243 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3246 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3257 def_operand_p def_p;
3258 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3262 /* Ignore this stmt if it is in the list of
3263 replaceable expressions. */
3265 && bitmap_bit_p (SA.values,
3266 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3269 last = expand_gimple_stmt (stmt);
3270 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3275 currently_expanding_gimple_stmt = NULL;
3277 /* Expand implicit goto and convert goto_locus. */
3278 FOR_EACH_EDGE (e, ei, bb->succs)
3280 if (e->goto_locus && e->goto_block)
3282 set_curr_insn_source_location (e->goto_locus);
3283 set_curr_insn_block (e->goto_block);
3284 e->goto_locus = curr_insn_locator ();
3286 e->goto_block = NULL;
3287 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3289 emit_jump (label_rtx_for_bb (e->dest));
3290 e->flags &= ~EDGE_FALLTHRU;
3294 /* Expanded RTL can create a jump in the last instruction of a block.
3295 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
3296 We need to insert a dummy move to prevent this. PR41440. */
3297 if (single_succ_p (bb)
3298 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3299 && (last = get_last_insn ())
3302 rtx dummy = gen_reg_rtx (SImode);
3303 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3306 do_pending_stack_adjust ();
3308 /* Find the block tail. The last insn in the block is the insn
3309 before a barrier and/or table jump insn. */
3310 last = get_last_insn ();
3311 if (BARRIER_P (last))
3312 last = PREV_INSN (last);
3313 if (JUMP_TABLE_DATA_P (last))
3314 last = PREV_INSN (PREV_INSN (last));
3317 update_bb_for_insn (bb);
3323 /* Create a basic block for initialization code. */
3326 construct_init_block (void)
3328 basic_block init_block, first_block;
3332 /* Multiple entry points are not supported yet. */
3333 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3334 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3335 init_rtl_bb_info (EXIT_BLOCK_PTR);
3336 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3337 EXIT_BLOCK_PTR->flags |= BB_RTL;
3339 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3341 /* When the entry edge points to the first basic block, we don't need
3342 a jump; otherwise we have to jump to the proper target. */
3343 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3345 tree label = gimple_block_label (e->dest);
3347 emit_jump (label_rtx (label));
3351 flags = EDGE_FALLTHRU;
3353 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3356 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3357 init_block->count = ENTRY_BLOCK_PTR->count;
3360 first_block = e->dest;
3361 redirect_edge_succ (e, init_block);
3362 e = make_edge (init_block, first_block, flags);
3365 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3366 e->probability = REG_BR_PROB_BASE;
3367 e->count = ENTRY_BLOCK_PTR->count;
3369 update_bb_for_insn (init_block);
3373 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3374 found in the block tree. */
3377 set_block_levels (tree block, int level)
3381 BLOCK_NUMBER (block) = level;
3382 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3383 block = BLOCK_CHAIN (block);
3387 /* Create a block containing landing pads and similar stuff. */
3390 construct_exit_block (void)
3392 rtx head = get_last_insn ();
3394 basic_block exit_block;
3398 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3400 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3402 /* Make sure the locus is set to the end of the function, so that
3403 epilogue line numbers and warnings are set properly. */
3404 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3405 input_location = cfun->function_end_locus;
3407 /* The following insns belong to the top scope. */
3408 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3410 /* Generate rtl for function exit. */
3411 expand_function_end ();
3413 end = get_last_insn ();
3416 /* While emitting the function end we could move the end of the last basic block. */
3418 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3419 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3420 head = NEXT_INSN (head);
3421 exit_block = create_basic_block (NEXT_INSN (head), end,
3422 EXIT_BLOCK_PTR->prev_bb);
3423 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3424 exit_block->count = EXIT_BLOCK_PTR->count;
3427 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3429 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3430 if (!(e->flags & EDGE_ABNORMAL))
3431 redirect_edge_succ (e, exit_block);
3436 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3437 e->probability = REG_BR_PROB_BASE;
3438 e->count = EXIT_BLOCK_PTR->count;
3439 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3442 e->count -= e2->count;
3443 exit_block->count -= e2->count;
3444 exit_block->frequency -= EDGE_FREQUENCY (e2);
3448 if (exit_block->count < 0)
3449 exit_block->count = 0;
3450 if (exit_block->frequency < 0)
3451 exit_block->frequency = 0;
3452 update_bb_for_insn (exit_block);
3455 /* Helper function for discover_nonconstant_array_refs.
3456 Look for ARRAY_REF nodes with non-constant indexes and mark them
3457 addressable. */
3460 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3461 void *data ATTRIBUTE_UNUSED)
3465 if (IS_TYPE_OR_DECL_P (t))
3467 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3469 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3470 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3471 && (!TREE_OPERAND (t, 2)
3472 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3473 || (TREE_CODE (t) == COMPONENT_REF
3474 && (!TREE_OPERAND (t,2)
3475 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3476 || TREE_CODE (t) == BIT_FIELD_REF
3477 || TREE_CODE (t) == REALPART_EXPR
3478 || TREE_CODE (t) == IMAGPART_EXPR
3479 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3480 || CONVERT_EXPR_P (t))
3481 t = TREE_OPERAND (t, 0);
3483 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3485 t = get_base_address (t);
3487 && DECL_MODE (t) != BLKmode)
3488 TREE_ADDRESSABLE (t) = 1;
3497 /* RTL expansion is not able to compile array references with variable
3498 offsets for arrays stored in a single register. Discover such
3499 expressions and mark variables as addressable to avoid this
3500 scenario. */
3503 discover_nonconstant_array_refs (void)
3506 gimple_stmt_iterator gsi;
3509 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3511 gimple stmt = gsi_stmt (gsi);
3512 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
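/* Illustrative case (hypothetical source, not from this file): given
   `vector int v; ... = v[i];' with a non-constant index I, V might
   otherwise be allocated to a single vector register, where a variable
   offset cannot be applied; marking V addressable forces it into
   memory, where such indexing works.  */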
3516 /* This function sets crtl->args.internal_arg_pointer to a virtual
3517 register if DRAP is needed. The local register allocator will replace
3518 virtual_incoming_args_rtx with the virtual register. */
3521 expand_stack_alignment (void)
3524 unsigned int preferred_stack_boundary;
3526 if (! SUPPORTS_STACK_ALIGNMENT)
3529 if (cfun->calls_alloca
3530 || cfun->has_nonlocal_label
3531 || crtl->has_nonlocal_goto)
3532 crtl->need_drap = true;
3534 /* Call update_stack_boundary here again to update incoming stack
3535 boundary. It may set incoming stack alignment to a different
3536 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3537 use the minimum incoming stack alignment to check if it is OK
3538 to perform sibcall optimization since sibcall optimization will
3539 only align the outgoing stack to incoming stack boundary. */
3540 if (targetm.calls.update_stack_boundary)
3541 targetm.calls.update_stack_boundary ();
3543 /* The incoming stack frame has to be aligned at least at
3544 parm_stack_boundary. */
3545 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3547 /* Update crtl->stack_alignment_estimated and use it later to align
3548 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3549 exceptions since callgraph doesn't collect incoming stack alignment
3550 in this case. */
3551 if (flag_non_call_exceptions
3552 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3553 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3555 preferred_stack_boundary = crtl->preferred_stack_boundary;
3556 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3557 crtl->stack_alignment_estimated = preferred_stack_boundary;
3558 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3559 crtl->stack_alignment_needed = preferred_stack_boundary;
3561 gcc_assert (crtl->stack_alignment_needed
3562 <= crtl->stack_alignment_estimated);
3564 crtl->stack_realign_needed
3565 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3566 crtl->stack_realign_tried = crtl->stack_realign_needed;
3568 crtl->stack_realign_processed = true;
3570 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3571 alignment. */
3572 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3573 drap_rtx = targetm.calls.get_drap_rtx ();
3575 /* stack_realign_drap and drap_rtx must match. */
3576 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3578 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3579 if (NULL != drap_rtx)
3581 crtl->args.internal_arg_pointer = drap_rtx;
3583 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3584 needed. */
3585 fixup_tail_calls ();
3589 /* Translate the intermediate representation contained in the CFG
3590 from GIMPLE trees to RTL.
3592 We do conversion per basic block and preserve/update the tree CFG.
3593 This implies we have to do some magic as the CFG can simultaneously
3594 consist of basic blocks containing RTL and GIMPLE trees. This can
3595 confuse the CFG hooks, so be careful not to manipulate the CFG during
3596 the expansion. */
3599 gimple_expand_cfg (void)
3601 basic_block bb, init_block;
3607 rewrite_out_of_ssa (&SA);
3608 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3611 /* Some backends want to know that we are expanding to RTL. */
3612 currently_expanding_to_rtl = 1;
3614 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3616 insn_locators_alloc ();
3617 if (!DECL_IS_BUILTIN (current_function_decl))
3619 /* Eventually, all FEs should explicitly set function_start_locus. */
3620 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3621 set_curr_insn_source_location
3622 (DECL_SOURCE_LOCATION (current_function_decl));
3624 set_curr_insn_source_location (cfun->function_start_locus);
3626 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3627 prologue_locator = curr_insn_locator ();
3629 /* Make sure first insn is a note even if we don't want linenums.
3630 This makes sure the first insn will never be deleted.
3631 Also, final expects a note to appear there. */
3632 emit_note (NOTE_INSN_DELETED);
3634 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3635 discover_nonconstant_array_refs ();
3637 targetm.expand_to_rtl_hook ();
3638 crtl->stack_alignment_needed = STACK_BOUNDARY;
3639 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3640 crtl->stack_alignment_estimated = 0;
3641 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3642 cfun->cfg->max_jumptable_ents = 0;
3645 /* Expand the variables recorded during gimple lowering. */
3646 expand_used_vars ();
3648 /* Honor stack protection warnings. */
3649 if (warn_stack_protect)
3651 if (cfun->calls_alloca)
3652 warning (OPT_Wstack_protector,
3653 "not protecting local variables: variable length buffer");
3654 if (has_short_buffer && !crtl->stack_protect_guard)
3655 warning (OPT_Wstack_protector,
3656 "not protecting function: no buffer at least %d bytes long",
3657 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3660 /* Set up parameters and prepare for return, for the function. */
3661 expand_function_start (current_function_decl);
3663 /* Now that we also have the parameter RTXs, copy them over to our
3664 partition representation. */
3665 for (i = 0; i < SA.map->num_partitions; i++)
3667 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3669 if (TREE_CODE (var) != VAR_DECL
3670 && !SA.partition_to_pseudo[i])
3671 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3672 gcc_assert (SA.partition_to_pseudo[i]);
3674 /* If this decl was marked as living in multiple places, reset
3675 this now to NULL. */
3676 if (DECL_RTL_IF_SET (var) == pc_rtx)
3677 SET_DECL_RTL (var, NULL);
3679 /* Some RTL parts really want to look at DECL_RTL(x) when x
3680 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3681 SET_DECL_RTL here to make this available, but that would mean
3682 selecting one of the potentially many RTLs for one DECL. Instead
3683 of doing that we simply reset the MEM_EXPR of the RTL in question,
3684 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3685 if (!DECL_RTL_SET_P (var))
3687 if (MEM_P (SA.partition_to_pseudo[i]))
3688 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3692 /* If this function is `main', emit a call to `__main'
3693 to run global initializers, etc. */
3694 if (DECL_NAME (current_function_decl)
3695 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3696 && DECL_FILE_SCOPE_P (current_function_decl))
3697 expand_main_function ();
3699 /* Initialize the stack_protect_guard field. This must happen after the
3700 call to __main (if any) so that the external decl is initialized. */
3701 if (crtl->stack_protect_guard)
3702 stack_protect_prologue ();
3704 expand_phi_nodes (&SA);
3706 /* Register rtl specific functions for cfg. */
3707 rtl_register_cfg_hooks ();
3709 init_block = construct_init_block ();
3711 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
3712 remaining edges later. */
3713 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3714 e->flags &= ~EDGE_EXECUTABLE;
3716 lab_rtx_for_bb = pointer_map_create ();
3717 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3718 bb = expand_gimple_basic_block (bb);
3720 if (MAY_HAVE_DEBUG_INSNS)
3721 expand_debug_locations ();
3723 execute_free_datastructures ();
3724 finish_out_of_ssa (&SA);
3726 /* We are no longer in SSA form. */
3727 cfun->gimple_df->in_ssa_p = false;
3729 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3730 conservatively to true until they are all profile aware. */
3731 pointer_map_destroy (lab_rtx_for_bb);
3734 construct_exit_block ();
3735 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3736 insn_locators_finalize ();
3738 /* Zap the tree EH table. */
3739 set_eh_throw_stmt_table (cfun, NULL);
3741 rebuild_jump_labels (get_insns ());
3743 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3747 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3750 commit_one_edge_insertion (e);
3756 /* We're done expanding trees to RTL. */
3757 currently_expanding_to_rtl = 0;
3759 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3763 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3765 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3766 e->flags &= ~EDGE_EXECUTABLE;
3768 /* At the moment not all abnormal edges match the RTL
3769 representation. It is safe to remove them here as
3770 find_many_sub_basic_blocks will rediscover them.
3771 In the future we should get this fixed properly. */
3772 if ((e->flags & EDGE_ABNORMAL)
3773 && !(e->flags & EDGE_SIBCALL))
3780 blocks = sbitmap_alloc (last_basic_block);
3781 sbitmap_ones (blocks);
3782 find_many_sub_basic_blocks (blocks);
3783 sbitmap_free (blocks);
3784 purge_all_dead_edges ();
3788 expand_stack_alignment ();
3790 #ifdef ENABLE_CHECKING
3791 verify_flow_info ();
3794 /* There's no need to defer outputting this function any more; we
3795 know we want to output it. */
3796 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3798 /* Now that we're done expanding trees to RTL, we shouldn't have any
3799 more CONCATs anywhere. */
3800 generating_concat_p = 0;
3805 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3806 /* And the pass manager will dump RTL for us. */
3809 /* If we're emitting a nested function, make sure its parent gets
3810 emitted as well. Doing otherwise confuses debug info. */
3813 for (parent = DECL_CONTEXT (current_function_decl);
3814 parent != NULL_TREE;
3815 parent = get_containing_scope (parent))
3816 if (TREE_CODE (parent) == FUNCTION_DECL)
3817 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3820 /* We are now committed to emitting code for this function. Do any
3821 preparation, such as emitting abstract debug info for the inline
3822 before it gets mangled by optimization. */
3823 if (cgraph_function_possibly_inlined_p (current_function_decl))
3824 (*debug_hooks->outlining_inline_function) (current_function_decl);
3826 TREE_ASM_WRITTEN (current_function_decl) = 1;
3828 /* After expanding, the return labels are no longer needed. */
3829 return_label = NULL;
3830 naked_return_label = NULL;
3831 /* Tag the blocks with a depth number so that change_scope can find
3832 the common parent easily. */
3833 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3834 default_rtl_profile ();
3838 struct rtl_opt_pass pass_expand =
3842 "expand", /* name */
3844 gimple_expand_cfg, /* execute */
3847 0, /* static_pass_number */
3848 TV_EXPAND, /* tv_id */
3849 PROP_ssa | PROP_gimple_leh | PROP_cfg
3850 | PROP_gimple_lcx, /* properties_required */
3851 PROP_rtl, /* properties_provided */
3852 PROP_ssa | PROP_trees, /* properties_destroyed */
3853 TODO_verify_ssa | TODO_verify_flow
3854 | TODO_verify_stmts, /* todo_flags_start */
3856 | TODO_ggc_collect /* todo_flags_finish */