/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
/* This variable holds information helping the rewriting of SSA trees
   to RTL.  */

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

gimple_assign_rhs_to_tree (gimple stmt)

  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
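/* (Added example, illustrative; not part of the original source.)  For a
   GIMPLE statement "a_1 = b_2 + c_3" the RHS class is GIMPLE_BINARY_RHS,
   so the function above rebuilds the GENERIC tree
   build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3).  */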
/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note specifying the probability.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */

add_reg_br_prob_note (rtx last, int probability)

  if (profile_status == PROFILE_ABSENT)

  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))

      /* It is common to emit a condjump-around-jump sequence when we don't
         know how to reverse the conditional.  Special case this.  */
      if (!any_condjump_p (last)
          || !JUMP_P (NEXT_INSN (last))
          || !simplejump_p (NEXT_INSN (last))
          || !NEXT_INSN (NEXT_INSN (last))
          || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
          || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
          || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
          || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))

      gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
      add_reg_note (last, REG_BR_PROB,
                    GEN_INT (REG_BR_PROB_BASE - probability));

  if (!last || !JUMP_P (last) || !any_condjump_p (last))

  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));

  fprintf (dump_file, "Failed to add probability note\n");

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
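/* (Added note, not in the original source.)  SSAVAR maps an SSA_NAME back to
   its underlying declaration, so the size and alignment queries below can
   treat the SSA name "a_1" and its base variable "a" uniformly.  */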
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

set_rtl (tree t, rtx x)

  if (TREE_CODE (t) == SSA_NAME)

      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;

        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)

          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)

          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment of this partition.  */

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */

  /* The indices of conflicting stack variables.  */

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy with the expected alignment of the stack boundary.  */

get_decl_align_unit (tree decl)

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)

      if (crtl->stack_alignment_estimated < align)

          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)

  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)

      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;

      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;

  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;
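/* (Added worked example, illustrative; not part of the original source.)
   With FRAME_GROWS_DOWNWARD, frame_phase == 0, frame_offset == -20,
   size == 10 and align == 8: new_frame_offset = (-20 - 10) & -8 == -32,
   so the variable is placed at offset -32 and frame_offset becomes -32.
   ANDing with -align works because for a power-of-two ALIGN, -align is a
   mask with the low log2(align) bits clear, which rounds toward more
   negative (downward-growing) addresses.  */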
/* Accumulate DECL into STACK_VARS.  */

add_stack_var (tree decl)

  if (stack_vars_num >= stack_vars_alloc)

      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;

        stack_vars_alloc = 32;

        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);

  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);
/* Make the decls associated with LUIDs X and Y conflict.  */

add_stack_var_conflict (size_t x, size_t y)

  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];

    a->conflicts = BITMAP_ALLOC (NULL);

    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
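/* (Added note, not in the original source.)  Conflicts are recorded
   symmetrically in both bitmaps, so stack_var_conflict_p below only needs
   to test one direction.  */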
/* Check whether the decls associated with LUIDs X and Y conflict.  */

stack_var_conflict_p (size_t x, size_t y)

  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)

  return bitmap_bit_p (a->conflicts, y);
/* Returns true if TYPE is or contains a union type.  */

aggregate_contains_union_type (tree type)

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)

  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type-based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */
add_alias_set_conflicts (void)

  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)

      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)

          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);

              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */

            add_stack_var_conflict (i, j);
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

stack_var_size_cmp (const void *a, const void *b)

  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;

  unsigned int uida, uidb;

  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size, use an ID of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)

      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);

  else if (TREE_CODE (declb) == SSA_NAME)

    uida = DECL_UID (decla), uidb = DECL_UID (declb);
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   set.  */

add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)

      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))

  /* By using a temporary bitmap to store all members of the partitions
     we have to add, we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)

         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

update_alias_info_with_stack_vars (void)

  struct pointer_map_t *decls_to_partitions = NULL;

  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)

      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)

      if (!decls_to_partitions)

          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)

          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)

      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)

          tree name = ssa_name (i);
          struct ptr_info_def *pi;

              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we've got a nice block that is easy to lay
   out within the stack frame.  */

union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)

  struct stack_var *vb = &stack_vars[b];

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)

      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;

  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
    EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
      add_stack_var_conflict (a, stack_vars[u].representative);
    BITMAP_FREE (vb->conflicts);
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.

            Look for the largest non-conflicting object B with size <= S.

partition_stack_vars (void)

  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)

      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )

          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)

          /* Ignore objects too large for the remaining space.  */

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))

              HOST_WIDE_INT toff = offset;

              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)

              isize -= toff - offset;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

  update_alias_info_with_stack_vars ();
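/* (Added worked example, illustrative; not part of the original source.)
   Suppose A has size 32 and B and C each have size 16, where B and C
   conflict with each other but neither conflicts with A.  Scanning from
   the largest object, A becomes the representative; B is unioned in at
   offset 0 and C at offset 16, so all three variables end up sharing a
   single 32-byte slot in the frame.  */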
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

dump_stack_var_partition (void)

  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)

          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
/* Assign rtl to DECL at frame offset OFFSET.  */

expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)

  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)

      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
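      /* (Added note, not in the original source.)  OFFSET & -OFFSET isolates
         the lowest set bit of OFFSET, i.e. the largest power of two that
         divides it, which is exactly the byte alignment this frame offset
         is known to provide.  */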
      align = offset & -offset;
      align *= BITS_PER_UNIT;

        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, SSAVAR (decl), true);
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

expand_stack_vars (bool (*pred) (tree))

  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)

      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)

          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

account_stack_vars (void)

  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

expand_one_stack_var (tree var)

  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

expand_one_hard_reg_var (tree var)

  rest_of_decl_compilation (var, 0, 0);
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

expand_one_register_var (tree var)

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

expand_one_error_var (tree var)

  enum machine_mode mode = DECL_MODE (var);

    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)

    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

defer_stack_allocation (tree var, bool toplevel)

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

expand_one_var (tree var, bool toplevel, bool really_expand)

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));

        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)

          /* stack_alignment_estimated shouldn't change after the stack
             realign decision is made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;

  if (TREE_CODE (origvar) == SSA_NAME)

      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)

  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)

  else if (DECL_EXTERNAL (var))

  else if (DECL_HAS_VALUE_EXPR_P (var))

  else if (TREE_STATIC (var))

  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))

  else if (TREE_TYPE (var) == error_mark_node)

        expand_one_error_var (var);

  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))

        expand_one_hard_reg_var (var);

  else if (use_register_for_decl (var))

        expand_one_register_var (origvar);

  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))

          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);

  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);

        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

expand_used_vars_for_block (tree block, bool toplevel)

  size_t i, j, old_sv_num, this_sv_num, new_sv_num;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)

      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

clear_tree_used (tree block)

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

stack_protect_classify_type (tree type)

  unsigned int ret = 0;

  switch (TREE_CODE (type))

      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)

          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))

            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;

            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;

          ret = SPCT_HAS_ARRAY;

    case QUAL_UNION_TYPE:

      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

stack_protect_decl_phase (tree decl)

  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)

      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))

      else if (bits & SPCT_HAS_ARRAY)

    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

    has_protected_decls = true;
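/* (Added example, illustrative; not part of the original source.)  With the
   default --param ssp-buffer-size=8, a "char buf[64]" classifies as a large
   character array and is placed in phase 1 under both -fstack-protector and
   -fstack-protector-all, while an "int a[4]" is only segregated (phase 2)
   under -fstack-protector-all.  */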
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

stack_protect_decl_phase_1 (tree decl)

  return stack_protect_decl_phase (decl) == 1;

stack_protect_decl_phase_2 (tree decl)

  return stack_protect_decl_phase (decl) == 2;

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

add_stack_protection_conflicts (void)

  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)

      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);

/* Create a decl for the guard at the top of the stack frame.  */

create_stack_guard (void)

  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree,
   accounting for the sizes of used variables without actually expanding
   them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)

  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);
/* Prepare for expanding variables.  */

init_vars_expansion (void)

  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

/* Free up stack variable graph data.  */

fini_vars_expansion (void)

  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);

  stack_vars_alloc = stack_vars_num = 0;

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

estimated_stack_frame_size (void)

  HOST_WIDE_INT size = 0;

  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))

      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;

  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)

      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
/* Expand all variables used in the function.  */

expand_used_vars (void)

  tree t, next, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */

    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
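    /* (Added worked example, illustrative; not part of the original
       source.)  With a 128-bit PREFERRED_STACK_BOUNDARY (align == 16) and
       STARTING_FRAME_OFFSET == 8, off == 8 and frame_phase == 8, so frame
       offsets are rounded such that (frame_offset + 8) stays a multiple
       of 16.  */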
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)

      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);

          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But for those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))

              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;

      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))

          TREE_USED (var) = 0;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

          expand_one_var (var, true, true);
          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))

              rtx rtl = DECL_RTL_IF_SET (var);

              /* Keep artificial non-ignored vars in cfun->local_decls
                 chain until instantiate_decls.  */
              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))

                  TREE_CHAIN (t) = cfun->local_decls;
                  cfun->local_decls = t;
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)

      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();

        dump_stack_var_partition ();

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)

          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);

      expand_stack_vars (NULL);

      fini_vars_expansion ();

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)

      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)

  if (dump_file && (dump_flags & TDF_DETAILS))

      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)

  gimple_stmt_iterator gsi;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))

      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))

      return label_rtx (lab);

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

maybe_cleanup_end_of_block (edge e, rtx last)

  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))

      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)

          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

expand_gimple_cond (basic_block bb, gimple stmt)

  basic_block new_bb, dest;

  enum tree_code code;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         goto <label>;
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))

      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)

          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)

              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);

          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)

              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)

                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);

              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)

                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))

      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)

      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)

          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();

      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);

  if (true_edge->dest == bb->next_bb)

      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)

          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();

      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)

      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();

  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)

      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();

  true_edge->goto_block = NULL;
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

expand_call_stmt (gimple stmt)

  tree lhs = gimple_call_lhs (stmt);

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)

      tree arg = gimple_call_arg (stmt, i);

      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

    expand_assignment (lhs, exp, false);

    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tail calls and no GIMPLE_COND.  */

expand_gimple_stmt_1 (gimple stmt)

  switch (gimple_code (stmt))

      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)

        expand_computed_goto (op0);

      expand_label (gimple_label_label (stmt));

    case GIMPLE_PREDICT:

      expand_asm_stmt (stmt);

      expand_call_stmt (stmt);

      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)

          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */

              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),

        expand_null_return ();

        expand_return (op0);

      tree lhs = gimple_assign_lhs (stmt);

      /* Tree expand used to fiddle with |= and &= of two bitfield
         COMPONENT_REFs here.  This can't happen with gimple, the LHS
         of binary assigns must be a gimple reg.  */

      if (TREE_CODE (lhs) != SSA_NAME
          || get_gimple_rhs_class (gimple_expr_code (stmt))
             == GIMPLE_SINGLE_RHS)

          tree rhs = gimple_assign_rhs1 (stmt);
          gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                      == GIMPLE_SINGLE_RHS);
          if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
            SET_EXPR_LOCATION (rhs, gimple_location (stmt));
          expand_assignment (lhs, rhs,
                             gimple_assign_nontemporal_move_p (stmt));
          bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
          struct separate_ops ops;
          bool promoted = false;

          target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
          if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))

          ops.code = gimple_assign_rhs_code (stmt);
          ops.type = TREE_TYPE (lhs);
          switch (get_gimple_rhs_class (gimple_expr_code (stmt)))

              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);

              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);

          ops.location = gimple_location (stmt);

          /* If we want to use a nontemporal store, force the value to
             register first.  If we store into a promoted register,
             don't directly expand to target.  */
          temp = nontemporal || promoted ? NULL_RTX : target;
          temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),

              int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
              /* If TEMP is a VOIDmode constant, use convert_modes to make
                 sure that we properly convert it.  */
              if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)

                  temp = convert_modes (GET_MODE (target),
                                        TYPE_MODE (ops.type),
                  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                        GET_MODE (target), temp, unsignedp);

              convert_move (SUBREG_REG (target), temp, unsignedp);

          else if (nontemporal && emit_storent_insn (target, temp))

              temp = force_operand (temp, target);

                emit_move_insn (target, temp);
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

expand_gimple_stmt (gimple stmt)

  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */

  if (gimple_has_location (stmt))

      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);

      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))

          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;

  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )

      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))

          if (e->dest != EXIT_BLOCK_PTR)

              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)

              if (e->dest->frequency < 0)
                e->dest->frequency = 0;

          probability += e->probability;

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))

      /* For instance an sqrt builtin expander expands an if with a sibcall
         in the then arm and a label for the `else` arm.  */
      if (LABEL_P (NEXT_INSN (last)))

          *can_fallthru = true;

      delete_insn (NEXT_INSN (last));

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;

  update_bb_for_insn (bb);

  if (NEXT_INSN (last))

      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
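
/* Worked example: for -7 / 3, truncation yields -2 with remainder -1,
   while the floor result is -3.  MOD is nonzero and OP1 / MOD = 3 / -1
   is negative, so the expression above evaluates to -1, the exact
   difference between floor and truncation.  */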

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
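
/* Worked example: for 7 / 3, truncation yields 2 with remainder 1,
   while the ceiling result is 3; MOD is nonzero and OP1 / MOD = 3 / 1
   is positive, so the adjustment is +1.  For -7 / 3 the remainder is
   -1 and OP1 / MOD is negative, so the adjustment is 0: truncation
   already rounds toward the ceiling there.  */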

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
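
/* Unsigned truncation always rounds down, so any nonzero remainder
   puts the ceiling exactly one above the truncated quotient; e.g.
   7u / 3u truncates to 2 with remainder 1, and the ceiling is 3.  */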

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
        ? (op1 / mod > 0 ? 1 : -1)
        : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
                       gen_rtx_MINUS (mode,
                                      gen_rtx_ABS (mode, op1),
                                      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
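
/* Worked example: for 7 / 4, truncation yields 1 with remainder 3;
   abs (mod) = 3 >= abs (op1) - abs (mod) = 1 and OP1 / MOD is positive,
   so the adjustment is +1 and 7 / 4 rounds to 2.  The halfway case
   10 / 4 = 2.5 likewise satisfies 2 >= 2 and rounds away from zero to
   3, not to the nearest even value 2.  */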

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
                       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
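
/* Worked example: for 7u / 4u, truncation yields 1 with remainder 3;
   mod = 3 >= op1 - mod = 1, so the adjustment is +1 and the rounded
   quotient is 2.  For 5u / 4u the remainder 1 is less than 3, so the
   truncated quotient 1 is already the rounded result.  */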

/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (enum machine_mode mode, rtx x)
{
  enum machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode);
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  gcc_assert (mode == Pmode || mode == ptr_mode);

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
    x = simplify_gen_subreg (mode, x, xmode,
                             subreg_lowpart_offset
                             (mode, xmode));
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    gcc_unreachable ();
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
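
/* On most targets Pmode and ptr_mode are identical and the early return
   above triggers.  The conversion only matters on targets that define
   POINTERS_EXTEND_UNSIGNED, where pointers are narrower than Pmode:
   going from, say, a 32-bit ptr_mode value to a 64-bit Pmode address
   requires an explicit ZERO_EXTEND or SIGN_EXTEND, chosen by the sign
   of POINTERS_EXTEND_UNSIGNED.  */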

/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;
  enum machine_mode address_mode;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case COND_EXPR:
          goto ternary;

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          goto binary;

        case TRUTH_NOT_EXPR:
          goto unary;

        default:
          break;
        }
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
        return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
    case tcc_comparison:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
        return NULL_RTX;
      /* Fall through.  */

    unary:
    case tcc_unary:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL_RTX;
      break;

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
      break;
    }
  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
        {
          if (strlen (TREE_STRING_POINTER (exp)) + 1
              != (size_t) TREE_STRING_LENGTH (exp))
            return NULL_RTX;
          op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
          op0 = gen_rtx_MEM (BLKmode, op0);
          set_mem_attributes (op0, exp, 0);
          return op0;
        }
      /* Fall through...  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);
    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
        return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
        {
          if (TREE_CODE (exp) != VAR_DECL
              || DECL_EXTERNAL (exp)
              || !TREE_STATIC (exp)
              || !DECL_NAME (exp)
              || DECL_HARD_REGISTER (exp)
              || mode == VOIDmode)
            return NULL;

          op0 = DECL_RTL (exp);
          SET_DECL_RTL (exp, NULL);
          if (!MEM_P (op0)
              || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
              || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
            return NULL;
        }
      else
        op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode)
        {
          gcc_assert (MEM_P (op0));
          op0 = adjust_address_nv (op0, mode, 0);
        }

      return op0;
    CASE_CONVERT:
      {
        enum machine_mode inner_mode = GET_MODE (op0);

        if (mode == inner_mode)
          return op0;

        if (inner_mode == VOIDmode)
          {
            inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
            if (mode == inner_mode)
              return op0;
          }

        if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
          {
            if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
            else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (mode))
          {
            if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
              op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (inner_mode))
          {
            if (unsignedp)
              op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
          }
        else if (CONSTANT_P (op0)
                 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
          op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                     subreg_lowpart_offset (mode,
                                                            inner_mode));
        else if (unsignedp)
          op0 = gen_rtx_ZERO_EXTEND (mode, op0);
        else
          op0 = gen_rtx_SIGN_EXTEND (mode, op0);

        return op0;
      }
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL;

      if (POINTER_TYPE_P (TREE_TYPE (exp)))
        {
          as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
          address_mode = targetm.addr_space.address_mode (as);
        }
      else
        {
          as = ADDR_SPACE_GENERIC;
          address_mode = Pmode;
        }

      if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
        {
          int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
          op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
        }

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;
    case TARGET_MEM_REF:
      if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
        return NULL;

      op0 = expand_debug_expr
            (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
        return NULL;

      as = TYPE_ADDR_SPACE (TREE_TYPE (exp));

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep, false);
        rtx orig_op0;

        if (bitsize == 0)
          return NULL;

        orig_op0 = op0 = expand_debug_expr (tem);

        if (!op0)
          return NULL;

        if (offset)
          {
            enum machine_mode addrmode, offmode;

            gcc_assert (MEM_P (op0));

            op0 = XEXP (op0, 0);
            addrmode = GET_MODE (op0);
            if (addrmode == VOIDmode)
              addrmode = Pmode;

            op1 = expand_debug_expr (offset);
            if (!op1)
              return NULL;

            offmode = GET_MODE (op1);
            if (offmode == VOIDmode)
              offmode = TYPE_MODE (TREE_TYPE (offset));

            if (addrmode != offmode)
              op1 = simplify_gen_subreg (addrmode, op1, offmode,
                                         subreg_lowpart_offset (addrmode,
                                                                offmode));

            /* Don't use offset_address here, we don't need a
               recognizable address, and we don't want to generate
               code.  */
            op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
          }

        if (MEM_P (op0))
          {
            if (mode1 == VOIDmode)
              /* Bitfield.  */
              mode1 = smallest_mode_for_size (bitsize, MODE_INT);
            if (bitpos >= BITS_PER_UNIT)
              {
                op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos %= BITS_PER_UNIT;
              }
            else if (bitpos < 0)
              {
                HOST_WIDE_INT units
                  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
                op0 = adjust_address_nv (op0, mode1, units);
                bitpos += units * BITS_PER_UNIT;
              }
            else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
              op0 = adjust_address_nv (op0, mode, 0);
            else if (GET_MODE (op0) != mode1)
              op0 = adjust_address_nv (op0, mode1, 0);
            else
              op0 = copy_rtx (op0);
            if (op0 == orig_op0)
              op0 = shallow_copy_rtx (op0);
            set_mem_attributes (op0, exp, 0);
          }

        if (bitpos == 0 && mode == GET_MODE (op0))
          return op0;

        if (bitpos < 0)
          return NULL;

        if ((bitpos % BITS_PER_UNIT) == 0
            && bitsize == GET_MODE_BITSIZE (mode1))
          {
            enum machine_mode opmode = GET_MODE (op0);

            gcc_assert (opmode != BLKmode);

            if (opmode == VOIDmode)
              opmode = mode1;

            /* This condition may hold if we're expanding the address
               right past the end of an array that turned out not to
               be addressable (i.e., the address was only computed in
               debug stmts).  The gen_subreg below would rightfully
               crash, and the address doesn't really exist, so just
               drop to NULL instead.  */
            if (bitpos >= GET_MODE_BITSIZE (opmode))
              return NULL;

            return simplify_gen_subreg (mode, op0, opmode,
                                        bitpos / BITS_PER_UNIT);
          }

        return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
                                     && TYPE_UNSIGNED (TREE_TYPE (exp))
                                     ? SIGN_EXTRACT
                                     : ZERO_EXTRACT, mode,
                                     GET_MODE (op0) != VOIDmode
                                     ? GET_MODE (op0) : mode1,
                                     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }
    case ABS_EXPR:
      return gen_rtx_ABS (mode, op0);

    case NEGATE_EXPR:
      return gen_rtx_NEG (mode, op0);

    case BIT_NOT_EXPR:
      return gen_rtx_NOT (mode, op0);

    case FLOAT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return gen_rtx_UNSIGNED_FLOAT (mode, op0);
      else
        return gen_rtx_FLOAT (mode, op0);

    case FIX_TRUNC_EXPR:
      if (unsignedp)
        return gen_rtx_UNSIGNED_FIX (mode, op0);
      else
        return gen_rtx_FIX (mode, op0);
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      return gen_rtx_PLUS (mode, op0, op1);

    case MINUS_EXPR:
      return gen_rtx_MINUS (mode, op0, op1);

    case MULT_EXPR:
      return gen_rtx_MULT (mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
        return gen_rtx_UDIV (mode, op0, op1);
      else
        return gen_rtx_DIV (mode, op0, op1);

    case TRUNC_MOD_EXPR:
      if (unsignedp)
        return gen_rtx_UMOD (mode, op0, op1);
      else
        return gen_rtx_MOD (mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
        return gen_rtx_UDIV (mode, op0, op1);
      else
        {
          rtx div = gen_rtx_DIV (mode, op0, op1);
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }

    case FLOOR_MOD_EXPR:
      if (unsignedp)
        return gen_rtx_UMOD (mode, op0, op1);
      else
        {
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }

    case CEIL_DIV_EXPR:
      if (unsignedp)
        {
          rtx div = gen_rtx_UDIV (mode, op0, op1);
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }
      else
        {
          rtx div = gen_rtx_DIV (mode, op0, op1);
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }

    case CEIL_MOD_EXPR:
      if (unsignedp)
        {
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }
      else
        {
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }

    case ROUND_DIV_EXPR:
      if (unsignedp)
        {
          rtx div = gen_rtx_UDIV (mode, op0, op1);
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }
      else
        {
          rtx div = gen_rtx_DIV (mode, op0, op1);
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }

    case ROUND_MOD_EXPR:
      if (unsignedp)
        {
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }
      else
        {
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }

    case LSHIFT_EXPR:
      return gen_rtx_ASHIFT (mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
        return gen_rtx_LSHIFTRT (mode, op0, op1);
      else
        return gen_rtx_ASHIFTRT (mode, op0, op1);

    case LROTATE_EXPR:
      return gen_rtx_ROTATE (mode, op0, op1);

    case RROTATE_EXPR:
      return gen_rtx_ROTATERT (mode, op0, op1);

    case MIN_EXPR:
      if (unsignedp)
        return gen_rtx_UMIN (mode, op0, op1);
      else
        return gen_rtx_SMIN (mode, op0, op1);

    case MAX_EXPR:
      if (unsignedp)
        return gen_rtx_UMAX (mode, op0, op1);
      else
        return gen_rtx_SMAX (mode, op0, op1);
    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return gen_rtx_AND (mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return gen_rtx_IOR (mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return gen_rtx_XOR (mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return gen_rtx_EQ (mode, op0, const0_rtx);

    case LT_EXPR:
      if (unsignedp)
        return gen_rtx_LTU (mode, op0, op1);
      else
        return gen_rtx_LT (mode, op0, op1);

    case LE_EXPR:
      if (unsignedp)
        return gen_rtx_LEU (mode, op0, op1);
      else
        return gen_rtx_LE (mode, op0, op1);

    case GT_EXPR:
      if (unsignedp)
        return gen_rtx_GTU (mode, op0, op1);
      else
        return gen_rtx_GT (mode, op0, op1);

    case GE_EXPR:
      if (unsignedp)
        return gen_rtx_GEU (mode, op0, op1);
      else
        return gen_rtx_GE (mode, op0, op1);

    case EQ_EXPR:
      return gen_rtx_EQ (mode, op0, op1);

    case NE_EXPR:
      return gen_rtx_NE (mode, op0, op1);

    case UNORDERED_EXPR:
      return gen_rtx_UNORDERED (mode, op0, op1);

    case ORDERED_EXPR:
      return gen_rtx_ORDERED (mode, op0, op1);

    case UNLT_EXPR:
      return gen_rtx_UNLT (mode, op0, op1);

    case UNLE_EXPR:
      return gen_rtx_UNLE (mode, op0, op1);

    case UNGT_EXPR:
      return gen_rtx_UNGT (mode, op0, op1);

    case UNGE_EXPR:
      return gen_rtx_UNGE (mode, op0, op1);

    case UNEQ_EXPR:
      return gen_rtx_UNEQ (mode, op0, op1);

    case LTGT_EXPR:
      return gen_rtx_LTGT (mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
        op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
        op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
        return gen_rtx_CONCAT (mode, XEXP (op0, 0),
                               gen_rtx_NEG (GET_MODE_INNER (mode),
                                            XEXP (op0, 1)));
      else
        {
          enum machine_mode imode = GET_MODE_INNER (mode);
          rtx re, im;

          if (MEM_P (op0))
            {
              re = adjust_address_nv (op0, imode, 0);
              im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
            }
          else
            {
              enum machine_mode ifmode = int_mode_for_mode (mode);
              enum machine_mode ihmode = int_mode_for_mode (imode);
              rtx halfsize;
              if (ifmode == BLKmode || ihmode == BLKmode)
                return NULL;
              halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
              re = op0;
              if (mode != ifmode)
                re = gen_rtx_SUBREG (ifmode, re, 0);
              re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
              if (imode != ihmode)
                re = gen_rtx_SUBREG (imode, re, 0);
              im = copy_rtx (op0);
              if (mode != ifmode)
                im = gen_rtx_SUBREG (ifmode, im, 0);
              im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
              if (imode != ihmode)
                im = gen_rtx_SUBREG (imode, im, 0);
            }
          im = gen_rtx_NEG (imode, im);
          return gen_rtx_CONCAT (mode, re, im);
        }
    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
        return NULL;

      op0 = convert_debug_memory_address (mode, XEXP (op0, 0));

      return op0;
    case VECTOR_CST:
      exp = build_constructor_from_list (TREE_TYPE (exp),
                                         TREE_VECTOR_CST_ELTS (exp));
      /* Fall through.  */

    case CONSTRUCTOR:
      if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
        {
          unsigned i;
          tree val;

          op0 = gen_rtx_CONCATN
            (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
            {
              op1 = expand_debug_expr (val);
              if (!op1)
                return NULL;
              XVECEXP (op0, 0, i) = op1;
            }

          if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
            {
              op1 = expand_debug_expr
                (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
              if (!op1)
                return NULL;
              for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
                XVECEXP (op0, 0, i) = op1;
            }

          return op0;
        }
      else
        goto flag_unsupported;
    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
        int part = var_to_partition (SA.map, exp);

        if (part == NO_PARTITION)
          return NULL;

        gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

        op0 = SA.partition_to_pseudo[part];
        return op0;
      }

    default:
    flag_unsupported:
#ifdef ENABLE_CHECKING
      debug_tree (exp);
      gcc_unreachable ();
#else
      return NULL;
#endif
    }
}
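
/* Note the contrast with expand_expr proper: everything above is built
   with gen_rtx_* and simplify_gen_* only and never emits instructions,
   since the result merely annotates a debug insn.  For example, a
   GIMPLE debug bind whose value is x + 1 would come back as something
   like (plus:SI (reg:SI x') (const_int 1)), assuming x lives in the
   pseudo x'; the sketch is illustrative, not exact output.  */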

/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx insn;
  rtx last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
        tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
        rtx val;
        enum machine_mode mode;

        if (value == NULL_TREE)
          val = NULL_RTX;
        else
          {
            val = expand_debug_expr (value);
            gcc_assert (last == get_last_insn ());
          }

        if (!val)
          val = gen_rtx_UNKNOWN_VAR_LOC ();
        else
          {
            mode = GET_MODE (INSN_VAR_LOCATION (insn));

            gcc_assert (mode == GET_MODE (val)
                        || (GET_MODE (val) == VOIDmode
                            && (CONST_INT_P (val)
                                || GET_CODE (val) == CONST_FIXED
                                || GET_CODE (val) == CONST_DOUBLE
                                || GET_CODE (val) == LABEL_REF)));
          }

        INSN_VAR_LOCATION_LOC (insn) = val;
      }

  flag_strict_aliasing = save_strict_alias;
}
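
/* For instance, a bind of variable x to the value y + 1 ends up as
   something like

     (debug_insn (var_location:SI x (plus:SI (reg:SI y') (const_int 1))))

   assuming y lives in the pseudo y', while a value that could not be
   expanded degrades to (var_location:SI x (unknown)), which the
   var-tracking pass treats as "location not known here".  (Sketch
   only; the exact dump format differs.)  */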

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          expand_gimple_stmt (stmt);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label ((rtx) *elt);

      /* Java emits line number notes in the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;
  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);
      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          new_bb = expand_gimple_cond (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else if (gimple_debug_bind_p (stmt))
        {
          location_t sloc = get_curr_insn_source_location ();
          tree sblock = get_curr_insn_block ();
          gimple_stmt_iterator nsi = gsi;

          for (;;)
            {
              tree var = gimple_debug_bind_get_var (stmt);
              tree value;
              rtx val;
              enum machine_mode mode;

              if (gimple_debug_bind_has_value_p (stmt))
                value = gimple_debug_bind_get_value (stmt);
              else
                value = NULL_TREE;

              last = get_last_insn ();

              set_curr_insn_source_location (gimple_location (stmt));
              set_curr_insn_block (gimple_block (stmt));

              if (DECL_P (var))
                mode = DECL_MODE (var);
              else
                mode = TYPE_MODE (TREE_TYPE (var));

              val = gen_rtx_VAR_LOCATION
                (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

              val = emit_debug_insn (val);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  /* We can't dump the insn with a TREE where an RTX
                     is expected.  */
                  INSN_VAR_LOCATION_LOC (val) = const0_rtx;
                  maybe_dump_rtl_for_gimple_stmt (stmt, last);
                  INSN_VAR_LOCATION_LOC (val) = (rtx)value;
                }

              gsi = nsi;
              gsi_next (&nsi);
              if (gsi_end_p (nsi))
                break;
              stmt = gsi_stmt (nsi);
              if (!gimple_debug_bind_p (stmt))
                break;
            }

          set_curr_insn_source_location (sloc);
          set_curr_insn_block (sblock);
        }
      else
        {
          if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              def_operand_p def_p;
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

              if (def_p != NULL)
                {
                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  if (SA.values
                      && bitmap_bit_p (SA.values,
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
                    continue;
                }
              last = expand_gimple_stmt (stmt);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
            }
        }
    }

  currently_expanding_gimple_stmt = NULL;
  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
        {
          set_curr_insn_source_location (e->goto_locus);
          set_curr_insn_block (e->goto_block);
          e->goto_locus = curr_insn_locator ();
        }
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
        {
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;
        }
    }

  /* Expanded RTL can create a jump in the last instruction of the block.
     This later might be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}

/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
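
/* For example, given the nested scopes

     { int a;        <- DECL_INITIAL block, depth 0
       { int b;      <- BLOCK_SUBBLOCKS, depth 1
         { int c; }  <- depth 2
       }
     }

   the outermost block gets BLOCK_NUMBER 0 and each level of nesting one
   more; sibling blocks reached through BLOCK_CHAIN share a depth.  */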

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
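
/* Example of the situation being avoided: a decl whose DECL_MODE is not
   BLKmode, say a four-element array overlaid by a single V4SImode
   register, indexed as a[i] with non-constant I.  RTL has no way to
   address the middle of a register at a variable offset, so the walker
   above forces such decls into memory by setting TREE_ADDRESSABLE.  */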

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     realignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
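
/* DRAP is the "dynamic realign argument pointer": once the prologue
   realigns the stack to a stricter boundary than the incoming one, the
   incoming argument area can no longer be addressed relative to the
   realigned stack pointer, so a separate register is dedicated to it;
   that register is what crtl->args.internal_arg_pointer is redirected
   to above.  */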

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */
static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
                                           sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the
             backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect			/* todo_flags_finish */
 }
};