/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
/* Verify that there is exactly one jump instruction emitted since LAST and
   attach a REG_BR_PROB note to it, specifying the branch probability.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
	/* It is common to emit condjump-around-jump sequence when we don't know
	   how to reverse the conditional.  Special case this.  */
	if (!any_condjump_p (last)
	    || !JUMP_P (NEXT_INSN (last))
	    || !simplejump_p (NEXT_INSN (last))
	    || !NEXT_INSN (NEXT_INSN (last))
	    || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	    || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
	    || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	    || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
	  goto failed;
	gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	add_reg_note (last, REG_BR_PROB,
		      GEN_INT (REG_BR_PROB_BASE - probability));
	return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
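/* An illustrative note on the numbers above: REG_BR_PROB_BASE is 10000,
   so PROBABILITY == 9000 means the branch is predicted taken 90% of the
   time.  In the condjump-around-jump case the emitted condjump branches
   around the taken path, which is why the note stores the inverted
   value REG_BR_PROB_BASE - PROBABILITY.  */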
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment of this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
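/* A worked example of the arithmetic above, assuming FRAME_GROWS_DOWNWARD
   and frame_phase == 0: with frame_offset == -20, a request for SIZE == 8
   at ALIGN == 8 computes -20 - 8 == -28, rounds down to -32 via the mask
   with -ALIGN, and returns -32 as the new slot's offset.  The upward
   variant instead rounds -20 up to -16 and then advances past SIZE.  */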
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}
/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t = i;
      i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
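/* Illustration of the layout: row I starts at linear index I*(I+1)/2, so

	(0,0) -> 0
	(1,0) -> 1   (1,1) -> 2
	(2,0) -> 3   (2,1) -> 4   (2,2) -> 5

   and, because of the swap above, triangular_index (1, 3) equals
   triangular_index (3, 1) == 3*4/2 + 1 == 7.  */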
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for structures that contain unions.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   memory through different types may not alias, and happily reorders
   stores across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case.  */
	      || contains_union)
	    add_stack_var_conflict (i, j);
	}
    }
}
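/* As an illustration of the PR25654 scenario described above: two stack
   temporaries of type

	union { short s; int i; }

   whose lifetimes do not overlap may be given the same slot.  Type-based
   alias analysis could then decide that a store through one member and a
   load through the other do not alias, and reorder them across the
   temporaries' lifetime boundary; the explicit conflict keeps such
   objects in distinct slots.  */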
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block, easy to lay out
   within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  if (isize == 0)
	    break;

	  offset += jsize;
	}
    }
}
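/* A small walkthrough: given objects sorted by size as {C:4, B:8, A:16},
   where B and C conflict with each other but neither conflicts with A,
   the pass over A (S == 16) unions B into A's partition at offset 0
   (S becomes 8).  union_stack_vars then merges B's conflicts into A, so
   the subsequent test against C fails and C keeps its own partition
   and, later, its own stack slot.  */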
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
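  /* OFFSET & -OFFSET isolates the lowest set bit: e.g. 24 & -24 == 8,
     so a variable placed at byte offset 24 from the frame phase is
     known to be (at least) 8-byte aligned.  */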
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
	continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
	  expand_one_stack_var_at (stack_vars[j].decl,
				   stack_vars[j].offset + offset);
	}
    }
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	SET_DECL_RTL (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
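/* For example: at -O0, a scalar or a 16-byte struct declared in an inner
   scope is below the 32-byte cutoff and gets its slot immediately, while
   a 64-byte buffer is still deferred for packing; with -fstack-protector
   everything is deferred so that character buffers can be reordered to
   the top of the frame.  */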
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is expected to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  if (TREE_CODE (var) != VAR_DECL)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (var);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    {
      if (really_expand)
	expand_one_stack_var (var);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
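/* Illustrative classifications, assuming the default
   --param ssp-buffer-size=8: with -fstack-protector, "char buf[64]"
   is phase 1 while "int x" is phase 0 (unprotected); with
   -fstack-protector-all, "char buf[4]" is phase 1 and "int a[4]" is
   phase 2, placing character buffers closest to the guard.  */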
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree,
   accounting for the stack space each variable would require.  This
   mirrors expand_used_vars_for_block, but nothing is actually expanded.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
  return size;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;

  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}
HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
	size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();

      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
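  /* For example, with PREFERRED_STACK_BOUNDARY == 128 (16 bytes) and
     STARTING_FRAME_OFFSET == 8, we get off == 8 and frame_phase == 8,
     restoring the invariant (frame_offset + frame_phase) % boundary == 0
     relied upon by alloc_stack_frame_space.  */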
  init_vars_expansion ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);
    }
  cfun->local_decls = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
	  && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree lab, lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (tsi = tsi_start (bb_stmt_list (bb)); !tsi_end_p (tsi); tsi_next (&tsi))
    {
      lab_stmt = tsi_stmt (tsi);
      if (TREE_CODE (lab_stmt) != LABEL_EXPR)
	break;

      lab = LABEL_EXPR_LABEL (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  rtx last2, last;

  gcc_assert (COND_EXPR_THEN (stmt) == NULL_TREE);
  gcc_assert (COND_EXPR_ELSE (stmt) == NULL_TREE);
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      set_curr_insn_source_location (*(EXPR_LOCUS (stmt)));
      set_curr_insn_block (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif (pred, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (true_edge->goto_locus)
	set_curr_insn_source_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (false_edge->goto_locus)
	set_curr_insn_source_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }

  jumpif (pred, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (false_edge->goto_locus)
    set_curr_insn_source_location (false_edge->goto_locus);

  return new_bb;
}
/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander may expand a conditional
	 with the sibcall in the `then' arm and a label for the `else'
	 arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree stmts = bb_stmt_list (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file,
	     "\n;; Generating RTL for tree basic block %d\n",
	     bb->index);

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  tsi = tsi_last (stmts);
  if (!tsi_end_p (tsi)
      && TREE_CODE (tsi_stmt (tsi)) == RETURN_EXPR)
    {
      tree ret_stmt = tsi_stmt (tsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
	  && !TREE_OPERAND (ret_stmt, 0))
	{
	  tsi_delink (&tsi);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  tsi = tsi_start (stmts);
  if (!tsi_end_p (tsi))
    {
      stmt = tsi_stmt (tsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	stmt = NULL_TREE;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_expr_stmt (stmt);
	  tsi_next (&tsi);
	}

      if (elt)
	emit_label ((rtx) *elt);

      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !tsi_end_p (tsi); tsi_next (&tsi))
    {
      tree stmt = tsi_stmt (tsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  int region;

	  /* For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  if (call && call != stmt)
	    {
	      if ((region = lookup_stmt_eh_region (stmt)) > 0)
		add_stmt_to_eh_region (call, region);
	      gimple_duplicate_stmt_histograms (cfun, call, cfun, stmt);
	    }
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  /* Expand implicit goto.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	break;
    }

  if (e && e->dest != bb->next_bb)
    {
      emit_jump (label_rtx_for_bb (e->dest));
      if (e->goto_locus)
	set_curr_insn_source_location (e->goto_locus);
      e->flags &= ~EDGE_FALLTHRU;
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could have moved the end of the
     last basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t,2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t))
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
		 NULL , NULL);
}
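/* An illustrative case of the above: given

	int a[2];
	... = a[i];

   the array might otherwise be promoted to a register (pair), which RTL
   expansion cannot index by the run-time value I; marking A addressable
   forces it into a stack slot, where a variable offset is fine.  */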
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  insn_locators_alloc ();
  if (!DECL_BUILT_IN (current_function_decl))
    set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl));
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
  pointer_map_destroy (lab_rtx_for_bb);

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  return 0;
}
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  tree_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_trees,				/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
 }
};