/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
/* Verify that there is exactly one jump instruction emitted since LAST,
   and attach a REG_BR_PROB note to it specifying PROBABILITY.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */
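/* A sketch of the two insn shapes matched below (illustrative): either a
   lone conditional jump, which receives PROBABILITY directly, or the
   sequence

	(jump_insn ...)		condjump around the unconditional jump
	(jump_insn ...)		simplejump to the "true" destination
	(barrier)
	(code_label ...)	fallthrough target

   in which case the note goes on the inverted condjump with probability
   REG_BR_PROB_BASE - PROBABILITY.  */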
static void
add_reg_br_prob_note (FILE *dump_file, rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
	/* It is common to emit a condjump-around-jump sequence when we don't
	   know how to reverse the conditional.  Special case this.  */
	if (!any_condjump_p (last)
	    || !JUMP_P (NEXT_INSN (last))
	    || !simplejump_p (NEXT_INSN (last))
	    || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	    || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	    || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
	  goto failed;
	gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	REG_NOTES (last)
	  = gen_rtx_EXPR_LIST (REG_BR_PROB,
			       GEN_INT (REG_BR_PROB_BASE - probability),
			       REG_NOTES (last));
	return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
			 GEN_INT (probability), REG_NOTES (last));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;
/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;
/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
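/* An illustrative example: if STARTING_FRAME_OFFSET is 4 and
   PREFERRED_STACK_BOUNDARY is 128 bits (16 bytes), expand_used_vars below
   computes off = 4 and frame_phase = 12, so that the initial frame_offset
   satisfies (frame_offset + frame_phase) % 16 == 0.  */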
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;
/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we cannot honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  return offset;
}
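/* A worked example of the downward-growing case (illustrative): with
   frame_offset == 0, frame_phase == 0, size == 12 and align == 8, we get
   new_frame_offset == (0 - 12) & -8 == -16, so the variable is placed at
   offset -16 and the 4 bytes above it pad the slot to 8-byte alignment.  */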
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}
/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  /* Normalize so that i >= j, since the matrix is lower triangular.  */
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
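/* For example, the rows of the triangle pack consecutively:
   (0,0) -> 0; (1,0) -> 1, (1,1) -> 2; (2,0) -> 3, (2,1) -> 4, (2,2) -> 5.
   The coordinate swap above lets callers pass I and J in either order.  */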
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n - 1, n - 1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also have to mind MEM_IN_STRUCT_P and
   MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      bool aggr_i = AGGREGATE_TYPE_P (TREE_TYPE (stack_vars[i].decl));
      HOST_WIDE_INT set_i = get_alias_set (stack_vars[i].decl);

      for (j = 0; j < i; ++j)
	{
	  bool aggr_j = AGGREGATE_TYPE_P (TREE_TYPE (stack_vars[j].decl));
	  HOST_WIDE_INT set_j = get_alias_set (stack_vars[j].decl);
	  if (aggr_i != aggr_j || !alias_sets_conflict_p (set_i, set_j))
	    add_stack_var_conflict (i, j);
	}
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  return 0;
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we've got a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/
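/* For instance (illustrative): with non-conflicting objects of sizes 16, 8
   and 4, starting from the 16-byte object we place the 8-byte object at
   offset 0 and the 4-byte object at offset 8 within the same partition,
   leaving 4 bytes of the representative's space unused.  */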
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  offset += jsize;
	}
    }
}
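/* An illustrative example of the alignment refinement above: if OFFSET is 4
   and candidate J requires 8-byte alignment, TOFF rounds up to 8 and the
   4 bytes of slack are deducted from the remaining space before J is
   placed.  */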
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set the alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
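  /* OFFSET & -OFFSET isolates the lowest set bit, i.e. the largest power of
     two dividing OFFSET: e.g. an offset of 24 bytes yields 8, and thus
     64-bit alignment below (illustrative).  */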
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
	continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 stack_vars[j].offset + offset);
    }
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
	 be pointers.  Do not trust compiler generated temporaries
	 as our type system is totally busted as it relates to
	 pointer arithmetic which translates into lots of compiler
	 generated objects with pointer types, but which are not really
	 pointers.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */

static void
expand_one_var (tree var, bool toplevel)
{
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    expand_one_stack_var (var);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
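/* For example (illustrative, assuming the default --param ssp-buffer-size=8):
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[4]" as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, "int v[10]"
   as SPCT_HAS_ARRAY alone, and a struct containing any such member also
   sets SPCT_HAS_AGGREGATE.  */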
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  HOST_WIDE_INT len;

	  if (!TYPE_DOMAIN (type)
	      || !TYPE_MAX_VALUE (TYPE_DOMAIN (type))
	      || !host_integerp (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return non-zero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return non-zero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */
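/* In short (a summary of the logic below): with -fstack-protector-all,
   plain character arrays go in phase 1 and other arrays in phase 2, while
   everything else stays in phase 0; otherwise only large character arrays
   are segregated, into phase 1.  */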
static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict,
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
	  && (current_function_calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars = NULL;
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
    {
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (dump_file, last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
	emit_line_note (*(EXPR_LOCUS (then_exp)));
      return NULL;
    }
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
    {
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (dump_file, last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
	emit_line_note (*(EXPR_LOCUS (else_exp)));
      return NULL;
    }

  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
	      && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (dump_file, last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

  return new_bb;
}
/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */
static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if with a
	 sibcall in the then arm and a label for the else arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, FILE * dump_file)
{
  block_stmt_iterator bsi = bsi_start (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file,
	     "\n;; Generating RTL for tree basic block %d\n",
	     bb->index);

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
    {
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      bsi_next (&bsi);
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

static void
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
	warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
	warning (0, "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb, dump_file);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;
}
struct tree_opt_pass pass_expand =
{
  "expand",				/* name */
  NULL,					/* gate */
  tree_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_gimple_leh,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  'r'					/* letter */
};