/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    t = gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();

  return t;
}
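/* For example, the GIMPLE_BINARY_RHS assignment a_1 = b_2 + c_3 yields
   the tree PLUS_EXPR <b_2, c_3>, typed like the LHS, while a
   GIMPLE_SINGLE_RHS assignment simply returns its first operand
   unchanged.  */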
/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
   statement STMT.  */

static tree
gimple_cond_pred_to_tree (gimple stmt)
{
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  tree lhs = gimple_cond_lhs (stmt);
  if (SA.values
      && TREE_CODE (lhs) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (lhs)))
    lhs = gimple_assign_rhs_to_tree (SSA_NAME_DEF_STMT (lhs));

  return build2 (gimple_cond_code (stmt), boolean_type_node,
                 lhs, gimple_cond_rhs (stmt));
}
/* Helper for gimple_to_tree.  Set EXPR_LOCATION for every expression
   inside *TP.  DATA is the location to set.  */

static tree
set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
{
  location_t *loc = (location_t *) data;
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, *loc);

  return NULL_TREE;
}
/* RTL expansion has traditionally been done on trees, so the
   transition to doing it on GIMPLE tuples is very invasive to the RTL
   expander.  To facilitate the transition, this function takes a
   GIMPLE tuple STMT and returns the same statement in the form of a
   tree.  */

static tree
gimple_to_tree (gimple stmt)
{
  tree t;
  int rn;
  tree_ann_common_t ann;
  location_t loc;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        t = gimple_assign_rhs_to_tree (stmt);
        t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
        if (gimple_assign_nontemporal_move_p (stmt))
          MOVE_NONTEMPORAL (t) = true;
      }
      break;

    case GIMPLE_COND:
      t = gimple_cond_pred_to_tree (stmt);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
      break;

    case GIMPLE_GOTO:
      t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
      break;

    case GIMPLE_LABEL:
      t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
      break;

    case GIMPLE_RETURN:
      {
        tree retval = gimple_return_retval (stmt);

        if (retval && retval != error_mark_node)
          {
            tree result = DECL_RESULT (current_function_decl);

            /* If we are not returning the current function's RESULT_DECL,
               build an assignment to it.  */
            if (retval != result)
              {
                /* I believe that a function's RESULT_DECL is unique.  */
                gcc_assert (TREE_CODE (retval) != RESULT_DECL);

                retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
                                 result, retval);
              }
          }
        t = build1 (RETURN_EXPR, void_type_node, retval);
      }
      break;
    case GIMPLE_ASM:
      {
        size_t i, n;
        tree out, in, cl;
        const char *s;

        out = NULL_TREE;
        n = gimple_asm_noutputs (stmt);
        if (n > 0)
          {
            t = out = gimple_asm_output_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
                t = gimple_asm_output_op (stmt, i);
              }
          }

        in = NULL_TREE;
        n = gimple_asm_ninputs (stmt);
        if (n > 0)
          {
            t = in = gimple_asm_input_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
                t = gimple_asm_input_op (stmt, i);
              }
          }

        cl = NULL_TREE;
        n = gimple_asm_nclobbers (stmt);
        if (n > 0)
          {
            t = cl = gimple_asm_clobber_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
                t = gimple_asm_clobber_op (stmt, i);
              }
          }

        s = gimple_asm_string (stmt);
        t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
                    out, in, cl);
        ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
        ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
      }
      break;
    case GIMPLE_CALL:
      {
        size_t i;
        tree fn, lhs;
        tree_ann_common_t ann;

        t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

        CALL_EXPR_FN (t) = gimple_call_fn (stmt);
        TREE_TYPE (t) = gimple_call_return_type (stmt);
        CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);

        if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
          TREE_SIDE_EFFECTS (t) = 1;

        if (gimple_call_flags (stmt) & ECF_NOTHROW)
          TREE_NOTHROW (t) = 1;

        CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
        CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
        CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
        CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
        CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);

        /* If the call has a LHS then create a MODIFY_EXPR to hold it.  */
        lhs = gimple_call_lhs (stmt);
        if (lhs)
          t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);

        /* Record the original call statement, as it may be used
           to retrieve profile information during expansion.  */
        if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
            && DECL_BUILT_IN (fn))
          {
            ann = get_tree_common_ann (t);
            ann->stmt = stmt;
          }
      }
      break;
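/* The "+ 3" in the build_vl_exp call above reflects the CALL_EXPR
   layout: a CALL_EXPR is a variable-length expression whose first
   three operands hold the operand count, the callee (CALL_EXPR_FN)
   and the static chain (CALL_EXPR_STATIC_CHAIN); the actual arguments
   start at operand 3, which is what CALL_EXPR_ARG indexes past.  */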
    case GIMPLE_SWITCH:
      {
        tree label_vec;
        size_t i;
        tree elt = gimple_switch_label (stmt, 0);

        label_vec = make_tree_vec (gimple_switch_num_labels (stmt));

        if (!CASE_LOW (elt) && !CASE_HIGH (elt))
          {
            for (i = 1; i < gimple_switch_num_labels (stmt); i++)
              TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);

            /* The default case in a SWITCH_EXPR must be at the end of
               the label vector.  */
            TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
          }
        else
          {
            for (i = 0; i < gimple_switch_num_labels (stmt); i++)
              TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
          }

        t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
                    NULL, label_vec);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      t = build1 (NOP_EXPR, void_type_node, size_zero_node);
      break;

    case GIMPLE_RESX:
      t = build_resx (gimple_resx_region (stmt));
      break;
318 error ("Unrecognized GIMPLE statement during RTL expansion");
319 print_gimple_stmt (stderr, stmt, 4, 0);
324 /* Ignore any bad gimple codes if we're going to die anyhow,
325 so we can at least set TREE_ASM_WRITTEN and have the rest
326 of compilation advance without sudden ICE death. */
327 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
  /* If STMT is inside an exception region, record it in the generated
     expression.  */
  rn = lookup_stmt_eh_region (stmt);
  if (rn >= 0)
    {
      tree call = get_call_expr_in (t);

      ann = get_tree_common_ann (t);
      ann->rn = rn;

      /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
         the CALL_EXPR not the assignment statement for EH region number.  */
      if (call && call != t)
        {
          ann = get_tree_common_ann (call);
          ann->rn = rn;
        }
    }

  /* Set EXPR_LOCATION in all the embedded expressions.  */
  loc = gimple_location (stmt);
  walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);

  TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
/* Release back to GC memory allocated by gimple_to_tree.  */

static void
release_stmt_tree (gimple stmt, tree stmt_tree)
{
  tree_ann_common_t ann;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
        ggc_free (TREE_OPERAND (stmt_tree, 1));
      break;
    case GIMPLE_COND:
      ggc_free (COND_EXPR_COND (stmt_tree));
      break;
    case GIMPLE_RETURN:
      if (TREE_OPERAND (stmt_tree, 0)
          && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
        ggc_free (TREE_OPERAND (stmt_tree, 0));
      break;
    case GIMPLE_CALL:
      if (gimple_call_lhs (stmt))
        {
          ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
          if (ann)
            ggc_free (ann);
          ggc_free (TREE_OPERAND (stmt_tree, 1));
        }
      break;
    default:
      break;
    }
  ann = tree_common_ann (stmt_tree);
  if (ann)
    ggc_free (ann);
  ggc_free (stmt_tree);
}
/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note specifying PROBABILITY.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit condjump-around-jump sequence when we don't know
           how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
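/* The condjump-around-jump sequence recognized above looks like

     (jump_insn ... if cond goto L_other)   <- any_condjump_p
     (jump_insn ... goto L_target)          <- simplejump_p
     (barrier)
     (code_label L_other)

   i.e. the conditional jump skips an unconditional one, so the note
   attached to it carries the complemented probability,
   REG_BR_PROB_BASE - probability.  */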
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
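/* Example of the downward-growing case: with frame_phase == 0,
   frame_offset == -20, size == 8 and align == 8, we compute
   -20 - 8 == -28, then -28 & -8 rounds down to -32, so the variable
   lands at offset -32 and the frame grows by 12 bytes (8 for the
   variable plus 4 of alignment padding).  */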
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static inline size_t
triangular_index (size_t i, size_t j)
{
  /* The formula requires I >= J; swap the coordinates if necessary.  */
  if (i < j)
    {
      size_t t = i;
      i = j;
      j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
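/* For example, (I, J) = (3, 1) maps to 3*4/2 + 1 == 7, and the swap
   above makes (1, 3) map to the same slot, so each unordered pair of
   variables gets exactly one entry in the lower-triangular array of
   n*(n+1)/2 booleans.  */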
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use the uids of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          /* Keep track of the space remaining within I's slot.  */
          isize -= jsize;
          if (isize == 0)
            break;

          offset += jsize;
        }
    }
}
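/* For example, given three mutually non-conflicting objects of sizes
   4, 8 and 16 (sorted ascending), the iteration for the 16-byte object
   absorbs the 8-byte object at offset 0 and then the 4-byte object at
   offset 8, so all three end up sharing the 16-byte slot of their
   partition representative.  */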
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set the alignment we actually gave this decl if it isn't an SSA
         name.  If it is, we generate stack slots only accidentally, so
         the alignment isn't as important; we'll simply use the alignment
         that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
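/* The expression offset & -offset above isolates the lowest set bit
   of the (phase-adjusted) frame offset; e.g. an offset of 24 yields 8,
   the largest power-of-two byte alignment that offset can actually
   guarantee.  */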
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is supposed to use.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = TYPE_ALIGN (TREE_TYPE (var));
      else
        align = DECL_ALIGN (var);

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision has been made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
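/* For example, with -fstack-protector-all (flag_stack_protect == 2) and
   the default ssp-buffer-size of 8, a plain char buf[64] classifies as a
   large character array and gets phase 1, an int arr[8] gets phase 2,
   and a scalar gets phase 0; phase-1 objects are laid out first, in the
   vulnerable upper part of the frame nearest the guard.  */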
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree accounting for the stack space used by variables, without
   actually expanding any of them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
  return size;
}
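/* This mirrors expand_used_vars_for_block almost exactly; the only
   difference is that expand_one_var is called with REALLY_EXPAND set
   to false, so variables are sized and the conflict graph is built
   without any rtl being assigned.  */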
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;

  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}
/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
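  /* For example, with a 16-byte PREFERRED_STACK_BOUNDARY and a
     STARTING_FRAME_OFFSET of 8, off is 8 and frame_phase becomes 8:
     frame offsets are then misaligned from the boundary by exactly
     that phase, which alloc_stack_frame_space compensates for.  */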
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        TREE_USED (var) = 0;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        {
          expand_one_var (var, true, true);
          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
            {
              rtx rtl = DECL_RTL_IF_SET (var);

              /* Keep artificial non-ignored vars in cfun->local_decls
                 chain until instantiate_decls.  */
              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
                {
                  TREE_CHAIN (t) = cfun->local_decls;
                  cfun->local_decls = t;
                }
            }
        }
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  */

static void
maybe_cleanup_end_of_block (edge e)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      basic_block bb = e->src;

      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = gimple_cond_pred_to_tree (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif (pred, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge);
      return NULL;
    }

  jumpif (pred, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;
  tree stmt_tree = gimple_to_tree (stmt);

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt_tree);

  release_stmt_tree (stmt, stmt_tree);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, a sqrt builtin expander expands an if with a
         sibcall in the then arm and a label for the else arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall though to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          tree stmt_tree = gimple_to_tree (stmt);
          expand_expr_stmt (stmt_tree);
          release_stmt_tree (stmt, stmt_tree);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label ((rtx) *elt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      basic_block new_bb;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          new_bb = expand_gimple_cond (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else
        {
          if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              def_operand_p def_p;
              tree stmt_tree;

              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
              if (def_p != NULL)
                {
                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  if (SA.values
                      && bitmap_bit_p (SA.values,
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
                    continue;
                }
              stmt_tree = gimple_to_tree (stmt);
              last = get_last_insn ();
              expand_expr_stmt (stmt_tree);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
              release_stmt_tree (stmt, stmt_tree);
            }
        }
    }

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
        {
          set_curr_insn_source_location (e->goto_locus);
          set_curr_insn_block (e->goto_block);
          e->goto_locus = curr_insn_locator ();
        }
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
        {
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;
        }
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* If the entry edge points to the first basic block, no jump is
     needed; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}
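
/* Afterwards the CFG is roughly ENTRY -> init_block -> first real block
   (or ENTRY -> init_block -> EXIT for an empty function), with all RTL
   emitted so far collected in init_block.  */
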
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
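
/* E.g. for  { int a; { int b; } { int c; } }  the outermost BLOCK gets
   level 0 and both inner BLOCKs get level 1: the recursion descends
   into BLOCK_SUBBLOCKS while the loop walks the BLOCK_CHAIN siblings.  */
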
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
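
/* Note that the redirect loop above advances IX only past abnormal
   edges: redirecting an edge removes it from EXIT_BLOCK_PTR->preds, so
   the next candidate slides into the current index.  The subsequent
   count/frequency adjustments subtract whatever still reaches EXIT
   abnormally, clamping at zero, so the new exit block's profile covers
   only the paths that actually flow through it.  */
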
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion cannot compile array references with variable offsets
   when the array is stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   problem.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
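
/* An illustrative example of what this catches:

       struct { int v[2]; } s;
       ... = s.v[i];

   The index `i' is not invariant, so the walk above reaches the base
   decl `s' and sets TREE_ADDRESSABLE on it, keeping `s' in memory
   rather than in a single register (when its mode is not already
   BLKmode).  */
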
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will
   replace virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;
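
  /* DRAP is the dynamic realign argument pointer: once the frame has
     been realigned, incoming arguments can no longer be addressed from
     the (realigned) stack pointer, so the target supplies a separate
     register to reach them.  */
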
  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
                                            sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }
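
  /* Every partition must have RTL by now (hence the assert above):
     partitions whose base variable is not a VAR_DECL, i.e. parameters
     and the result, inherit the DECL_RTL set up by
     expand_function_start.  */
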
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  /* Update stack boundary if needed.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* Call update_stack_boundary here to update incoming stack
         boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
         TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
         incoming stack alignment to check if it is OK to perform
         sibcall optimization since sibcall optimization will only
         align the outgoing stack to incoming stack boundary.  */
      if (targetm.calls.update_stack_boundary)
        targetm.calls.update_stack_boundary ();

      /* The incoming stack frame has to be aligned at least at
         parm_stack_boundary.  */
      gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
    }

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
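
  /* Note the reassignment of BB: expand_gimple_basic_block may split
     the block (e.g. when expanding a GIMPLE_COND or a tail call) and
     returns the last block it produced, so the walk resumes after any
     blocks it created.  */
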
  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  /* Emit any insns queued on the CFG edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the
             backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }
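
  /* The insns emitted above are still attributed to the original
     GIMPLE blocks, but expansion may have introduced control flow
     inside them (e.g. for tail calls or EH).  Scanning every block
     lets find_many_sub_basic_blocks split the RTL into proper basic
     blocks and rebuild the edges.  */
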
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();

  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect                  /* todo_flags_finish */
 }
};