X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fcfgexpand.c;h=a943eff6ec1d25c154785e50f1778a77dd294730;hb=97ba552253e2473141a58a0829fe797af9660601;hp=9839d098375536f1b01953027d99ac47bb538567;hpb=4a121d41abbdaf9d355756f3c335f3a52f7e8809;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c index 9839d098375..a943eff6ec1 100644 --- a/gcc/cfgexpand.c +++ b/gcc/cfgexpand.c @@ -1,11 +1,11 @@ /* A pass for lowering trees to RTL. - Copyright (C) 2004, 2005 Free Software Foundation, Inc. + Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 2, or (at your option) +the Free Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, @@ -14,9 +14,8 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to -the Free Software Foundation, 51 Franklin Street, Fifth Floor, -Boston, MA 02110-1301, USA. */ +along with GCC; see the file COPYING3. If not see +. */ #include "config.h" #include "system.h" @@ -39,13 +38,362 @@ Boston, MA 02110-1301, USA. */ #include "toplev.h" #include "debug.h" #include "params.h" +#include "tree-inline.h" +#include "value-prof.h" +#include "target.h" + + +/* Return an expression tree corresponding to the RHS of GIMPLE + statement STMT. */ + +tree +gimple_assign_rhs_to_tree (gimple stmt) +{ + tree t; + enum gimple_rhs_class class; + + class = get_gimple_rhs_class (gimple_expr_code (stmt)); + + if (class == GIMPLE_BINARY_RHS) + t = build2 (gimple_assign_rhs_code (stmt), + TREE_TYPE (gimple_assign_lhs (stmt)), + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt)); + else if (class == GIMPLE_UNARY_RHS) + t = build1 (gimple_assign_rhs_code (stmt), + TREE_TYPE (gimple_assign_lhs (stmt)), + gimple_assign_rhs1 (stmt)); + else if (class == GIMPLE_SINGLE_RHS) + t = gimple_assign_rhs1 (stmt); + else + gcc_unreachable (); + + return t; +} + +/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND + statement STMT. */ + +static tree +gimple_cond_pred_to_tree (gimple stmt) +{ + return build2 (gimple_cond_code (stmt), boolean_type_node, + gimple_cond_lhs (stmt), gimple_cond_rhs (stmt)); +} + +/* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression + inside *TP. DATA is the location to set. */ + +static tree +set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data) +{ + location_t *loc = (location_t *) data; + if (EXPR_P (*tp)) + SET_EXPR_LOCATION (*tp, *loc); + + return NULL_TREE; +} + + +/* RTL expansion has traditionally been done on trees, so the + transition to doing it on GIMPLE tuples is very invasive to the RTL + expander. To facilitate the transition, this function takes a + GIMPLE tuple STMT and returns the same statement in the form of a + tree. 
*/ + +static tree +gimple_to_tree (gimple stmt) +{ + tree t; + int rn; + tree_ann_common_t ann; + location_t loc; + + switch (gimple_code (stmt)) + { + case GIMPLE_ASSIGN: + { + tree lhs = gimple_assign_lhs (stmt); + + t = gimple_assign_rhs_to_tree (stmt); + t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t); + if (gimple_assign_nontemporal_move_p (stmt)) + MOVE_NONTEMPORAL (t) = true; + } + break; + + case GIMPLE_COND: + t = gimple_cond_pred_to_tree (stmt); + t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE); + break; + + case GIMPLE_GOTO: + t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt)); + break; + + case GIMPLE_LABEL: + t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt)); + break; + + case GIMPLE_RETURN: + { + tree retval = gimple_return_retval (stmt); + + if (retval && retval != error_mark_node) + { + tree result = DECL_RESULT (current_function_decl); + + /* If we are not returning the current function's RESULT_DECL, + build an assignment to it. */ + if (retval != result) + { + /* I believe that a function's RESULT_DECL is unique. */ + gcc_assert (TREE_CODE (retval) != RESULT_DECL); + + retval = build2 (MODIFY_EXPR, TREE_TYPE (result), + result, retval); + } + } + t = build1 (RETURN_EXPR, void_type_node, retval); + } + break; + + case GIMPLE_ASM: + { + size_t i, n; + tree out, in, cl; + const char *s; + + out = NULL_TREE; + n = gimple_asm_noutputs (stmt); + if (n > 0) + { + t = out = gimple_asm_output_op (stmt, 0); + for (i = 1; i < n; i++) + { + TREE_CHAIN (t) = gimple_asm_output_op (stmt, i); + t = gimple_asm_output_op (stmt, i); + } + } + + in = NULL_TREE; + n = gimple_asm_ninputs (stmt); + if (n > 0) + { + t = in = gimple_asm_input_op (stmt, 0); + for (i = 1; i < n; i++) + { + TREE_CHAIN (t) = gimple_asm_input_op (stmt, i); + t = gimple_asm_input_op (stmt, i); + } + } + + cl = NULL_TREE; + n = gimple_asm_nclobbers (stmt); + if (n > 0) + { + t = cl = gimple_asm_clobber_op (stmt, 0); + for (i = 1; i < n; i++) + { + TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i); + t = gimple_asm_clobber_op (stmt, i); + } + } + + s = gimple_asm_string (stmt); + t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s), + out, in, cl); + ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt); + ASM_INPUT_P (t) = gimple_asm_input_p (stmt); + } + break; + + case GIMPLE_CALL: + { + size_t i; + tree fn; + tree_ann_common_t ann; + + t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); + + fn = gimple_call_fn (stmt); + if (TREE_CODE (fn) == FUNCTION_DECL) + CALL_EXPR_FN (t) = build1 (ADDR_EXPR, + build_pointer_type (TREE_TYPE (fn)), + fn); + else + CALL_EXPR_FN (t) = fn; + + TREE_TYPE (t) = gimple_call_return_type (stmt); + + CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt); + + for (i = 0; i < gimple_call_num_args (stmt); i++) + CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i); + + if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))) + TREE_SIDE_EFFECTS (t) = 1; + + if (gimple_call_flags (stmt) & ECF_NOTHROW) + TREE_NOTHROW (t) = 1; + + CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt); + CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt); + CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt); + CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt); + CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt); + + /* If the call has a LHS then create a MODIFY_EXPR to hold it. 
*/ + { + tree lhs = gimple_call_lhs (stmt); + + if (lhs) + t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t); + } + + /* Record the original call statement, as it may be used + to retrieve profile information during expansion. */ + if (TREE_CODE (fn) == FUNCTION_DECL && DECL_BUILT_IN (fn)) + { + ann = get_tree_common_ann (t); + ann->stmt = stmt; + } + } + break; + + case GIMPLE_SWITCH: + { + tree label_vec; + size_t i; + tree elt = gimple_switch_label (stmt, 0); + + label_vec = make_tree_vec (gimple_switch_num_labels (stmt)); + + if (!CASE_LOW (elt) && !CASE_HIGH (elt)) + { + for (i = 1; i < gimple_switch_num_labels (stmt); i++) + TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i); + + /* The default case in a SWITCH_EXPR must be at the end of + the label vector. */ + TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0); + } + else + { + for (i = 0; i < gimple_switch_num_labels (stmt); i++) + TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i); + } + + t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt), + NULL, label_vec); + } + break; + + case GIMPLE_NOP: + case GIMPLE_PREDICT: + t = build1 (NOP_EXPR, void_type_node, size_zero_node); + break; + + case GIMPLE_RESX: + t = build_resx (gimple_resx_region (stmt)); + break; + + default: + if (errorcount == 0) + { + error ("Unrecognized GIMPLE statement during RTL expansion"); + print_gimple_stmt (stderr, stmt, 4, 0); + gcc_unreachable (); + } + else + { + /* Ignore any bad gimple codes if we're going to die anyhow, + so we can at least set TREE_ASM_WRITTEN and have the rest + of compilation advance without sudden ICE death. */ + t = build1 (NOP_EXPR, void_type_node, size_zero_node); + break; + } + } + + /* If STMT is inside an exception region, record it in the generated + expression. */ + rn = lookup_stmt_eh_region (stmt); + if (rn >= 0) + { + tree call = get_call_expr_in (t); + + ann = get_tree_common_ann (t); + ann->rn = rn; + + /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up + the CALL_EXPR not the assignment statment for EH region number. */ + if (call && call != t) + { + ann = get_tree_common_ann (call); + ann->rn = rn; + } + } + + /* Set EXPR_LOCATION in all the embedded expressions. */ + loc = gimple_location (stmt); + walk_tree (&t, set_expr_location_r, (void *) &loc, NULL); + + TREE_BLOCK (t) = gimple_block (stmt); + + return t; +} + + +/* Release back to GC memory allocated by gimple_to_tree. 
*/ + +static void +release_stmt_tree (gimple stmt, tree stmt_tree) +{ + tree_ann_common_t ann; + + switch (gimple_code (stmt)) + { + case GIMPLE_ASSIGN: + if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS) + ggc_free (TREE_OPERAND (stmt_tree, 1)); + break; + case GIMPLE_COND: + ggc_free (COND_EXPR_COND (stmt_tree)); + break; + case GIMPLE_RETURN: + if (TREE_OPERAND (stmt_tree, 0) + && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR) + ggc_free (TREE_OPERAND (stmt_tree, 0)); + break; + case GIMPLE_CALL: + if (gimple_call_lhs (stmt)) + { + if (TREE_CODE (gimple_call_fn (stmt)) == FUNCTION_DECL) + ggc_free (CALL_EXPR_FN (TREE_OPERAND (stmt_tree, 1))); + ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1)); + if (ann) + ggc_free (ann); + ggc_free (TREE_OPERAND (stmt_tree, 1)); + } + else if (TREE_CODE (gimple_call_fn (stmt)) == FUNCTION_DECL) + ggc_free (CALL_EXPR_FN (stmt_tree)); + break; + default: + break; + } + ann = tree_common_ann (stmt_tree); + if (ann) + ggc_free (ann); + ggc_free (stmt_tree); +} + /* Verify that there is exactly single jump instruction since last and attach REG_BR_PROB note specifying probability. ??? We really ought to pass the probability down to RTL expanders and let it re-distribute it when the conditional expands into multiple conditionals. This is however difficult to do. */ -static void +void add_reg_br_prob_note (rtx last, int probability) { if (profile_status == PROFILE_ABSENT) @@ -65,18 +413,14 @@ add_reg_br_prob_note (rtx last, int probability) || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))) goto failed; gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); - REG_NOTES (last) - = gen_rtx_EXPR_LIST (REG_BR_PROB, - GEN_INT (REG_BR_PROB_BASE - probability), - REG_NOTES (last)); + add_reg_note (last, REG_BR_PROB, + GEN_INT (REG_BR_PROB_BASE - probability)); return; } if (!last || !JUMP_P (last) || !any_condjump_p (last)) goto failed; gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); - REG_NOTES (last) - = gen_rtx_EXPR_LIST (REG_BR_PROB, - GEN_INT (probability), REG_NOTES (last)); + add_reg_note (last, REG_BR_PROB, GEN_INT (probability)); return; failed: if (dump_file) @@ -84,10 +428,6 @@ failed: } -#ifndef LOCAL_ALIGNMENT -#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT -#endif - #ifndef STACK_ALIGNMENT_NEEDED #define STACK_ALIGNMENT_NEEDED 1 #endif @@ -127,7 +467,7 @@ static struct stack_var *stack_vars; static size_t stack_vars_alloc; static size_t stack_vars_num; -/* An array of indicies such that stack_vars[stack_vars_sorted[i]].size +/* An array of indices such that stack_vars[stack_vars_sorted[i]].size is non-decreasing. */ static size_t *stack_vars_sorted; @@ -159,10 +499,25 @@ get_decl_align_unit (tree decl) align = DECL_ALIGN (decl); align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align); - if (align > PREFERRED_STACK_BOUNDARY) - align = PREFERRED_STACK_BOUNDARY; - if (cfun->stack_alignment_needed < align) - cfun->stack_alignment_needed = align; + + if (align > MAX_SUPPORTED_STACK_ALIGNMENT) + align = MAX_SUPPORTED_STACK_ALIGNMENT; + + if (SUPPORTS_STACK_ALIGNMENT) + { + if (crtl->stack_alignment_estimated < align) + { + gcc_assert(!crtl->stack_realign_processed); + crtl->stack_alignment_estimated = align; + } + } + + /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. + So here we only make sure stack_alignment_needed >= align. 
*/ + if (crtl->stack_alignment_needed < align) + crtl->stack_alignment_needed = align; + if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed) + crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed; return align / BITS_PER_UNIT; } @@ -343,18 +698,26 @@ add_alias_set_conflicts (void) } /* A subroutine of partition_stack_vars. A comparison function for qsort, - sorting an array of indicies by the size of the object. */ + sorting an array of indices by the size of the object. */ static int stack_var_size_cmp (const void *a, const void *b) { HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size; HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size; + unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl); + unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl); if (sa < sb) return -1; if (sa > sb) return 1; + /* For stack variables of the same size use the uid of the decl + to make the sort stable. */ + if (uida < uidb) + return -1; + if (uida > uidb) + return 1; return 0; } @@ -503,7 +866,7 @@ dump_stack_var_partition (void) fputc ('\t', dump_file); print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", - stack_vars[i].offset); + stack_vars[j].offset); } } } @@ -570,11 +933,36 @@ expand_stack_vars (bool (*pred) (tree)) /* Create rtl for each variable based on their location within the partition. */ for (j = i; j != EOC; j = stack_vars[j].next) - expand_one_stack_var_at (stack_vars[j].decl, - stack_vars[j].offset + offset); + { + gcc_assert (stack_vars[j].offset <= stack_vars[i].size); + expand_one_stack_var_at (stack_vars[j].decl, + stack_vars[j].offset + offset); + } } } +/* Take into account all sizes of partitions and reset DECL_RTLs. */ +static HOST_WIDE_INT +account_stack_vars (void) +{ + size_t si, j, i, n = stack_vars_num; + HOST_WIDE_INT size = 0; + + for (si = 0; si < n; ++si) + { + i = stack_vars_sorted[si]; + + /* Skip variables that aren't partition representatives, for now. */ + if (stack_vars[i].representative != i) + continue; + + size += stack_vars[i].size; + for (j = i; j != EOC; j = stack_vars[j].next) + SET_DECL_RTL (stack_vars[j].decl, NULL); + } + return size; +} + /* A subroutine of expand_one_var. Called to immediately assign rtl to a variable to be allocated in the stack frame. */ @@ -590,33 +978,6 @@ expand_one_stack_var (tree var) expand_one_stack_var_at (var, offset); } -/* A subroutine of expand_one_var. Called to assign rtl - to a TREE_STATIC VAR_DECL. */ - -static void -expand_one_static_var (tree var) -{ - /* In unit-at-a-time all the static variables are expanded at the end - of compilation process. */ - if (flag_unit_at_a_time) - return; - /* If this is an inlined copy of a static local variable, - look up the original. */ - var = DECL_ORIGIN (var); - - /* If we've already processed this variable because of that, do nothing. */ - if (TREE_ASM_WRITTEN (var)) - return; - - /* Give the front end a chance to do whatever. In practice, this is - resolving duplicate names for IMA in C. */ - if (lang_hooks.expand_decl (var)) - return; - - /* Otherwise, just emit the variable. */ - rest_of_decl_compilation (var, 0, 0); -} - /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that will reside in a hard register. */ @@ -642,18 +1003,10 @@ expand_one_register_var (tree var) /* Note if the object is a user variable. 
*/ if (!DECL_ARTIFICIAL (var)) - { mark_user_reg (x); - /* Trust user variables which have a pointer type to really - be pointers. Do not trust compiler generated temporaries - as our type system is totally busted as it relates to - pointer arithmetic which translates into lots of compiler - generated objects with pointer types, but which are not really - pointers. */ - if (POINTER_TYPE_P (type)) - mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var)))); - } + if (POINTER_TYPE_P (type)) + mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var)))); } /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that @@ -713,31 +1066,73 @@ defer_stack_allocation (tree var, bool toplevel) /* A subroutine of expand_used_vars. Expand one variable according to its flavor. Variables to be placed on the stack are not actually - expanded yet, merely recorded. */ + expanded yet, merely recorded. + When REALLY_EXPAND is false, only add stack values to be allocated. + Return stack usage this variable is supposed to take. +*/ -static void -expand_one_var (tree var, bool toplevel) +static HOST_WIDE_INT +expand_one_var (tree var, bool toplevel, bool really_expand) { + if (SUPPORTS_STACK_ALIGNMENT + && TREE_TYPE (var) != error_mark_node + && TREE_CODE (var) == VAR_DECL) + { + unsigned int align; + + /* Because we don't know if VAR will be in register or on stack, + we conservatively assume it will be on stack even if VAR is + eventually put into register after RA pass. For non-automatic + variables, which won't be on stack, we collect alignment of + type and ignore user specified alignment. */ + if (TREE_STATIC (var) || DECL_EXTERNAL (var)) + align = TYPE_ALIGN (TREE_TYPE (var)); + else + align = DECL_ALIGN (var); + + if (crtl->stack_alignment_estimated < align) + { + /* stack_alignment_estimated shouldn't change after stack + realign decision made */ + gcc_assert(!crtl->stack_realign_processed); + crtl->stack_alignment_estimated = align; + } + } + if (TREE_CODE (var) != VAR_DECL) - lang_hooks.expand_decl (var); + ; else if (DECL_EXTERNAL (var)) ; else if (DECL_HAS_VALUE_EXPR_P (var)) ; else if (TREE_STATIC (var)) - expand_one_static_var (var); + ; else if (DECL_RTL_SET_P (var)) ; else if (TREE_TYPE (var) == error_mark_node) - expand_one_error_var (var); + { + if (really_expand) + expand_one_error_var (var); + } else if (DECL_HARD_REGISTER (var)) - expand_one_hard_reg_var (var); + { + if (really_expand) + expand_one_hard_reg_var (var); + } else if (use_register_for_decl (var)) - expand_one_register_var (var); + { + if (really_expand) + expand_one_register_var (var); + } else if (defer_stack_allocation (var, toplevel)) add_stack_var (var); else - expand_one_stack_var (var); + { + if (really_expand) + expand_one_stack_var (var); + return tree_low_cst (DECL_SIZE_UNIT (var), 1); + } + return 0; } /* A subroutine of expand_used_vars. Walk down through the BLOCK tree @@ -757,7 +1152,7 @@ expand_used_vars_for_block (tree block, bool toplevel) /* Expand all variables at this level. */ for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) if (TREE_USED (t)) - expand_one_var (t, toplevel); + expand_one_var (t, toplevel, true); this_sv_num = stack_vars_num; @@ -931,7 +1326,123 @@ create_stack_guard (void) TREE_THIS_VOLATILE (guard) = 1; TREE_USED (guard) = 1; expand_one_stack_var (guard); - cfun->stack_protect_guard = guard; + crtl->stack_protect_guard = guard; +} + +/* A subroutine of expand_used_vars. Walk down through the BLOCK tree + expanding variables. 
Those variables that can be put into registers
+   are allocated pseudos; those that can't are put on the stack.
+
+   TOPLEVEL is true if this is the outermost BLOCK.  */
+
+static HOST_WIDE_INT
+account_used_vars_for_block (tree block, bool toplevel)
+{
+  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
+  tree t;
+  HOST_WIDE_INT size = 0;
+
+  old_sv_num = toplevel ? 0 : stack_vars_num;
+
+  /* Expand all variables at this level.  */
+  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
+    if (TREE_USED (t))
+      size += expand_one_var (t, toplevel, false);
+
+  this_sv_num = stack_vars_num;
+
+  /* Expand all variables at containing levels.  */
+  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
+    size += account_used_vars_for_block (t, false);
+
+  /* Since we do not track exact variable lifetimes (which is not even
+     possible for variables whose address escapes), we mirror the block
+     tree in the interference graph.  Here we cause all variables at this
+     level, and all sublevels, to conflict.  Do make certain that a
+     variable conflicts with itself.  */
+  if (old_sv_num < this_sv_num)
+    {
+      new_sv_num = stack_vars_num;
+      resize_stack_vars_conflict (new_sv_num);
+
+      for (i = old_sv_num; i < new_sv_num; ++i)
+	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
+	  add_stack_var_conflict (i, j);
+    }
+  return size;
+}
+
+/* Prepare for expanding variables.  */
+static void
+init_vars_expansion (void)
+{
+  tree t;
+  /* Set TREE_USED on all variables in the local_decls.  */
+  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
+    TREE_USED (TREE_VALUE (t)) = 1;
+
+  /* Clear TREE_USED on all variables associated with a block scope.  */
+  clear_tree_used (DECL_INITIAL (current_function_decl));
+
+  /* Initialize local stack smashing state.  */
+  has_protected_decls = false;
+  has_short_buffer = false;
+}
+
+/* Free up stack variable graph data.  */
+static void
+fini_vars_expansion (void)
+{
+  XDELETEVEC (stack_vars);
+  XDELETEVEC (stack_vars_sorted);
+  XDELETEVEC (stack_vars_conflict);
+  stack_vars = NULL;
+  stack_vars_alloc = stack_vars_num = 0;
+  stack_vars_conflict = NULL;
+  stack_vars_conflict_alloc = 0;
+}
+
+HOST_WIDE_INT
+estimated_stack_frame_size (void)
+{
+  HOST_WIDE_INT size = 0;
+  tree t, outer_block = DECL_INITIAL (current_function_decl);
+
+  init_vars_expansion ();
+
+  /* At this point all variables on the local_decls with TREE_USED
+     set are not associated with any block scope.  Lay them out.  */
+  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
+    {
+      tree var = TREE_VALUE (t);
+
+      if (TREE_USED (var))
+        size += expand_one_var (var, true, false);
+      TREE_USED (var) = 1;
+    }
+  size += account_used_vars_for_block (outer_block, true);
+  if (stack_vars_num > 0)
+    {
+      /* Due to the way alias sets work, no variables with non-conflicting
+	 alias sets may be assigned the same address.  Add conflicts to
+	 reflect this.  */
+      add_alias_set_conflicts ();
+
+      /* If stack protection is enabled, we don't share space between
+	 vulnerable data and non-vulnerable data.  */
+      if (flag_stack_protect)
+	add_stack_protection_conflicts ();
+
+      /* Now that we have collected all stack variables, and have computed a
+	 minimal interference graph, attempt to save some stack space.  */
+      partition_stack_vars ();
+      if (dump_file)
+	dump_stack_var_partition ();
+
+      size += account_stack_vars ();
+      fini_vars_expansion ();
+    }
+  return size;
 }
 
 /* Expand all variables used in the function.  */
 
@@ -948,20 +1459,11 @@ expand_used_vars (void)
       frame_phase = off ? align - off : 0;
     }
 
-  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
-  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
-    TREE_USED (TREE_VALUE (t)) = 1;
+  init_vars_expansion ();
 
-  /* Clear TREE_USED on all variables associated with a block scope.  */
-  clear_tree_used (outer_block);
-
-  /* Initialize local stack smashing state.  */
-  has_protected_decls = false;
-  has_short_buffer = false;
-
-  /* At this point all variables on the unexpanded_var_list with TREE_USED
+  /* At this point all variables on the local_decls with TREE_USED
      set are not associated with any block scope.  Lay them out.  */
-  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
+  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
     {
       tree var = TREE_VALUE (t);
       bool expand_now = false;
@@ -992,9 +1494,9 @@ expand_used_vars (void)
       TREE_USED (var) = 1;
 
       if (expand_now)
-	expand_one_var (var, true);
+	expand_one_var (var, true, true);
     }
-  cfun->unexpanded_var_list = NULL_TREE;
+  cfun->local_decls = NULL_TREE;
 
   /* At this point, all variables within the block tree with TREE_USED
      set are actually used by the optimized function.  Lay them out.  */
@@ -1023,7 +1525,7 @@ expand_used_vars (void)
      stack guard: protect-all, alloca used, protected decls present.  */
   if (flag_stack_protect == 2
       || (flag_stack_protect
-	  && (current_function_calls_alloca || has_protected_decls)))
+	  && (cfun->calls_alloca || has_protected_decls)))
     create_stack_guard ();
 
   /* Assign rtl to each variable based on these partitions.  */
@@ -1046,14 +1548,7 @@ expand_used_vars (void)
 
   expand_stack_vars (NULL);
 
-  /* Free up stack variable graph data.  */
-  XDELETEVEC (stack_vars);
-  XDELETEVEC (stack_vars_sorted);
-  XDELETEVEC (stack_vars_conflict);
-  stack_vars = NULL;
-  stack_vars_alloc = stack_vars_num = 0;
-  stack_vars_conflict = NULL;
-  stack_vars_conflict_alloc = 0;
+  fini_vars_expansion ();
 }
 
 /* If the target requires that FRAME_OFFSET be aligned, do it.  */
@@ -1072,41 +1567,81 @@ expand_used_vars (void)
    generated for STMT should have been appended.  */
 
 static void
-maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
+maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
 {
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\n;; ");
-      print_generic_expr (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
 
       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
     }
 }
 
-/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
+/* Maps the blocks that do not contain tree labels to rtx labels.  */
+
+static struct pointer_map_t *lab_rtx_for_bb;
+
+/* Returns the label_rtx expression for a label starting basic block BB.  */
+
+static rtx
+label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
+{
+  gimple_stmt_iterator gsi;
+  tree lab;
+  gimple lab_stmt;
+  void **elt;
+
+  if (bb->flags & BB_RTL)
+    return block_label (bb);
+
+  elt = pointer_map_contains (lab_rtx_for_bb, bb);
+  if (elt)
+    return (rtx) *elt;
+
+  /* Find the tree label if it is present.  */
+
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      lab_stmt = gsi_stmt (gsi);
+      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
+	break;
+
+      lab = gimple_label_label (lab_stmt);
+      if (DECL_NONLOCAL (lab))
+	break;
+
+      return label_rtx (lab);
+    }
+
+  elt = pointer_map_insert (lab_rtx_for_bb, bb);
+  *elt = gen_label_rtx ();
+  return (rtx) *elt;
+}
+
+
+/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
Returns a new basic block if we've terminated the current basic block and created a new one. */ static basic_block -expand_gimple_cond_expr (basic_block bb, tree stmt) +expand_gimple_cond (basic_block bb, gimple stmt) { basic_block new_bb, dest; edge new_edge; edge true_edge; edge false_edge; - tree pred = COND_EXPR_COND (stmt); - tree then_exp = COND_EXPR_THEN (stmt); - tree else_exp = COND_EXPR_ELSE (stmt); + tree pred = gimple_cond_pred_to_tree (stmt); rtx last2, last; last2 = last = get_last_insn (); extract_true_false_edges_from_block (bb, &true_edge, &false_edge); - if (EXPR_LOCUS (stmt)) + if (gimple_has_location (stmt)) { - emit_line_note (*(EXPR_LOCUS (stmt))); - record_block_change (TREE_BLOCK (stmt)); + set_curr_insn_source_location (gimple_location (stmt)); + set_curr_insn_block (gimple_block (stmt)); } /* These flags have no purpose in RTL land. */ @@ -1115,31 +1650,33 @@ expand_gimple_cond_expr (basic_block bb, tree stmt) /* We can either have a pure conditional jump with one fallthru edge or two-way jump that needs to be decomposed into two basic blocks. */ - if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp)) + if (false_edge->dest == bb->next_bb) { - jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp))); + jumpif (pred, label_rtx_for_bb (true_edge->dest)); add_reg_br_prob_note (last, true_edge->probability); - maybe_dump_rtl_for_tree_stmt (stmt, last); - if (EXPR_LOCUS (then_exp)) - emit_line_note (*(EXPR_LOCUS (then_exp))); + maybe_dump_rtl_for_gimple_stmt (stmt, last); + if (true_edge->goto_locus) + set_curr_insn_source_location (true_edge->goto_locus); + false_edge->flags |= EDGE_FALLTHRU; + ggc_free (pred); return NULL; } - if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp)) + if (true_edge->dest == bb->next_bb) { - jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp))); + jumpifnot (pred, label_rtx_for_bb (false_edge->dest)); add_reg_br_prob_note (last, false_edge->probability); - maybe_dump_rtl_for_tree_stmt (stmt, last); - if (EXPR_LOCUS (else_exp)) - emit_line_note (*(EXPR_LOCUS (else_exp))); + maybe_dump_rtl_for_gimple_stmt (stmt, last); + if (false_edge->goto_locus) + set_curr_insn_source_location (false_edge->goto_locus); + true_edge->flags |= EDGE_FALLTHRU; + ggc_free (pred); return NULL; } - gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR - && TREE_CODE (else_exp) == GOTO_EXPR); - jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp))); + jumpif (pred, label_rtx_for_bb (true_edge->dest)); add_reg_br_prob_note (last, true_edge->probability); last = get_last_insn (); - expand_expr (else_exp, const0_rtx, VOIDmode, 0); + emit_jump (label_rtx_for_bb (false_edge->dest)); BB_END (bb) = last; if (BARRIER_P (BB_END (bb))) @@ -1159,15 +1696,16 @@ expand_gimple_cond_expr (basic_block bb, tree stmt) BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); update_bb_for_insn (new_bb); - maybe_dump_rtl_for_tree_stmt (stmt, last2); + maybe_dump_rtl_for_gimple_stmt (stmt, last2); - if (EXPR_LOCUS (else_exp)) - emit_line_note (*(EXPR_LOCUS (else_exp))); + if (false_edge->goto_locus) + set_curr_insn_source_location (false_edge->goto_locus); + ggc_free (pred); return new_bb; } -/* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR +/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL that has CALL_EXPR_TAILCALL set. Returns non-null if we actually generated a tail call (something that might be denied by the ABI rules governing the call; see calls.c). 
@@ -1178,23 +1716,26 @@ expand_gimple_cond_expr (basic_block bb, tree stmt) tailcall) and the normal result happens via a sqrt instruction. */ static basic_block -expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru) +expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru) { rtx last2, last; edge e; edge_iterator ei; int probability; gcov_type count; + tree stmt_tree = gimple_to_tree (stmt); last2 = last = get_last_insn (); - expand_expr_stmt (stmt); + expand_expr_stmt (stmt_tree); + + release_stmt_tree (stmt, stmt_tree); for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) if (CALL_P (last) && SIBLING_CALL_P (last)) goto found; - maybe_dump_rtl_for_tree_stmt (stmt, last2); + maybe_dump_rtl_for_gimple_stmt (stmt, last2); *can_fallthru = true; return NULL; @@ -1269,7 +1810,7 @@ expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru) BB_END (bb) = PREV_INSN (last); } - maybe_dump_rtl_for_tree_stmt (stmt, last2); + maybe_dump_rtl_for_gimple_stmt (stmt, last2); return bb; } @@ -1279,40 +1820,80 @@ expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru) static basic_block expand_gimple_basic_block (basic_block bb) { - block_stmt_iterator bsi = bsi_start (bb); - tree stmt = NULL; + gimple_stmt_iterator gsi; + gimple_seq stmts; + gimple stmt = NULL; rtx note, last; edge e; edge_iterator ei; + void **elt; if (dump_file) + fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", + bb->index); + + /* Note that since we are now transitioning from GIMPLE to RTL, we + cannot use the gsi_*_bb() routines because they expect the basic + block to be in GIMPLE, instead of RTL. Therefore, we need to + access the BB sequence directly. */ + stmts = bb_seq (bb); + bb->il.gimple = NULL; + rtl_profile_for_bb (bb); + init_rtl_bb_info (bb); + bb->flags |= BB_RTL; + + /* Remove the RETURN_EXPR if we may fall though to the exit + instead. */ + gsi = gsi_last (stmts); + if (!gsi_end_p (gsi) + && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) { - fprintf (dump_file, - "\n;; Generating RTL for tree basic block %d\n", - bb->index); + gimple ret_stmt = gsi_stmt (gsi); + + gcc_assert (single_succ_p (bb)); + gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); + + if (bb->next_bb == EXIT_BLOCK_PTR + && !gimple_return_retval (ret_stmt)) + { + gsi_remove (&gsi, false); + single_succ_edge (bb)->flags |= EDGE_FALLTHRU; + } } - init_rtl_bb_info (bb); - bb->flags |= BB_RTL; + gsi = gsi_start (stmts); + if (!gsi_end_p (gsi)) + { + stmt = gsi_stmt (gsi); + if (gimple_code (stmt) != GIMPLE_LABEL) + stmt = NULL; + } - if (!bsi_end_p (bsi)) - stmt = bsi_stmt (bsi); + elt = pointer_map_contains (lab_rtx_for_bb, bb); - if (stmt && TREE_CODE (stmt) == LABEL_EXPR) + if (stmt || elt) { last = get_last_insn (); - expand_expr_stmt (stmt); + if (stmt) + { + tree stmt_tree = gimple_to_tree (stmt); + expand_expr_stmt (stmt_tree); + release_stmt_tree (stmt, stmt_tree); + gsi_next (&gsi); + } + + if (elt) + emit_label ((rtx) *elt); /* Java emits line number notes in the top of labels. ??? Make this go away once line number notes are obsoleted. 
*/ BB_HEAD (bb) = NEXT_INSN (last); if (NOTE_P (BB_HEAD (bb))) BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); - bsi_next (&bsi); note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); - maybe_dump_rtl_for_tree_stmt (stmt, last); + maybe_dump_rtl_for_gimple_stmt (stmt, last); } else note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); @@ -1333,26 +1914,22 @@ expand_gimple_basic_block (basic_block bb) ei_next (&ei); } - for (; !bsi_end_p (bsi); bsi_next (&bsi)) + for (; !gsi_end_p (gsi); gsi_next (&gsi)) { - tree stmt = bsi_stmt (bsi); + gimple stmt = gsi_stmt (gsi); basic_block new_bb; - if (!stmt) - continue; - /* Expand this statement, then evaluate the resulting RTL and fixup the CFG accordingly. */ - if (TREE_CODE (stmt) == COND_EXPR) + if (gimple_code (stmt) == GIMPLE_COND) { - new_bb = expand_gimple_cond_expr (bb, stmt); + new_bb = expand_gimple_cond (bb, stmt); if (new_bb) return new_bb; } else { - tree call = get_call_expr_in (stmt); - if (call && CALL_EXPR_TAILCALL (call)) + if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) { bool can_fallthru; new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); @@ -1366,13 +1943,30 @@ expand_gimple_basic_block (basic_block bb) } else { + tree stmt_tree = gimple_to_tree (stmt); last = get_last_insn (); - expand_expr_stmt (stmt); - maybe_dump_rtl_for_tree_stmt (stmt, last); + expand_expr_stmt (stmt_tree); + maybe_dump_rtl_for_gimple_stmt (stmt, last); + release_stmt_tree (stmt, stmt_tree); } } } + /* Expand implicit goto. */ + FOR_EACH_EDGE (e, ei, bb->succs) + { + if (e->flags & EDGE_FALLTHRU) + break; + } + + if (e && e->dest != bb->next_bb) + { + emit_jump (label_rtx_for_bb (e->dest)); + if (e->goto_locus) + set_curr_insn_source_location (e->goto_locus); + e->flags &= ~EDGE_FALLTHRU; + } + do_pending_stack_adjust (); /* Find the block tail. The last insn in the block is the insn @@ -1412,7 +2006,7 @@ construct_init_block (void) otherwise we have to jump into proper target. */ if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) { - tree label = tree_block_label (e->dest); + tree label = gimple_block_label (e->dest); emit_jump (label_rtx (label)); flags = 0; @@ -1440,6 +2034,19 @@ construct_init_block (void) return init_block; } +/* For each lexical block, set BLOCK_NUMBER to the depth at which it is + found in the block tree. */ + +static void +set_block_levels (tree block, int level) +{ + while (block) + { + BLOCK_NUMBER (block) = level; + set_block_levels (BLOCK_SUBBLOCKS (block), level + 1); + block = BLOCK_CHAIN (block); + } +} /* Create a block containing landing pads and similar stuff. */ @@ -1452,18 +2059,17 @@ construct_exit_block (void) edge e, e2; unsigned ix; edge_iterator ei; + rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb); + + rtl_profile_for_bb (EXIT_BLOCK_PTR); /* Make sure the locus is set to the end of the function, so that epilogue line numbers and warnings are set properly. */ -#ifdef USE_MAPPED_LOCATION if (cfun->function_end_locus != UNKNOWN_LOCATION) -#else - if (cfun->function_end_locus.file) -#endif input_location = cfun->function_end_locus; /* The following insns belong to the top scope. */ - record_block_change (DECL_INITIAL (current_function_decl)); + set_curr_insn_block (DECL_INITIAL (current_function_decl)); /* Generate rtl for function exit. */ expand_function_end (); @@ -1471,6 +2077,9 @@ construct_exit_block (void) end = get_last_insn (); if (head == end) return; + /* While emitting the function end we could move end of the last basic block. 
+ */ + BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) head = NEXT_INSN (head); exit_block = create_basic_block (NEXT_INSN (head), end, @@ -1532,8 +2141,7 @@ discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, || TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR || TREE_CODE (t) == VIEW_CONVERT_EXPR - || TREE_CODE (t) == NOP_EXPR - || TREE_CODE (t) == CONVERT_EXPR) + || CONVERT_EXPR_P (t)) t = TREE_OPERAND (t, 0); if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) @@ -1558,13 +2166,77 @@ static void discover_nonconstant_array_refs (void) { basic_block bb; - block_stmt_iterator bsi; + gimple_stmt_iterator gsi; FOR_EACH_BB (bb) + for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) + { + gimple stmt = gsi_stmt (gsi); + walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); + } +} + +/* This function sets crtl->args.internal_arg_pointer to a virtual + register if DRAP is needed. Local register allocator will replace + virtual_incoming_args_rtx with the virtual register. */ + +static void +expand_stack_alignment (void) +{ + rtx drap_rtx; + unsigned int preferred_stack_boundary; + + if (! SUPPORTS_STACK_ALIGNMENT) + return; + + if (cfun->calls_alloca + || cfun->has_nonlocal_label + || crtl->has_nonlocal_goto) + crtl->need_drap = true; + + gcc_assert (crtl->stack_alignment_needed + <= crtl->stack_alignment_estimated); + + /* Update stack boundary if needed. */ + if (targetm.calls.update_stack_boundary) + targetm.calls.update_stack_boundary (); + + /* Update crtl->stack_alignment_estimated and use it later to align + stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call + exceptions since callgraph doesn't collect incoming stack alignment + in this case. */ + if (flag_non_call_exceptions + && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary) + preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; + else + preferred_stack_boundary = crtl->preferred_stack_boundary; + if (preferred_stack_boundary > crtl->stack_alignment_estimated) + crtl->stack_alignment_estimated = preferred_stack_boundary; + if (preferred_stack_boundary > crtl->stack_alignment_needed) + crtl->stack_alignment_needed = preferred_stack_boundary; + + crtl->stack_realign_needed + = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; + crtl->stack_realign_tried = crtl->stack_realign_needed; + + crtl->stack_realign_processed = true; + + /* Target has to redefine TARGET_GET_DRAP_RTX to support stack + alignment. */ + gcc_assert (targetm.calls.get_drap_rtx != NULL); + drap_rtx = targetm.calls.get_drap_rtx (); + + /* stack_realign_drap and drap_rtx must match. */ + gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); + + /* Do nothing if NULL is returned, which means DRAP is not needed. */ + if (NULL != drap_rtx) { - for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) - walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r, - NULL , NULL); + crtl->args.internal_arg_pointer = drap_rtx; + + /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is + needed. */ + fixup_tail_calls (); } } @@ -1578,7 +2250,7 @@ discover_nonconstant_array_refs (void) the expansion. */ static unsigned int -tree_expand_cfg (void) +gimple_expand_cfg (void) { basic_block bb, init_block; sbitmap blocks; @@ -1588,22 +2260,42 @@ tree_expand_cfg (void) /* Some backends want to know that we are expanding to RTL. 
*/ currently_expanding_to_rtl = 1; - /* Prepare the rtl middle end to start recording block changes. */ - reset_block_changes (); + rtl_profile_for_bb (ENTRY_BLOCK_PTR); + + insn_locators_alloc (); + if (!DECL_BUILT_IN (current_function_decl)) + set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl)); + set_curr_insn_block (DECL_INITIAL (current_function_decl)); + prologue_locator = curr_insn_locator (); + + /* Make sure first insn is a note even if we don't want linenums. + This makes sure the first insn will never be deleted. + Also, final expects a note to appear there. */ + emit_note (NOTE_INSN_DELETED); /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ discover_nonconstant_array_refs (); + targetm.expand_to_rtl_hook (); + crtl->stack_alignment_needed = STACK_BOUNDARY; + crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; + crtl->stack_alignment_estimated = STACK_BOUNDARY; + crtl->preferred_stack_boundary = STACK_BOUNDARY; + cfun->cfg->max_jumptable_ents = 0; + + /* Expand the variables recorded during gimple lowering. */ expand_used_vars (); /* Honor stack protection warnings. */ if (warn_stack_protect) { - if (current_function_calls_alloca) - warning (0, "not protecting local variables: variable length buffer"); - if (has_short_buffer && !cfun->stack_protect_guard) - warning (0, "not protecting function: no buffer at least %d bytes long", + if (cfun->calls_alloca) + warning (OPT_Wstack_protector, + "not protecting local variables: variable length buffer"); + if (has_short_buffer && !crtl->stack_protect_guard) + warning (OPT_Wstack_protector, + "not protecting function: no buffer at least %d bytes long", (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); } @@ -1619,7 +2311,7 @@ tree_expand_cfg (void) /* Initialize the stack_protect_guard field. This must happen after the call to __main (if any) so that the external decl is initialized. */ - if (cfun->stack_protect_guard) + if (crtl->stack_protect_guard) stack_protect_prologue (); /* Register rtl specific functions for cfg. */ @@ -1632,17 +2324,25 @@ tree_expand_cfg (void) FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) e->flags &= ~EDGE_EXECUTABLE; + lab_rtx_for_bb = pointer_map_create (); FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) bb = expand_gimple_basic_block (bb); + /* Expansion is used by optimization passes too, set maybe_hot_insn_p + conservatively to true until they are all profile aware. */ + pointer_map_destroy (lab_rtx_for_bb); + free_histograms (); + construct_exit_block (); + set_curr_insn_block (DECL_INITIAL (current_function_decl)); + insn_locators_finalize (); /* We're done expanding trees to RTL. */ currently_expanding_to_rtl = 0; - /* Convert tree EH labels to RTL EH labels, and clean out any unreachable - EH regions. */ + /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */ convert_from_eh_region_ranges (); + set_eh_throw_stmt_table (cfun, NULL); rebuild_jump_labels (get_insns ()); find_exception_handler_labels (); @@ -1654,8 +2354,11 @@ tree_expand_cfg (void) sbitmap_free (blocks); compact_blocks (); + + expand_stack_alignment (); + #ifdef ENABLE_CHECKING - verify_flow_info(); + verify_flow_info (); #endif /* There's no need to defer outputting this function any more; we @@ -1666,8 +2369,6 @@ tree_expand_cfg (void) more CONCATs anywhere. 
*/ generating_concat_p = 0; - finalize_block_changes (); - if (dump_file) { fprintf (dump_file, @@ -1697,14 +2398,20 @@ tree_expand_cfg (void) /* After expanding, the return labels are no longer needed. */ return_label = NULL; naked_return_label = NULL; + /* Tag the blocks with a depth number so that change_scope can find + the common parent easily. */ + set_block_levels (DECL_INITIAL (cfun->decl), 0); + default_rtl_profile (); return 0; } -struct tree_opt_pass pass_expand = +struct rtl_opt_pass pass_expand = { + { + RTL_PASS, "expand", /* name */ NULL, /* gate */ - tree_expand_cfg, /* execute */ + gimple_expand_cfg, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ @@ -1715,5 +2422,5 @@ struct tree_opt_pass pass_expand = PROP_trees, /* properties_destroyed */ 0, /* todo_flags_start */ TODO_dump_func, /* todo_flags_finish */ - 'r' /* letter */ + } };
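
The recurring pattern this patch introduces is worth summarizing: during
expansion, each GIMPLE tuple is converted to a transient tree, handed to the
existing tree expander, and immediately released back to the GC.  Below is a
condensed sketch of that loop, simplified from expand_gimple_basic_block in
the patch above.  It is illustrative only and not part of the patch: the
function name expand_stmts_sketch is hypothetical, and the real code
additionally special-cases labels, GIMPLE_COND and tail calls.

/* Condensed illustration of the transitional per-statement expansion
   loop (simplified from expand_gimple_basic_block in this patch;
   expand_stmts_sketch is a hypothetical name for exposition).  */

static void
expand_stmts_sketch (gimple_seq stmts)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      /* Convert the GIMPLE tuple into a temporary tree...  */
      tree stmt_tree = gimple_to_tree (stmt);

      /* ...expand that tree to RTL with the existing expander...  */
      expand_expr_stmt (stmt_tree);

      /* ...and give the temporary tree back to the GC at once.  */
      release_stmt_tree (stmt, stmt_tree);
    }
}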