X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ffunction.c;h=f0a2dd613927519dfa14a79bdfea7f47ce9044f8;hp=c0339b8950e54b84fa2ea37feb10e9a014522de5;hb=d9f9327b8c6432820044bc19bc99e02e9afec1da;hpb=da72c08324c5eadcc5451ced9fc0a6c80f186517

diff --git a/gcc/function.c b/gcc/function.c
index c0339b8950e..f0a2dd61392 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -1,6 +1,6 @@
 /* Expands front end tree to back end RTL for GCC.
    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
-   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
    Free Software Foundation, Inc.

 This file is part of GCC.
@@ -63,6 +63,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
 #include "tree-gimple.h"
 #include "tree-pass.h"
 #include "predict.h"
+#include "vecprim.h"

 #ifndef LOCAL_ALIGNMENT
 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
@@ -123,9 +124,6 @@ struct machine_function * (*init_machine_status) (void);
 /* The currently compiled function.  */
 struct function *cfun = 0;

-DEF_VEC_I(int);
-DEF_VEC_ALLOC_I(int,heap);
-
 /* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
 static VEC(int,heap) *prologue;
 static VEC(int,heap) *epilogue;
@@ -197,7 +195,6 @@ static struct temp_slot *find_temp_slot_from_address (rtx);
 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
 static void pad_below (struct args_size *, enum machine_mode, tree);
 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
-static void reorder_fix_fragments (tree);
 static int all_blocks (tree, tree *);
 static tree *get_block_vector (tree, int *);
 extern tree debug_find_var_in_block_tree (tree, tree);
@@ -206,7 +203,7 @@ extern tree debug_find_var_in_block_tree (tree, tree);
 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
 static int contains (rtx, VEC(int,heap) **);
 #ifdef HAVE_return
-static void emit_return_into_block (basic_block, rtx);
+static void emit_return_into_block (basic_block);
 #endif
 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
 static rtx keep_stack_depressed (rtx);
@@ -331,11 +328,9 @@ free_after_compilation (struct function *f)
   f->x_return_label = NULL;
   f->x_naked_return_label = NULL;
   f->x_stack_slot_list = NULL;
-  f->x_tail_recursion_reentry = NULL;
+  f->x_stack_check_probe_note = NULL;
   f->x_arg_pointer_save_area = NULL;
   f->x_parm_birth_insn = NULL;
-  f->original_arg_vector = NULL;
-  f->original_decl_initial = NULL;
   f->epilogue_delay_list = NULL;
 }

@@ -358,12 +353,33 @@ get_func_frame_size (struct function *f)
 /* Return size needed for stack frame based on slots so far allocated.
    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
    the caller may have to do that.  */
+
 HOST_WIDE_INT
 get_frame_size (void)
 {
   return get_func_frame_size (cfun);
 }

+/* Issue an error message and return TRUE if frame OFFSET overflows in
+   the signed target pointer arithmetics for function FUNC.  Otherwise
+   return FALSE.  */
+
+bool
+frame_offset_overflow (HOST_WIDE_INT offset, tree func)
+{
+  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
+
+  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
+	      /* Leave room for the fixed part of the frame.  */
+	      - 64 * UNITS_PER_WORD)
+    {
+      error ("%Jtotal size of local objects too large", func);
+      return TRUE;
+    }
+
+  return FALSE;
+}
+
 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
    with machine mode MODE.
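The new frame_offset_overflow helper rejects frames whose size no longer fits in the target's signed pointer arithmetic: it folds the signed offset to an unsigned magnitude and compares against 2^(bits-1) minus a 64-word cushion for the fixed part of the frame. A minimal stand-alone sketch of the same test, with hypothetical POINTER_BITS/UNITS_PER_WORD stand-ins for the target macros used above:

    #include <stdio.h>
    #include <stdint.h>

    #define POINTER_BITS 32        /* stand-in for GET_MODE_BITSIZE (Pmode) */
    #define UNITS_PER_WORD 4       /* stand-in for the target word size */

    /* Mirror of the check in frame_offset_overflow: the frame must stay
       below 2^(POINTER_BITS-1) minus a 64-word cushion, so that signed
       frame-pointer offsets cannot wrap.  */
    static int
    frame_offset_overflows (int64_t offset, int frame_grows_downward)
    {
      uint64_t size = frame_grows_downward ? (uint64_t) -offset
                                           : (uint64_t) offset;
      uint64_t limit = ((uint64_t) 1 << (POINTER_BITS - 1))
                       - 64 * UNITS_PER_WORD;
      return size > limit;
    }

    int
    main (void)
    {
      /* A 1 MB downward-growing frame is fine; one near 2 GB overflows.  */
      printf ("%d\n", frame_offset_overflows (-0x100000, 1));    /* 0 */
      printf ("%d\n", frame_offset_overflows (-0x7fffffff, 1));  /* 1 */
      return 0;
    }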
@@ -479,20 +495,8 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
   function->x_stack_slot_list
     = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);

-  /* Try to detect frame size overflows on native platforms.  */
-#if BITS_PER_WORD >= 32
-  if ((FRAME_GROWS_DOWNWARD
-       ? (unsigned HOST_WIDE_INT) -function->x_frame_offset
-       : (unsigned HOST_WIDE_INT) function->x_frame_offset)
-      > ((unsigned HOST_WIDE_INT) 1 << (BITS_PER_WORD - 1)
-	 /* Leave room for the fixed part of the frame.  */
-	 - 64 * UNITS_PER_WORD))
-    {
-      error ("%Jtotal size of local objects too large", function->decl);
-      /* Avoid duplicate error messages as much as possible.  */
-      function->x_frame_offset = 0;
-    }
-#endif
+  if (frame_offset_overflow (function->x_frame_offset, function->decl))
+    function->x_frame_offset = 0;

   return x;
 }
@@ -539,14 +543,10 @@ insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
 static struct temp_slot **
 temp_slots_at_level (int level)
 {
+  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
+    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

-  if (!used_temp_slots)
-    VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
-
-  while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
-    VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
-
-  return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
+  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
 }

 /* Returns the maximal temporary slot level.  */
@@ -557,7 +557,7 @@ max_slot_level (void)
   if (!used_temp_slots)
     return -1;

-  return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
+  return VEC_length (temp_slot_p, used_temp_slots) - 1;
 }

 /* Moves temporary slot TEMP to LEVEL.  */
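The VARRAY-to-VEC conversion above replaces the push-NULL loop with a single VEC_safe_grow_cleared call, which extends the vector to level + 1 elements and zeroes the new tail in one step. A self-contained sketch of the same grow-and-clear idiom in plain C (the names here are illustrative, not GCC's; error handling elided):

    #include <stdlib.h>
    #include <string.h>

    struct vec { void **data; size_t len, cap; };

    /* Grow V to at least LEN elements, zero-filling the new tail --
       the behavior VEC_safe_grow_cleared provides for GCC vectors.  */
    static void
    vec_safe_grow_cleared (struct vec *v, size_t len)
    {
      if (len <= v->len)
        return;
      if (len > v->cap)
        {
          while (v->cap < len)
            v->cap = v->cap ? 2 * v->cap : 8;
          v->data = realloc (v->data, v->cap * sizeof *v->data);
        }
      memset (v->data + v->len, 0, (len - v->len) * sizeof *v->data);
      v->len = len;
    }

    /* Index-on-demand accessor in the style of temp_slots_at_level.  */
    static void **
    slot_at_level (struct vec *v, size_t level)
    {
      if (level >= v->len)
        vec_safe_grow_cleared (v, level + 1);
      return &v->data[level];
    }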
@@ -625,22 +625,30 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,

   /* Try to find an available, already-allocated temporary of the proper
      mode which meets the size and alignment requirements.  Choose the
-     smallest one with the closest alignment.  */
-  for (p = avail_temp_slots; p; p = p->next)
+     smallest one with the closest alignment.
+
+     If assign_stack_temp is called outside of the tree->rtl expansion,
+     we cannot reuse the stack slots (that may still refer to
+     VIRTUAL_STACK_VARS_REGNUM).  */
+  if (!virtuals_instantiated)
     {
-      if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
-	  && objects_must_conflict_p (p->type, type)
-	  && (best_p == 0 || best_p->size > p->size
-	      || (best_p->size == p->size && best_p->align > p->align)))
+      for (p = avail_temp_slots; p; p = p->next)
	{
-	  if (p->align == align && p->size == size)
+	  if (p->align >= align && p->size >= size
+	      && GET_MODE (p->slot) == mode
+	      && objects_must_conflict_p (p->type, type)
+	      && (best_p == 0 || best_p->size > p->size
+		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
-	      selected = p;
-	      cut_slot_from_list (selected, &avail_temp_slots);
-	      best_p = 0;
-	      break;
+	      if (p->align == align && p->size == size)
+		{
+		  selected = p;
+		  cut_slot_from_list (selected, &avail_temp_slots);
+		  best_p = 0;
+		  break;
+		}
+	      best_p = p;
	    }
-	  best_p = p;
	}
     }
@@ -755,7 +763,8 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
   if (type != 0)
     {
       MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
-      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
+      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
+				  || TREE_CODE (type) == COMPLEX_TYPE));
     }

   MEM_NOTRAP_P (slot) = 1;
@@ -804,7 +813,6 @@ assign_temp (tree type_or_decl, int keep, int memory_required,
   if (mode == BLKmode || memory_required)
     {
       HOST_WIDE_INT size = int_size_in_bytes (type);
-      tree size_tree;
       rtx tmp;

       /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
@@ -813,20 +821,10 @@ assign_temp (tree type_or_decl, int keep, int memory_required,
	size = 1;

       /* Unfortunately, we don't yet know how to allocate variable-sized
-	 temporaries.  However, sometimes we have a fixed upper limit on
-	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
-	 instead.  This is the case for Chill variable-sized strings.  */
-      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
-	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
-	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
-	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
-
-      /* If we still haven't been able to get a size, see if the language
-	 can compute a maximum size.  */
-      if (size == -1
-	  && (size_tree = lang_hooks.types.max_size (type)) != 0
-	  && host_integerp (size_tree, 1))
-	size = tree_low_cst (size_tree, 1);
+	 temporaries.  However, sometimes we can find a fixed upper limit on
+	 the size, so try that instead.  */
+      else if (size == -1)
+	size = max_int_size_in_bytes (type);

       /* The size of the temporary may be too large to fit into an integer.  */
       /* ??? Not sure this should happen except for user silliness, so limit
@@ -1213,12 +1211,12 @@ static int cfa_offset;
    `current_function_outgoing_args_size'.  Nevertheless, we must allow
    for it when allocating stack dynamic objects.  */

-#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
+#if defined(REG_PARM_STACK_SPACE)
 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
 ((ACCUMULATE_OUTGOING_ARGS						      \
-  ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
- + (STACK_POINTER_OFFSET)) \
-
+  ? (current_function_outgoing_args_size				      \
+     + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL)))  \
+  : 0) + (STACK_POINTER_OFFSET))
 #else
 #define STACK_DYNAMIC_OFFSET(FNDECL)	\
 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
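The reuse loop that the hunk above wraps in the virtuals_instantiated guard is a best-fit search over a free list: an exact size/align match wins immediately, otherwise the smallest usable slot with the closest alignment is remembered. A hedged, self-contained sketch of that selection policy in plain C (toy slot type, not GCC's temp_slot):

    #include <stddef.h>

    struct slot { size_t size, align; struct slot *next; };

    /* Best-fit search in the spirit of the loop above: prefer an exact
       size/align match (stop immediately), otherwise remember the
       smallest usable slot with the closest alignment.  */
    struct slot *
    pick_slot (struct slot *avail, size_t size, size_t align)
    {
      struct slot *best = NULL, *p;

      for (p = avail; p; p = p->next)
        if (p->size >= size && p->align >= align
            && (best == NULL || best->size > p->size
                || (best->size == p->size && best->align > p->align)))
          {
            if (p->size == size && p->align == align)
              return p;            /* exact fit: cannot do better */
            best = p;
          }
      return best;
    }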
@@ -1524,7 +1522,14 @@ instantiate_virtual_regs_in_insn (rtx insn)
	     Validate the new value vs the insn predicate.  Note that
	     asm insns will have insn_code -1 here.  */
	  if (!safe_insn_predicate (insn_code, i, x))
-	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
+	    {
+	      start_sequence ();
+	      x = force_reg (insn_data[insn_code].operand[i].mode, x);
+	      seq = get_insns ();
+	      end_sequence ();
+	      if (seq)
+		emit_insn_before (seq, insn);
+	    }

	  *recog_data.operand_loc[i] = recog_data.operand[i] = x;
	  any_change = true;
@@ -1535,7 +1540,7 @@ instantiate_virtual_regs_in_insn (rtx insn)
       /* Propagate operand changes into the duplicates.  */
       for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
-	  = recog_data.operand[(unsigned)recog_data.dup_num[i]];
+	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

       /* Force re-recognition of the instruction for validation.  */
       INSN_CODE (insn) = -1;
@@ -1590,6 +1595,22 @@ instantiate_decl (rtx x)
   for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
 }

+/* Helper for instantiate_decls called via walk_tree: Process all decls
+   in the given DECL_VALUE_EXPR.  */
+
+static tree
+instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+{
+  tree t = *tp;
+  if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
+    {
+      *walk_subtrees = 0;
+      if (DECL_P (t) && DECL_RTL_SET_P (t))
+	instantiate_decl (DECL_RTL (t));
+    }
+  return NULL;
+}
+
 /* Subroutine of instantiate_decls: Process all decls in the given
    BLOCK node and all its subblocks.  */

@@ -1599,8 +1620,15 @@ instantiate_decls_1 (tree let)
   tree t;

   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
-    if (DECL_RTL_SET_P (t))
-      instantiate_decl (DECL_RTL (t));
+    {
+      if (DECL_RTL_SET_P (t))
+	instantiate_decl (DECL_RTL (t));
+      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
+	{
+	  tree v = DECL_VALUE_EXPR (t);
+	  walk_tree (&v, instantiate_expr, NULL, NULL);
+	}
+    }

   /* Process all subblocks.  */
   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
@@ -1620,6 +1648,11 @@ instantiate_decls (tree fndecl)
     {
       instantiate_decl (DECL_RTL (decl));
       instantiate_decl (DECL_INCOMING_RTL (decl));
+      if (DECL_HAS_VALUE_EXPR_P (decl))
+	{
+	  tree v = DECL_VALUE_EXPR (decl);
+	  walk_tree (&v, instantiate_expr, NULL, NULL);
+	}
     }

   /* Now process all variables defined in the function or its subblocks.  */
@@ -1629,7 +1662,7 @@ instantiate_decls (tree fndecl)
 /* Pass through the INSNS of function FNDECL and convert virtual
    register references to hard register references.  */

-void
+static unsigned int
 instantiate_virtual_regs (void)
 {
   rtx insn;
@@ -1681,6 +1714,7 @@ instantiate_virtual_regs (void)
   /* Indicate that, from now on, assign_stack_local should use
      frame_pointer_rtx.  */
   virtuals_instantiated = 1;
+  return 0;
 }

 struct tree_opt_pass pass_instantiate_virtual_regs =
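The new instantiate_expr helper follows GCC's walk_tree callback contract: the walker hands each subtree to the callback through TP, the callback may clear *WALK_SUBTREES to prune the walk below the current node, and any non-NULL return value aborts the walk. A self-contained model of that contract (plain C with a hypothetical two-child node type, not GCC's tree):

    #include <stdio.h>

    struct node { int is_decl; struct node *kids[2]; };

    typedef struct node *(*walk_fn) (struct node **, int *, void *);

    /* Miniature walk_tree: call FN on each node; FN may clear
       *walk_subtrees to prune, or return non-NULL to stop the walk.  */
    static struct node *
    walk (struct node **np, walk_fn fn, void *data)
    {
      int walk_subtrees = 1;
      struct node *result;
      if (*np == NULL)
        return NULL;
      if ((result = fn (np, &walk_subtrees, data)) != NULL)
        return result;
      if (walk_subtrees)
        for (int i = 0; i < 2; i++)
          if ((result = walk (&(*np)->kids[i], fn, data)) != NULL)
            return result;
      return NULL;
    }

    /* Callback in the style of instantiate_expr: act on "decl" nodes
       and prune the walk beneath them.  */
    static struct node *
    visit_decls (struct node **np, int *walk_subtrees, void *data)
    {
      (void) data;
      if ((*np)->is_decl)
        {
          *walk_subtrees = 0;
          printf ("decl found\n");
        }
      return NULL;
    }

    int
    main (void)
    {
      struct node leaf = { 1, { NULL, NULL } };
      struct node root = { 0, { &leaf, NULL } };
      struct node *r = &root;
      walk (&r, visit_decls, NULL);
      return 0;
    }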
@@ -1714,15 +1748,21 @@ aggregate_value_p (tree exp, tree fntype)

   tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

+  /* DECL node associated with FNTYPE when relevant, which we might need to
+     check for by-invisible-reference returns, typically for CALL_EXPR input
+     EXPressions.  */
+  tree fndecl = NULL_TREE;
+
   if (fntype)
     switch (TREE_CODE (fntype))
       {
       case CALL_EXPR:
-	fntype = get_callee_fndecl (fntype);
-	fntype = fntype ? TREE_TYPE (fntype) : 0;
+	fndecl = get_callee_fndecl (fntype);
+	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
	break;
       case FUNCTION_DECL:
-	fntype = TREE_TYPE (fntype);
+	fndecl = fntype;
+	fntype = TREE_TYPE (fndecl);
	break;
       case FUNCTION_TYPE:
       case METHOD_TYPE:
@@ -1737,11 +1777,23 @@ aggregate_value_p (tree exp, tree fntype)
   if (TREE_CODE (type) == VOID_TYPE)
     return 0;

+  /* If the front end has decided that this needs to be passed by
+     reference, do so.  */
   if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
       && DECL_BY_REFERENCE (exp))
     return 1;
+
+  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
+     called function RESULT_DECL, meaning the function returns in memory by
+     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
+     on the function type, which used to be the way to request such a return
+     mechanism but might now be causing troubles at gimplification time if
+     temporaries with the function type need to be created.  */
+  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
+      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
+    return 1;
+
   if (targetm.calls.return_in_memory (type, fntype))
     return 1;
   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
@@ -2596,8 +2648,10 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,

   /* Store the parm in a pseudoregister during the function, but we may
      need to do it in a wider mode.  */
+  /* This is not really promoting for a call.  However we need to be
+     consistent with assign_parm_find_data_types and expand_expr_real_1.  */
   promoted_nominal_mode
-    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
+    = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);

   parmreg = gen_reg_rtx (promoted_nominal_mode);
@@ -2747,20 +2801,14 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		  continue;

		if (SET_DEST (set) == regno_reg_rtx [regnoi])
-		  REG_NOTES (sinsn)
-		    = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
-					 REG_NOTES (sinsn));
+		  set_unique_reg_note (sinsn, REG_EQUIV, stacki);
		else if (SET_DEST (set) == regno_reg_rtx [regnor])
-		  REG_NOTES (sinsn)
-		    = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
-					 REG_NOTES (sinsn));
+		  set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	      }
	  }
       else if ((set = single_set (linsn)) != 0
	       && SET_DEST (set) == parmreg)
-	REG_NOTES (linsn)
-	  = gen_rtx_EXPR_LIST (REG_EQUIV,
-			       data->stack_parm, REG_NOTES (linsn));
+	set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
     }

   /* For pointer data type, suggest pointer register.  */
@@ -3155,22 +3203,21 @@ gimplify_parameters (void)
	    }
	  else
	    {
-	      tree ptr_type, addr, args;
+	      tree ptr_type, addr;

	      ptr_type = build_pointer_type (type);
	      addr = create_tmp_var (ptr_type, get_name (parm));
	      DECL_IGNORED_P (addr) = 0;
	      local = build_fold_indirect_ref (addr);

-	      args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
	      t = built_in_decls[BUILT_IN_ALLOCA];
-	      t = build_function_call_expr (t, args);
+	      t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
	      t = fold_convert (ptr_type, t);
-	      t = build2 (MODIFY_EXPR, void_type_node, addr, t);
+	      t = build_gimple_modify_stmt (addr, t);
	      gimplify_and_add (t, &stmts);
	    }

-	  t = build2 (MODIFY_EXPR, void_type_node, local, parm);
+	  t = build_gimple_modify_stmt (local, parm);
	  gimplify_and_add (t, &stmts);

	  SET_DECL_VALUE_EXPR (parm, local);
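The gimplify_parameters hunk above drops the explicit tree_cons argument list in favor of the variadic build_call_expr interface: one constructor call with an explicit argument count, instead of allocating and chaining list cells first. A toy model of why the variadic constructor is tidier than a cons chain (hypothetical types, not GCC's):

    #include <stdarg.h>
    #include <stdio.h>

    /* Toy "call expression": a callee name plus a counted argument
       vector, mirroring the shape build_call_expr produces in one step.  */
    struct call_expr { const char *fn; int nargs; int args[4]; };

    static struct call_expr
    build_call (const char *fn, int nargs, ...)
    {
      struct call_expr c = { fn, nargs, { 0 } };
      va_list ap;
      va_start (ap, nargs);
      for (int i = 0; i < nargs && i < 4; i++)
        c.args[i] = va_arg (ap, int);
      va_end (ap);
      return c;
    }

    int
    main (void)
    {
      /* One constructor call replaces building and chaining list cells.  */
      struct call_expr c = build_call ("alloca", 1, 64);
      printf ("%s/%d(%d)\n", c.fn, c.nargs, c.args[0]);
      return 0;
    }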
@@ -3182,40 +3229,6 @@ gimplify_parameters (void)
   return stmts;
 }

-/* Indicate whether REGNO is an incoming argument to the current function
-   that was promoted to a wider mode.  If so, return the RTX for the
-   register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
-   that REGNO is promoted from and whether the promotion was signed or
-   unsigned.  */
-
-rtx
-promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
-{
-  tree arg;
-
-  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
-       arg = TREE_CHAIN (arg))
-    if (REG_P (DECL_INCOMING_RTL (arg))
-	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
-	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
-      {
-	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
-	int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
-
-	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
-	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
-	    && mode != DECL_MODE (arg))
-	  {
-	    *pmode = DECL_MODE (arg);
-	    *punsignedp = unsignedp;
-	    return DECL_INCOMING_RTL (arg);
-	  }
-      }
-
-  return 0;
-}
-
-
 /* Compute the size and offset from the start of the stacked arguments for a
    parm passed in mode PASSED_MODE and with type TYPE.
@@ -3475,9 +3488,8 @@ setjmp_vars_warning (tree block)
	&& DECL_RTL_SET_P (decl)
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
-      warning (0, "variable %q+D might be clobbered by %<longjmp%>"
-	       " or %<vfork%>",
-	       decl);
+      warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
+	       " %<longjmp%> or %<vfork%>", decl);

   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
@@ -3496,7 +3508,8 @@ setjmp_args_warning (void)
     if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
-      warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
+      warning (OPT_Wclobbered,
+	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
 }
@@ -3529,9 +3542,6 @@ reorder_blocks (void)
   reorder_blocks_1 (get_insns (), block, &block_stack);
   BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

-  /* Remove deleted blocks from the block fragment chains.  */
-  reorder_fix_fragments (block);
-
   VEC_free (tree, heap, block_stack);
 }
@@ -3560,17 +3570,18 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
+	      tree origin;
+
+	      origin = (BLOCK_FRAGMENT_ORIGIN (block)
+			? BLOCK_FRAGMENT_ORIGIN (block)
+			: block);

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);
-		  tree origin;
-
-		  origin = (BLOCK_FRAGMENT_ORIGIN (block)
-			    ? BLOCK_FRAGMENT_ORIGIN (block)
-			    : block);
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
@@ -3587,10 +3598,13 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
+		  if (block != origin)
+		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
+
		  BLOCK_SUPERCONTEXT (block) = current_block;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
-		  current_block = block;
+		  current_block = origin;
		}
	      VEC_safe_push (tree, heap, *p_block_stack, block);
	    }
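The reorder_blocks_1 change hoists the fragment-origin computation so the nesting invariant can be asserted and so the walk continues from the origin block rather than the fragment. A small self-contained model of the BLOCK fragment linkage the code maintains when it creates a new fragment (illustrative struct, not GCC's tree node; allocation error handling elided):

    #include <stdlib.h>

    /* A scope block may be split into fragments when its code lands in
       disjoint address ranges; fragments point back to the origin, and
       the origin heads a chain of all its fragments.  */
    struct blk
    {
      struct blk *fragment_origin;   /* NULL on the origin itself */
      struct blk *fragment_chain;    /* next fragment of the same origin */
    };

    /* Create a new fragment of BLOCK, linked the way reorder_blocks_1
       links new_block: onto the front of the origin's fragment chain.  */
    struct blk *
    make_fragment (struct blk *block)
    {
      struct blk *origin
        = block->fragment_origin ? block->fragment_origin : block;
      struct blk *frag = calloc (1, sizeof *frag);

      frag->fragment_origin = origin;
      frag->fragment_chain = origin->fragment_chain;
      origin->fragment_chain = frag;
      return frag;
    }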
@@ -3605,61 +3619,6 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
	}
     }
 }

-/* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
-   appears in the block tree, select one of the fragments to become
-   the new origin block.  */
-
-static void
-reorder_fix_fragments (tree block)
-{
-  while (block)
-    {
-      tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
-      tree new_origin = NULL_TREE;
-
-      if (dup_origin)
-	{
-	  if (! TREE_ASM_WRITTEN (dup_origin))
-	    {
-	      new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
-
-	      /* Find the first of the remaining fragments.  There must
-		 be at least one -- the current block.  */
-	      while (! TREE_ASM_WRITTEN (new_origin))
-		new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
-	      BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
-	    }
-	}
-      else if (! dup_origin)
-	new_origin = block;
-
-      /* Re-root the rest of the fragments to the new origin.  In the
-	 case that DUP_ORIGIN was null, that means BLOCK was the origin
-	 of a chain of fragments and we want to remove those fragments
-	 that didn't make it to the output.  */
-      if (new_origin)
-	{
-	  tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
-	  tree chain = *pp;
-
-	  while (chain)
-	    {
-	      if (TREE_ASM_WRITTEN (chain))
-		{
-		  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
-		  *pp = chain;
-		  pp = &BLOCK_FRAGMENT_CHAIN (chain);
-		}
-	      chain = BLOCK_FRAGMENT_CHAIN (chain);
-	    }
-	  *pp = NULL_TREE;
-	}
-
-      reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
-      block = BLOCK_CHAIN (block);
-    }
-}
-
 /* Reverse the order of elements in the chain T of blocks,
    and return the new head of the chain (old last element).  */
@@ -3716,7 +3675,7 @@ get_block_vector (tree block, int *n_blocks_p)
   tree *block_vector;

   *n_blocks_p = all_blocks (block, NULL);
-  block_vector = xmalloc (*n_blocks_p * sizeof (tree));
+  block_vector = XNEWVEC (tree, *n_blocks_p);
   all_blocks (block, block_vector);

   return block_vector;
@@ -3774,6 +3733,14 @@ debug_find_var_in_block_tree (tree var, tree block)
   return NULL_TREE;
 }

+
+/* Return value of funcdef and increase it.  */
+int
+get_next_funcdef_no (void)
+{
+  return funcdef_no++;
+}
+
 /* Allocate a function structure for FNDECL and set its contents
    to the defaults.  */

@@ -3788,7 +3755,7 @@ allocate_struct_function (tree fndecl)
   cfun->stack_alignment_needed = STACK_BOUNDARY;
   cfun->preferred_stack_boundary = STACK_BOUNDARY;

-  current_function_funcdef_no = funcdef_no++;
+  current_function_funcdef_no = get_next_funcdef_no ();

   cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;

@@ -3896,7 +3863,7 @@ init_function_start (tree subr)
 /* Make sure all values used by the optimization passes have sane
    defaults.  */
-void
+unsigned int
 init_function_for_compilation (void)
 {
   reg_renumber = 0;
@@ -3906,6 +3873,7 @@ init_function_for_compilation (void)
   gcc_assert (VEC_length (int, prologue) == 0);
   gcc_assert (VEC_length (int, epilogue) == 0);
   gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
+  return 0;
 }

 struct tree_opt_pass pass_init_function =
@@ -4071,7 +4039,7 @@ expand_function_start (tree subr)
       else
 #endif
	{
-	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
+	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the
	     value.  If it is passed as an argument, assign_parms will
	     take care of it.  */
@@ -4175,8 +4143,8 @@ expand_function_start (tree subr)
      as opposed to parm setup.  */
   emit_note (NOTE_INSN_FUNCTION_BEG);

-  if (!NOTE_P (get_last_insn ()))
-    emit_note (NOTE_INSN_DELETED);
+  gcc_assert (NOTE_P (get_last_insn ()));
+
   parm_birth_insn = get_last_insn ();

   if (current_function_profile)
     {
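get_block_vector now allocates through libiberty's XNEWVEC macro instead of a bare xmalloc. The macro puts the element type into both the cast and the sizeof, so the two cannot silently diverge the way they can in a hand-written malloc expression. A sketch of the idiom with a local stand-in macro (GCC's real XNEWVEC lives in include/libiberty.h and calls xmalloc, which aborts on failure):

    #include <stdlib.h>

    /* Typed allocation in the style of libiberty's XNEWVEC: the cast
       and the sizeof both come from the named type.  */
    #define XNEWVEC(T, N) ((T *) malloc (sizeof (T) * (N)))

    int
    main (void)
    {
      int n = 16;
      double *v = XNEWVEC (double, n);  /* vs. malloc (n * sizeof (double)) */
      if (v)
        {
          v[0] = 1.0;
          free (v);
        }
      return 0;
    }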
@@ -4186,10 +4154,10 @@ expand_function_start (tree subr)
 #endif
     }

-  /* After the display initializations is where the tail-recursion label
-     should go, if we end up needing one.  Ensure we have a NOTE here
-     since some things (like trampolines) get placed before this.  */
-  tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
+  /* After the display initializations is where the stack checking
+     probe should go.  */
+  if(flag_stack_check)
+    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);

   /* Make sure there is a line number after the function entry setup code.  */
   force_next_line_note ();
@@ -4267,7 +4235,7 @@ do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
   emit_insn (gen_rtx_USE (VOIDmode, reg));
 }

-void
+static void
 use_return_register (void)
 {
   diddle_return_value (do_use_return_reg, NULL);
@@ -4315,7 +4283,7 @@ expand_function_end (void)
	      GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
	  seq = get_insns ();
	  end_sequence ();
-	  emit_insn_before (seq, tail_recursion_reentry);
+	  emit_insn_before (seq, stack_check_probe_note);
	  break;
	}
     }
@@ -4334,27 +4302,6 @@ expand_function_end (void)
   clear_pending_stack_adjust ();
   do_pending_stack_adjust ();

-  /* @@@ This is a kludge.  We want to ensure that instructions that
-     may trap are not moved into the epilogue by scheduling, because
-     we don't always emit unwind information for the epilogue.
-     However, not all machine descriptions define a blockage insn, so
-     emit an ASM_INPUT to act as one.  */
-  if (flag_non_call_exceptions)
-    emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
-
-  /* Mark the end of the function body.
-     If control reaches this insn, the function can drop through
-     without returning a value.  */
-  emit_note (NOTE_INSN_FUNCTION_END);
-
-  /* Must mark the last line number note in the function, so that the test
-     coverage code can avoid counting the last line twice.  This just tells
-     the code to ignore the immediately following line note, since there
-     already exists a copy of this note somewhere above.  This line number
-     note is still needed for debugging though, so we can't delete it.  */
-  if (flag_test_coverage)
-    emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
-
   /* Output a linenumber for the end of the function.
      SDB depends on this.  */
   force_next_line_note ();
@@ -4373,10 +4320,13 @@ expand_function_end (void)
   /* Output the label for the actual return from the function.  */
   emit_label (return_label);

-  /* Let except.c know where it should emit the call to unregister
-     the function context for sjlj exceptions.  */
-  if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
-    sjlj_emit_function_exit_after (get_last_insn ());
+  if (USING_SJLJ_EXCEPTIONS)
+    {
+      /* Let except.c know where it should emit the call to unregister
+	 the function context for sjlj exceptions.  */
+      if (flag_exceptions)
+	sjlj_emit_function_exit_after (get_last_insn ());
+    }

   /* If this is an implementation of throw, do what's necessary to
      communicate between __builtin_eh_return and the epilogue.  */
@@ -4518,6 +4468,14 @@ expand_function_end (void)
   /* Output the label for the naked return from the function.  */
   emit_label (naked_return_label);

+  /* @@@ This is a kludge.  We want to ensure that instructions that
+     may trap are not moved into the epilogue by scheduling, because
+     we don't always emit unwind information for the epilogue.
+     However, not all machine descriptions define a blockage insn, so
+     emit an ASM_INPUT to act as one.  */
+  if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
+    emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
+
   /* If stack protection is enabled for this function, check the guard.  */
   if (cfun->stack_protect_guard)
     stack_protect_epilogue ();
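The relocated kludge emits an empty ASM_INPUT so the scheduler treats it as a blockage and cannot hoist trapping instructions past it into the epilogue. At the source level the closest analogue is an empty volatile asm, which GCC likewise refuses to move memory accesses across; a small sketch of that barrier idiom (GNU C, illustrative of the concept rather than of this RTL change):

    /* An empty volatile asm with a "memory" clobber: the compiler may
       not reorder memory accesses across it, much like the ASM_INPUT
       blockage described in the comment above.  */
    static inline void
    compiler_barrier (void)
    {
      __asm__ __volatile__ ("" ::: "memory");
    }

    int
    main (void)
    {
      volatile int guard = 1;
      int x = guard;          /* forced to happen before the barrier */
      compiler_barrier ();
      return x;
    }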
@@ -4647,11 +4605,9 @@ sibcall_epilogue_contains (rtx insn)
    block_for_insn appropriately.  */

 static void
-emit_return_into_block (basic_block bb, rtx line_note)
+emit_return_into_block (basic_block bb)
 {
   emit_jump_insn_after (gen_return (), BB_END (bb));
-  if (line_note)
-    emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
 }
 #endif /* HAVE_return */

@@ -5117,18 +5073,6 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
      if (BB_HEAD (last) == label && LABEL_P (label))
	{
	  edge_iterator ei2;
-	  rtx epilogue_line_note = NULL_RTX;
-
-	  /* Locate the line number associated with the closing brace,
-	     if we can find one.  */
-	  for (seq = get_last_insn ();
-	       seq && ! active_insn_p (seq);
-	       seq = PREV_INSN (seq))
-	    if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
-	      {
-		epilogue_line_note = seq;
-		break;
-	      }

	  for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
	    {
@@ -5152,7 +5096,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
		 with a simple return instruction.  */
	      if (simplejump_p (jump))
		{
-		  emit_return_into_block (bb, epilogue_line_note);
+		  emit_return_into_block (bb);
		  delete_insn (jump);
		}

@@ -5189,7 +5133,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
	     this is still reachable will be determined later.  */

	  emit_barrier_after (BB_END (last));
-	  emit_return_into_block (last, epilogue_line_note);
+	  emit_return_into_block (last);
	  epilogue_end = BB_END (last);
	  single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
	  goto epilogue_done;
@@ -5251,7 +5195,8 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
-	if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
+	if (cur_bb->index >= NUM_FIXED_BLOCKS
+	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
@@ -5290,61 +5235,6 @@ epilogue_done:
     }
 #endif

-#ifdef HAVE_prologue
-  /* This is probably all useless now that we use locators.  */
-  if (prologue_end)
-    {
-      rtx insn, prev;
-
-      /* GDB handles `break f' by setting a breakpoint on the first
-	 line note after the prologue.  Which means (1) that if
-	 there are line number notes before where we inserted the
-	 prologue we should move them, and (2) we should generate a
-	 note before the end of the first basic block, if there isn't
-	 one already there.
-
-	 ??? This behavior is completely broken when dealing with
-	 multiple entry functions.  We simply place the note always
-	 into first basic block and let alternate entry points
-	 to be missed.
-       */
-
-      for (insn = prologue_end; insn; insn = prev)
-	{
-	  prev = PREV_INSN (insn);
-	  if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
-	    {
-	      /* Note that we cannot reorder the first insn in the
-		 chain, since rest_of_compilation relies on that
-		 remaining constant.  */
-	      if (prev == NULL)
-		break;
-	      reorder_insns (insn, insn, prologue_end);
-	    }
-	}
-
-      /* Find the last line number note in the first block.  */
-      for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
-	   insn != prologue_end && insn;
-	   insn = PREV_INSN (insn))
-	if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
-	  break;
-
-      /* If we didn't find one, make a copy of the first line number
-	 we run across.  */
-      if (! insn)
-	{
-	  for (insn = next_active_insn (prologue_end);
-	       insn;
-	       insn = PREV_INSN (insn))
-	    if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
-	      {
-		emit_note_copy_after (insn, prologue_end);
-		break;
-	      }
-	}
-    }
-#endif
 #ifdef HAVE_epilogue
   if (epilogue_end)
     {
       rtx insn, next;

       /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
-	 of such a note.  Also move the NOTE_INSN_FUNCTION_END and (possibly)
+	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
       for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
-	      && (NOTE_LINE_NUMBER (insn) > 0
-		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
-		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
+	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
     }
 #endif
@@ -5465,8 +5353,8 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)

 void
 reset_block_changes (void)
 {
-  VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
-  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
+  cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
+  VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
 }

 /* Record the boundary for BLOCK.  */
@@ -5482,17 +5370,17 @@ record_block_change (tree block)
   if(!cfun->ib_boundaries_block)
     return;

-  last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
-  VARRAY_POP (cfun->ib_boundaries_block);
+  last_block = VEC_pop (tree, cfun->ib_boundaries_block);
   n = get_max_uid ();
-  for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
-    VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
+  for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
+    VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);

-  VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
+  VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
 }

 /* Finishes record of boundaries.  */
-void finalize_block_changes (void)
+void
+finalize_block_changes (void)
 {
   record_block_change (DECL_INITIAL (current_function_decl));
 }
@@ -5503,17 +5391,17 @@ check_block_change (rtx insn, tree *block)
 {
   unsigned uid = INSN_UID (insn);

-  if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
+  if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
     return;

-  *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
+  *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
 }

 /* Releases the ib_boundaries_block records.  */
 void
 free_block_changes (void)
 {
-  cfun->ib_boundaries_block = NULL;
+  VEC_free (tree, gc, cfun->ib_boundaries_block);
 }

 /* Returns the name of the current function.  */
@@ -5524,13 +5412,42 @@ current_function_name (void)
 }

-static void
+static unsigned int
 rest_of_handle_check_leaf_regs (void)
 {
 #ifdef LEAF_REGISTERS
   current_function_uses_only_leaf_regs = optimize > 0
					 && only_leaf_regs_used ()
					 && leaf_function_p ();
 #endif
+  return 0;
+}
+
+/* Insert a TYPE into the used types hash table of CFUN.  */
+static void
+used_types_insert_helper (tree type, struct function *func)
+{
+  if (type != NULL && func != NULL)
+    {
+      void **slot;
+
+      if (func->used_types_hash == NULL)
+	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
+						 htab_eq_pointer, NULL);
+      slot = htab_find_slot (func->used_types_hash, type, INSERT);
+      if (*slot == NULL)
+	*slot = type;
+    }
+}
+
+/* Given a type, insert it into the used hash table in cfun.  */
+void
+used_types_insert (tree t)
+{
+  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
+    t = TREE_TYPE (t);
+  t = TYPE_MAIN_VARIANT (t);
+  if (debug_info_level > DINFO_LEVEL_NONE)
+    used_types_insert_helper (t, cfun);
 }

 struct tree_opt_pass pass_leaf_regs =
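used_types_insert_helper relies on libiberty's hashtab INSERT protocol: htab_find_slot returns a pointer to the slot for the key, and a NULL slot means the element was absent and may be stored in place, so each type is recorded exactly once. A self-contained sketch of that insert-once pattern with a toy open-addressing table keyed by pointer identity (plain C, not GCC's htab):

    #include <stdio.h>
    #include <stddef.h>

    #define TABLE_SIZE 37

    static const void *table[TABLE_SIZE];

    /* Toy htab_find_slot (..., INSERT): probe for KEY by pointer
       identity and return the slot where it lives or should be stored.
       Assumes the table never fills completely.  */
    static const void **
    find_slot (const void *key)
    {
      size_t i = ((size_t) key) % TABLE_SIZE;
      while (table[i] != NULL && table[i] != key)
        i = (i + 1) % TABLE_SIZE;
      return &table[i];
    }

    int
    main (void)
    {
      static int some_type;              /* stands in for a tree node */
      const void **slot = find_slot (&some_type);
      if (*slot == NULL)                 /* absent: record it once */
        *slot = &some_type;
      printf ("inserted: %d\n", *slot == (const void *) &some_type);
      return 0;
    }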