X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ffunction.c;h=a27d8cbb19f6bc504f74d7d38243d786e2506b2a;hb=4e970fe40fcb363c66c7d513409dbff8e35c611c;hp=12ebece0fbe7270738ce05db8ca6b650a6076664;hpb=bbd335579ad975bb6f3599c460694070caf43354;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/function.c b/gcc/function.c index 12ebece0fbe..a27d8cbb19f 100644 --- a/gcc/function.c +++ b/gcc/function.c @@ -1,6 +1,6 @@ /* Expands front end tree to back end RTL for GCC. Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997, - 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 + 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. This file is part of GCC. @@ -61,8 +61,9 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA #include "target.h" #include "cfglayout.h" #include "tree-gimple.h" +#include "tree-pass.h" #include "predict.h" - +#include "vecprim.h" #ifndef LOCAL_ALIGNMENT #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT @@ -123,9 +124,6 @@ struct machine_function * (*init_machine_status) (void); /* The currently compiled function. */ struct function *cfun = 0; -DEF_VEC_I(int); -DEF_VEC_ALLOC_I(int,heap); - /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */ static VEC(int,heap) *prologue; static VEC(int,heap) *epilogue; @@ -197,7 +195,6 @@ static struct temp_slot *find_temp_slot_from_address (rtx); static void pad_to_arg_alignment (struct args_size *, int, struct args_size *); static void pad_below (struct args_size *, enum machine_mode, tree); static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **); -static void reorder_fix_fragments (tree); static int all_blocks (tree, tree *); static tree *get_block_vector (tree, int *); extern tree debug_find_var_in_block_tree (tree, tree); @@ -206,7 +203,7 @@ extern tree debug_find_var_in_block_tree (tree, tree); static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED; static int contains (rtx, VEC(int,heap) **); #ifdef HAVE_return -static void emit_return_into_block (basic_block, rtx); +static void emit_return_into_block (basic_block); #endif #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX) static rtx keep_stack_depressed (rtx); @@ -331,11 +328,9 @@ free_after_compilation (struct function *f) f->x_return_label = NULL; f->x_naked_return_label = NULL; f->x_stack_slot_list = NULL; - f->x_tail_recursion_reentry = NULL; + f->x_stack_check_probe_note = NULL; f->x_arg_pointer_save_area = NULL; f->x_parm_birth_insn = NULL; - f->original_arg_vector = NULL; - f->original_decl_initial = NULL; f->epilogue_delay_list = NULL; } @@ -358,12 +353,33 @@ get_func_frame_size (struct function *f) /* Return size needed for stack frame based on slots so far allocated. This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY; the caller may have to do that. */ + HOST_WIDE_INT get_frame_size (void) { return get_func_frame_size (cfun); } +/* Issue an error message and return TRUE if frame OFFSET overflows in + the signed target pointer arithmetics for function FUNC. Otherwise + return FALSE. */ + +bool +frame_offset_overflow (HOST_WIDE_INT offset, tree func) +{ + unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset; + + if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1)) + /* Leave room for the fixed part of the frame. 
*/ + - 64 * UNITS_PER_WORD) + { + error ("%Jtotal size of local objects too large", func); + return TRUE; + } + + return FALSE; +} + /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it with machine mode MODE. @@ -454,7 +470,7 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align, /* On a big-endian machine, if we are allocating more space than we will use, use the least significant bytes of those that are allocated. */ - if (BYTES_BIG_ENDIAN && mode != BLKmode) + if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size) bigend_correction = size - GET_MODE_SIZE (mode); /* If we have already instantiated virtual registers, return the actual @@ -474,10 +490,14 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align, function->x_frame_offset += size; x = gen_rtx_MEM (mode, addr); + MEM_NOTRAP_P (x) = 1; function->x_stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list); + if (frame_offset_overflow (function->x_frame_offset, function->decl)) + function->x_frame_offset = 0; + return x; } @@ -523,14 +543,18 @@ insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list) static struct temp_slot ** temp_slots_at_level (int level) { + if (level >= (int) VEC_length (temp_slot_p, used_temp_slots)) + { + size_t old_length = VEC_length (temp_slot_p, used_temp_slots); + temp_slot_p *p; - if (!used_temp_slots) - VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots"); - - while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots)) - VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL); + VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1); + p = VEC_address (temp_slot_p, used_temp_slots); + memset (&p[old_length], 0, + sizeof (temp_slot_p) * (level + 1 - old_length)); + } - return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level); + return &(VEC_address (temp_slot_p, used_temp_slots)[level]); } /* Returns the maximal temporary slot level. */ @@ -541,7 +565,7 @@ max_slot_level (void) if (!used_temp_slots) return -1; - return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1; + return VEC_length (temp_slot_p, used_temp_slots) - 1; } /* Moves temporary slot TEMP to LEVEL. */ @@ -609,22 +633,30 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, /* Try to find an available, already-allocated temporary of the proper mode which meets the size and alignment requirements. Choose the - smallest one with the closest alignment. */ - for (p = avail_temp_slots; p; p = p->next) + smallest one with the closest alignment. + + If assign_stack_temp is called outside of the tree->rtl expansion, + we cannot reuse the stack slots (that may still refer to + VIRTUAL_STACK_VARS_REGNUM). 
*/ + if (!virtuals_instantiated) { - if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode - && objects_must_conflict_p (p->type, type) - && (best_p == 0 || best_p->size > p->size - || (best_p->size == p->size && best_p->align > p->align))) + for (p = avail_temp_slots; p; p = p->next) { - if (p->align == align && p->size == size) + if (p->align >= align && p->size >= size + && GET_MODE (p->slot) == mode + && objects_must_conflict_p (p->type, type) + && (best_p == 0 || best_p->size > p->size + || (best_p->size == p->size && best_p->align > p->align))) { - selected = p; - cut_slot_from_list (selected, &avail_temp_slots); - best_p = 0; - break; + if (p->align == align && p->size == size) + { + selected = p; + cut_slot_from_list (selected, &avail_temp_slots); + best_p = 0; + break; + } + best_p = p; } - best_p = p; } } @@ -649,9 +681,7 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, p->size = best_p->size - rounded_size; p->base_offset = best_p->base_offset + rounded_size; p->full_size = best_p->full_size - rounded_size; - p->slot = gen_rtx_MEM (BLKmode, - plus_constant (XEXP (best_p->slot, 0), - rounded_size)); + p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size); p->align = best_p->align; p->address = 0; p->type = best_p->type; @@ -743,6 +773,7 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type); MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type)); } + MEM_NOTRAP_P (slot) = 1; return slot; } @@ -789,7 +820,6 @@ assign_temp (tree type_or_decl, int keep, int memory_required, if (mode == BLKmode || memory_required) { HOST_WIDE_INT size = int_size_in_bytes (type); - tree size_tree; rtx tmp; /* Zero sized arrays are GNU C extension. Set size to 1 to avoid @@ -798,20 +828,10 @@ assign_temp (tree type_or_decl, int keep, int memory_required, size = 1; /* Unfortunately, we don't yet know how to allocate variable-sized - temporaries. However, sometimes we have a fixed upper limit on - the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that - instead. This is the case for Chill variable-sized strings. */ - if (size == -1 && TREE_CODE (type) == ARRAY_TYPE - && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE - && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1)) - size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1); - - /* If we still haven't been able to get a size, see if the language - can compute a maximum size. */ - if (size == -1 - && (size_tree = lang_hooks.types.max_size (type)) != 0 - && host_integerp (size_tree, 1)) - size = tree_low_cst (size_tree, 1); + temporaries. However, sometimes we can find a fixed upper limit on + the size, so try that instead. */ + else if (size == -1) + size = max_int_size_in_bytes (type); /* The size of the temporary may be too large to fit into an integer. */ /* ??? Not sure this should happen except for user silliness, so limit @@ -820,7 +840,7 @@ assign_temp (tree type_or_decl, int keep, int memory_required, if (decl && size == -1 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST) { - error ("%Jsize of variable %qD is too large", decl, decl); + error ("size of variable %q+D is too large", decl); size = 1; } @@ -1211,12 +1231,6 @@ static int cfa_offset; #endif #endif -/* On most machines, the CFA coincides with the first incoming parm. 
*/ - -#ifndef ARG_POINTER_CFA_OFFSET -#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL) -#endif - /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX is a virtual register, return the equivalent hard register and set the @@ -1237,7 +1251,14 @@ instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset) else if (x == virtual_outgoing_args_rtx) new = stack_pointer_rtx, offset = out_arg_offset; else if (x == virtual_cfa_rtx) - new = arg_pointer_rtx, offset = cfa_offset; + { +#ifdef FRAME_POINTER_CFA_OFFSET + new = frame_pointer_rtx; +#else + new = arg_pointer_rtx; +#endif + offset = cfa_offset; + } else return NULL_RTX; @@ -1519,7 +1540,7 @@ instantiate_virtual_regs_in_insn (rtx insn) /* Propagate operand changes into the duplicates. */ for (i = 0; i < recog_data.n_dups; ++i) *recog_data.dup_loc[i] - = recog_data.operand[(unsigned)recog_data.dup_num[i]]; + = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]); /* Force re-recognition of the instruction for validation. */ INSN_CODE (insn) = -1; @@ -1574,6 +1595,22 @@ instantiate_decl (rtx x) for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL); } +/* Helper for instantiate_decls called via walk_tree: Process all decls + in the given DECL_VALUE_EXPR. */ + +static tree +instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) +{ + tree t = *tp; + if (! EXPR_P (t)) + { + *walk_subtrees = 0; + if (DECL_P (t) && DECL_RTL_SET_P (t)) + instantiate_decl (DECL_RTL (t)); + } + return NULL; +} + /* Subroutine of instantiate_decls: Process all decls in the given BLOCK node and all its subblocks. */ @@ -1583,8 +1620,15 @@ instantiate_decls_1 (tree let) tree t; for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) - if (DECL_RTL_SET_P (t)) - instantiate_decl (DECL_RTL (t)); + { + if (DECL_RTL_SET_P (t)) + instantiate_decl (DECL_RTL (t)); + if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t)) + { + tree v = DECL_VALUE_EXPR (t); + walk_tree (&v, instantiate_expr, NULL, NULL); + } + } /* Process all subblocks. */ for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) @@ -1604,6 +1648,11 @@ instantiate_decls (tree fndecl) { instantiate_decl (DECL_RTL (decl)); instantiate_decl (DECL_INCOMING_RTL (decl)); + if (DECL_HAS_VALUE_EXPR_P (decl)) + { + tree v = DECL_VALUE_EXPR (decl); + walk_tree (&v, instantiate_expr, NULL, NULL); + } } /* Now process all variables defined in the function or its subblocks. */ @@ -1613,7 +1662,7 @@ instantiate_decls (tree fndecl) /* Pass through the INSNS of function FNDECL and convert virtual register references to hard register references. */ -void +static unsigned int instantiate_virtual_regs (void) { rtx insn; @@ -1623,7 +1672,11 @@ instantiate_virtual_regs (void) var_offset = STARTING_FRAME_OFFSET; dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl); out_arg_offset = STACK_POINTER_OFFSET; +#ifdef FRAME_POINTER_CFA_OFFSET + cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); +#else cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); +#endif /* Initialize recognition, indicating that volatile is OK. */ init_recog (); @@ -1661,7 +1714,26 @@ instantiate_virtual_regs (void) /* Indicate that, from now on, assign_stack_local should use frame_pointer_rtx. 
*/ virtuals_instantiated = 1; + return 0; } + +struct tree_opt_pass pass_instantiate_virtual_regs = +{ + "vregs", /* name */ + NULL, /* gate */ + instantiate_virtual_regs, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + 0, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_dump_func, /* todo_flags_finish */ + 0 /* letter */ +}; + /* Return 1 if EXP is an aggregate type (or a value with aggregate type). This means a type for which function calls must pass an address to the @@ -1676,15 +1748,21 @@ aggregate_value_p (tree exp, tree fntype) tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); + /* DECL node associated with FNTYPE when relevant, which we might need to + check for by-invisible-reference returns, typically for CALL_EXPR input + EXPressions. */ + tree fndecl = NULL_TREE; + if (fntype) switch (TREE_CODE (fntype)) { case CALL_EXPR: - fntype = get_callee_fndecl (fntype); - fntype = fntype ? TREE_TYPE (fntype) : 0; + fndecl = get_callee_fndecl (fntype); + fntype = fndecl ? TREE_TYPE (fndecl) : 0; break; case FUNCTION_DECL: - fntype = TREE_TYPE (fntype); + fndecl = fntype; + fntype = TREE_TYPE (fndecl); break; case FUNCTION_TYPE: case METHOD_TYPE: @@ -1699,11 +1777,23 @@ aggregate_value_p (tree exp, tree fntype) if (TREE_CODE (type) == VOID_TYPE) return 0; + /* If the front end has decided that this needs to be passed by reference, do so. */ if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL) && DECL_BY_REFERENCE (exp)) return 1; + + /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the + called function RESULT_DECL, meaning the function returns in memory by + invisible reference. This check lets front-ends not set TREE_ADDRESSABLE + on the function type, which used to be the way to request such a return + mechanism but might now be causing troubles at gimplification time if + temporaries with the function type need to be created. */ + if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl) + && DECL_BY_REFERENCE (DECL_RESULT (fndecl))) + return 1; + if (targetm.calls.return_in_memory (type, fntype)) return 1; /* Types that are TREE_ADDRESSABLE must be constructed in memory, @@ -1714,7 +1804,7 @@ aggregate_value_p (tree exp, tree fntype) return 1; /* Make sure we have suitable call-clobbered regs to return the value in; if not, we must return it in memory. */ - reg = hard_function_value (type, 0, 0); + reg = hard_function_value (type, 0, fntype, 0); /* If we have something other than a REG (e.g. a PARALLEL), then assume it is OK. */ @@ -2001,9 +2091,8 @@ assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, /* If the parm is to be passed as a transparent union, use the type of the first field for the tests below. We have already verified that the modes are the same. */ - if (DECL_TRANSPARENT_UNION (parm) - || (TREE_CODE (passed_type) == UNION_TYPE - && TYPE_TRANSPARENT_UNION (passed_type))) + if (TREE_CODE (passed_type) == UNION_TYPE + && TYPE_TRANSPARENT_UNION (passed_type)) passed_type = TREE_TYPE (TYPE_FIELDS (passed_type)); /* See if this arg was passed by invisible reference. */ @@ -2559,8 +2648,10 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, /* Store the parm in a pseudoregister during the function, but we may need to do it in a wider mode. */ + /* This is not really promoting for a call. 
However we need to be + consistent with assign_parm_find_data_types and expand_expr_real_1. */ promoted_nominal_mode - = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0); + = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1); parmreg = gen_reg_rtx (promoted_nominal_mode); @@ -2883,22 +2974,9 @@ assign_parms (tree fndecl) { struct assign_parm_data_all all; tree fnargs, parm; - rtx internal_arg_pointer; - - /* If the reg that the virtual arg pointer will be translated into is - not a fixed reg or is the stack pointer, make a copy of the virtual - arg pointer, and address parms via the copy. The frame pointer is - considered fixed even though it is not marked as such. - The second time through, simply use ap to avoid generating rtx. */ - - if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM - || ! (fixed_regs[ARG_POINTER_REGNUM] - || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))) - internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx); - else - internal_arg_pointer = virtual_incoming_args_rtx; - current_function_internal_arg_pointer = internal_arg_pointer; + current_function_internal_arg_pointer + = targetm.calls.internal_arg_pointer (); assign_parms_initialize_all (&all); fnargs = assign_parms_augmented_arg_list (&all); @@ -2989,9 +3067,8 @@ assign_parms (tree fndecl) REG_PARM_STACK_SPACE (fndecl)); #endif - current_function_args_size - = ((current_function_args_size + STACK_BYTES - 1) - / STACK_BYTES) * STACK_BYTES; + current_function_args_size = CEIL_ROUND (current_function_args_size, + PARM_BOUNDARY / BITS_PER_UNIT); #ifdef ARGS_GROW_DOWNWARD current_function_arg_offset_rtx @@ -3037,13 +3114,8 @@ assign_parms (tree fndecl) { rtx real_decl_rtl; -#ifdef FUNCTION_OUTGOING_VALUE - real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result), - fndecl); -#else - real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result), - fndecl); -#endif + real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result), + fndecl, true); REG_FUNCTION_VALUE_P (real_decl_rtl) = 1; /* The delay slot scheduler assumes that current_function_return_rtx holds the hard register containing the return value, not a @@ -3238,7 +3310,7 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, { tree sizetree; enum direction where_pad; - int boundary; + unsigned int boundary; int reg_parm_stack_space = 0; int part_size_in_regs; @@ -3273,6 +3345,13 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, locate->where_pad = where_pad; locate->boundary = boundary; + /* Remember if the outgoing parameter requires extra alignment on the + calling function side. */ + if (boundary > PREFERRED_STACK_BOUNDARY) + boundary = PREFERRED_STACK_BOUNDARY; + if (cfun->stack_alignment_needed < boundary) + cfun->stack_alignment_needed = boundary; + #ifdef ARGS_GROW_DOWNWARD locate->slot_offset.constant = -initial_offset_ptr->constant; if (initial_offset_ptr->var) @@ -3358,10 +3437,9 @@ pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; #ifdef SPARC_STACK_BOUNDARY_HACK - /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY - higher than the real alignment of %sp. However, when it does this, - the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY. - This is a temporary hack while the sparc port is fixed. */ + /* ??? The SPARC port may claim a STACK_BOUNDARY higher than + the real alignment of %sp. 
However, when it does this, the + alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ if (SPARC_STACK_BOUNDARY_HACK) sp_offset = 0; #endif @@ -3451,9 +3529,9 @@ setjmp_vars_warning (tree block) && DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)) && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) - warning (0, "%Jvariable %qD might be clobbered by %" + warning (0, "variable %q+D might be clobbered by %" " or %", - decl, decl); + decl); } for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) @@ -3472,8 +3550,8 @@ setjmp_args_warning (void) if (DECL_RTL (decl) != 0 && REG_P (DECL_RTL (decl)) && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) - warning (0, "%Jargument %qD might be clobbered by % or %", - decl, decl); + warning (0, "argument %q+D might be clobbered by % or %", + decl); } @@ -3505,9 +3583,6 @@ reorder_blocks (void) reorder_blocks_1 (get_insns (), block, &block_stack); BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block)); - /* Remove deleted blocks from the block fragment chains. */ - reorder_fix_fragments (block); - VEC_free (tree, heap, block_stack); } @@ -3536,17 +3611,18 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) { tree block = NOTE_BLOCK (insn); + tree origin; + + origin = (BLOCK_FRAGMENT_ORIGIN (block) + ? BLOCK_FRAGMENT_ORIGIN (block) + : block); /* If we have seen this block before, that means it now spans multiple address regions. Create a new fragment. */ if (TREE_ASM_WRITTEN (block)) { tree new_block = copy_node (block); - tree origin; - origin = (BLOCK_FRAGMENT_ORIGIN (block) - ? BLOCK_FRAGMENT_ORIGIN (block) - : block); BLOCK_FRAGMENT_ORIGIN (new_block) = origin; BLOCK_FRAGMENT_CHAIN (new_block) = BLOCK_FRAGMENT_CHAIN (origin); @@ -3563,10 +3639,13 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) will cause infinite recursion. */ if (block != current_block) { + if (block != origin) + gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block); + BLOCK_SUPERCONTEXT (block) = current_block; BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); BLOCK_SUBBLOCKS (current_block) = block; - current_block = block; + current_block = origin; } VEC_safe_push (tree, heap, *p_block_stack, block); } @@ -3581,61 +3660,6 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) } } -/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer - appears in the block tree, select one of the fragments to become - the new origin block. */ - -static void -reorder_fix_fragments (tree block) -{ - while (block) - { - tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block); - tree new_origin = NULL_TREE; - - if (dup_origin) - { - if (! TREE_ASM_WRITTEN (dup_origin)) - { - new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin); - - /* Find the first of the remaining fragments. There must - be at least one -- the current block. */ - while (! TREE_ASM_WRITTEN (new_origin)) - new_origin = BLOCK_FRAGMENT_CHAIN (new_origin); - BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE; - } - } - else if (! dup_origin) - new_origin = block; - - /* Re-root the rest of the fragments to the new origin. In the - case that DUP_ORIGIN was null, that means BLOCK was the origin - of a chain of fragments and we want to remove those fragments - that didn't make it to the output. 
*/ - if (new_origin) - { - tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin); - tree chain = *pp; - - while (chain) - { - if (TREE_ASM_WRITTEN (chain)) - { - BLOCK_FRAGMENT_ORIGIN (chain) = new_origin; - *pp = chain; - pp = &BLOCK_FRAGMENT_CHAIN (chain); - } - chain = BLOCK_FRAGMENT_CHAIN (chain); - } - *pp = NULL_TREE; - } - - reorder_fix_fragments (BLOCK_SUBBLOCKS (block)); - block = BLOCK_CHAIN (block); - } -} - /* Reverse the order of elements in the chain T of blocks, and return the new head of the chain (old last element). */ @@ -3692,7 +3716,7 @@ get_block_vector (tree block, int *n_blocks_p) tree *block_vector; *n_blocks_p = all_blocks (block, NULL); - block_vector = xmalloc (*n_blocks_p * sizeof (tree)); + block_vector = XNEWVEC (tree, *n_blocks_p); all_blocks (block, block_vector); return block_vector; @@ -3872,7 +3896,7 @@ init_function_start (tree subr) /* Make sure all values used by the optimization passes have sane defaults. */ -void +unsigned int init_function_for_compilation (void) { reg_renumber = 0; @@ -3882,47 +3906,30 @@ init_function_for_compilation (void) gcc_assert (VEC_length (int, prologue) == 0); gcc_assert (VEC_length (int, epilogue) == 0); gcc_assert (VEC_length (int, sibcall_epilogue) == 0); + return 0; } -void -expand_main_function (void) +struct tree_opt_pass pass_init_function = { -#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN - if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN) - { - int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; - rtx tmp, seq; - - start_sequence (); - /* Forcibly align the stack. */ -#ifdef STACK_GROWS_DOWNWARD - tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align), - stack_pointer_rtx, 1, OPTAB_WIDEN); -#else - tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx, - GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN); - tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align), - stack_pointer_rtx, 1, OPTAB_WIDEN); -#endif - if (tmp != stack_pointer_rtx) - emit_move_insn (stack_pointer_rtx, tmp); - - /* Enlist allocate_dynamic_stack_space to pick up the pieces. */ - tmp = force_reg (Pmode, const0_rtx); - allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT); - seq = get_insns (); - end_sequence (); + NULL, /* name */ + NULL, /* gate */ + init_function_for_compilation, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + 0, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + 0, /* todo_flags_finish */ + 0 /* letter */ +}; - for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp)) - if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG) - break; - if (tmp) - emit_insn_before (seq, tmp); - else - emit_insn (seq); - } -#endif +void +expand_main_function (void) +{ #if (defined(INVOKE__main) \ || (!defined(HAS_INIT_SECTION) \ && !defined(INIT_SECTION_ASM_OP) \ @@ -3976,7 +3983,7 @@ stack_protect_prologue (void) # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX) #endif -static void +void stack_protect_epilogue (void) { tree guard_decl = targetm.stack_protect_guard (); @@ -4065,7 +4072,7 @@ expand_function_start (tree subr) else #endif { - rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1); + rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2); /* Expect to be passed the address of a place to store the value. If it is passed as an argument, assign_parms will take care of it. 
*/ @@ -4106,7 +4113,7 @@ expand_function_start (tree subr) /* In order to figure out what mode to use for the pseudo, we figure out what the mode of the eventual return register will actually be, and use that. */ - rtx hard_reg = hard_function_value (return_type, subr, 1); + rtx hard_reg = hard_function_value (return_type, subr, 0, 1); /* Structures that are returned in registers are not aggregate_value_p, so we may see a PARALLEL or a REG. */ @@ -4169,8 +4176,8 @@ expand_function_start (tree subr) as opposed to parm setup. */ emit_note (NOTE_INSN_FUNCTION_BEG); - if (!NOTE_P (get_last_insn ())) - emit_note (NOTE_INSN_DELETED); + gcc_assert (NOTE_P (get_last_insn ())); + parm_birth_insn = get_last_insn (); if (current_function_profile) @@ -4180,10 +4187,10 @@ expand_function_start (tree subr) #endif } - /* After the display initializations is where the tail-recursion label - should go, if we end up needing one. Ensure we have a NOTE here - since some things (like trampolines) get placed before this. */ - tail_recursion_reentry = emit_note (NOTE_INSN_DELETED); + /* After the display initializations is where the stack checking + probe should go. */ + if(flag_stack_check) + stack_check_probe_note = emit_note (NOTE_INSN_DELETED); /* Make sure there is a line number after the function entry setup code. */ force_next_line_note (); @@ -4261,7 +4268,7 @@ do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) emit_insn (gen_rtx_USE (VOIDmode, reg)); } -void +static void use_return_register (void) { diddle_return_value (do_use_return_reg, NULL); @@ -4277,7 +4284,7 @@ do_warn_unused_parameter (tree fn) decl; decl = TREE_CHAIN (decl)) if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)) - warning (0, "%Junused parameter %qD", decl, decl); + warning (OPT_Wunused_parameter, "unused parameter %q+D", decl); } static GTY(()) rtx initial_trampoline; @@ -4309,7 +4316,7 @@ expand_function_end (void) GEN_INT (STACK_CHECK_MAX_FRAME_SIZE)); seq = get_insns (); end_sequence (); - emit_insn_before (seq, tail_recursion_reentry); + emit_insn_before (seq, stack_check_probe_note); break; } } @@ -4328,27 +4335,11 @@ expand_function_end (void) clear_pending_stack_adjust (); do_pending_stack_adjust (); - /* @@@ This is a kludge. We want to ensure that instructions that - may trap are not moved into the epilogue by scheduling, because - we don't always emit unwind information for the epilogue. - However, not all machine descriptions define a blockage insn, so - emit an ASM_INPUT to act as one. */ - if (flag_non_call_exceptions) - emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); - /* Mark the end of the function body. If control reaches this insn, the function can drop through without returning a value. */ emit_note (NOTE_INSN_FUNCTION_END); - /* Must mark the last line number note in the function, so that the test - coverage code can avoid counting the last line twice. This just tells - the code to ignore the immediately following line note, since there - already exists a copy of this note somewhere above. This line number - note is still needed for debugging though, so we can't delete it. */ - if (flag_test_coverage) - emit_note (NOTE_INSN_REPEATED_LINE_NUMBER); - /* Output a linenumber for the end of the function. SDB depends on this. */ force_next_line_note (); @@ -4367,6 +4358,28 @@ expand_function_end (void) /* Output the label for the actual return from the function. 
*/ emit_label (return_label); + if (USING_SJLJ_EXCEPTIONS) + { + /* Let except.c know where it should emit the call to unregister + the function context for sjlj exceptions. */ + if (flag_exceptions) + sjlj_emit_function_exit_after (get_last_insn ()); + } + else + { + /* @@@ This is a kludge. We want to ensure that instructions that + may trap are not moved into the epilogue by scheduling, because + we don't always emit unwind information for the epilogue. + However, not all machine descriptions define a blockage insn, so + emit an ASM_INPUT to act as one. */ + if (flag_non_call_exceptions) + emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); + } + + /* If this is an implementation of throw, do what's necessary to + communicate between __builtin_eh_return and the epilogue. */ + expand_eh_return (); + /* If scalar return value was computed in a pseudo-reg, or was a named return value that got dumped to the stack, copy that to the hard return register. */ @@ -4428,6 +4441,24 @@ expand_function_end (void) TREE_TYPE (decl_result), int_size_in_bytes (TREE_TYPE (decl_result))); } + /* In the case of complex integer modes smaller than a word, we'll + need to generate some non-trivial bitfield insertions. Do that + on a pseudo and not the hard register. */ + else if (GET_CODE (decl_rtl) == CONCAT + && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT + && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD) + { + int old_generating_concat_p; + rtx tmp; + + old_generating_concat_p = generating_concat_p; + generating_concat_p = 0; + tmp = gen_reg_rtx (GET_MODE (decl_rtl)); + generating_concat_p = old_generating_concat_p; + + emit_move_insn (tmp, decl_rtl); + emit_move_insn (real_decl_rtl, tmp); + } else emit_move_insn (real_decl_rtl, decl_rtl); } @@ -4451,13 +4482,8 @@ expand_function_end (void) else value_address = XEXP (value_address, 0); -#ifdef FUNCTION_OUTGOING_VALUE - outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type), - current_function_decl); -#else - outgoing = FUNCTION_VALUE (build_pointer_type (type), - current_function_decl); -#endif + outgoing = targetm.calls.function_value (build_pointer_type (type), + current_function_decl, true); /* Mark this as a function return value so integrate will delete the assignment and USE below when inlining this function. */ @@ -4474,10 +4500,6 @@ expand_function_end (void) current_function_return_rtx = outgoing; } - /* If this is an implementation of throw, do what's necessary to - communicate between __builtin_eh_return and the epilogue. */ - expand_eh_return (); - /* Emit the actual code to clobber return register. */ { rtx seq; @@ -4494,11 +4516,6 @@ expand_function_end (void) /* Output the label for the naked return from the function. */ emit_label (naked_return_label); - /* Let except.c know where it should emit the call to unregister - the function context for sjlj exceptions. */ - if (flag_exceptions && USING_SJLJ_EXCEPTIONS) - sjlj_emit_function_exit_after (get_last_insn ()); - /* If stack protection is enabled for this function, check the guard. */ if (cfun->stack_protect_guard) stack_protect_epilogue (); @@ -4628,11 +4645,9 @@ sibcall_epilogue_contains (rtx insn) block_for_insn appropriately. 
*/ static void -emit_return_into_block (basic_block bb, rtx line_note) +emit_return_into_block (basic_block bb) { emit_jump_insn_after (gen_return (), BB_END (bb)); - if (line_note) - emit_note_copy_after (line_note, PREV_INSN (BB_END (bb))); } #endif /* HAVE_return */ @@ -4790,6 +4805,7 @@ keep_stack_depressed (rtx insns) info.sp_offset)); retaddr = gen_rtx_MEM (Pmode, retaddr); + MEM_NOTRAP_P (retaddr) = 1; /* If there is a pending load to the equivalent register for SP and we reference that register, we must load our address into @@ -5097,18 +5113,6 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) if (BB_HEAD (last) == label && LABEL_P (label)) { edge_iterator ei2; - rtx epilogue_line_note = NULL_RTX; - - /* Locate the line number associated with the closing brace, - if we can find one. */ - for (seq = get_last_insn (); - seq && ! active_insn_p (seq); - seq = PREV_INSN (seq)) - if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0) - { - epilogue_line_note = seq; - break; - } for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); ) { @@ -5132,7 +5136,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) with a simple return instruction. */ if (simplejump_p (jump)) { - emit_return_into_block (bb, epilogue_line_note); + emit_return_into_block (bb); delete_insn (jump); } @@ -5169,7 +5173,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) this is still reachable will be determined later. */ emit_barrier_after (BB_END (last)); - emit_return_into_block (last, epilogue_line_note); + emit_return_into_block (last); epilogue_end = BB_END (last); single_succ_edge (last)->flags &= ~EDGE_FALLTHRU; goto epilogue_done; @@ -5231,7 +5235,8 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) fixup_fallthru_exit_predecessor. */ cfg_layout_initialize (0); FOR_EACH_BB (cur_bb) - if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0) + if (cur_bb->index >= NUM_FIXED_BLOCKS + && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS) cur_bb->aux = cur_bb->next_bb; cfg_layout_finalize (); } @@ -5270,61 +5275,6 @@ epilogue_done: } #endif -#ifdef HAVE_prologue - /* This is probably all useless now that we use locators. */ - if (prologue_end) - { - rtx insn, prev; - - /* GDB handles `break f' by setting a breakpoint on the first - line note after the prologue. Which means (1) that if - there are line number notes before where we inserted the - prologue we should move them, and (2) we should generate a - note before the end of the first basic block, if there isn't - one already there. - - ??? This behavior is completely broken when dealing with - multiple entry functions. We simply place the note always - into first basic block and let alternate entry points - to be missed. - */ - - for (insn = prologue_end; insn; insn = prev) - { - prev = PREV_INSN (insn); - if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0) - { - /* Note that we cannot reorder the first insn in the - chain, since rest_of_compilation relies on that - remaining constant. */ - if (prev == NULL) - break; - reorder_insns (insn, insn, prologue_end); - } - } - - /* Find the last line number note in the first block. */ - for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb); - insn != prologue_end && insn; - insn = PREV_INSN (insn)) - if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0) - break; - - /* If we didn't find one, make a copy of the first line number - we run across. */ - if (! 
insn) - { - for (insn = next_active_insn (prologue_end); - insn; - insn = PREV_INSN (insn)) - if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0) - { - emit_note_copy_after (insn, prologue_end); - break; - } - } - } -#endif #ifdef HAVE_epilogue if (epilogue_end) { @@ -5339,8 +5289,7 @@ epilogue_done: { next = NEXT_INSN (insn); if (NOTE_P (insn) - && (NOTE_LINE_NUMBER (insn) > 0 - || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG + && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)) reorder_insns (insn, insn, PREV_INSN (epilogue_end)); } @@ -5445,8 +5394,8 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) void reset_block_changes (void) { - VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block"); - VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE); + cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100); + VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE); } /* Record the boundary for BLOCK. */ @@ -5459,17 +5408,20 @@ record_block_change (tree block) if (!block) return; - last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block); - VARRAY_POP (cfun->ib_boundaries_block); + if(!cfun->ib_boundaries_block) + return; + + last_block = VEC_pop (tree, cfun->ib_boundaries_block); n = get_max_uid (); - for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++) - VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block); + for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++) + VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block); - VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block); + VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block); } /* Finishes record of boundaries. */ -void finalize_block_changes (void) +void +finalize_block_changes (void) { record_block_change (DECL_INITIAL (current_function_decl)); } @@ -5480,17 +5432,17 @@ check_block_change (rtx insn, tree *block) { unsigned uid = INSN_UID (insn); - if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block)) + if (uid >= VEC_length (tree, cfun->ib_boundaries_block)) return; - *block = VARRAY_TREE (cfun->ib_boundaries_block, uid); + *block = VEC_index (tree, cfun->ib_boundaries_block, uid); } /* Releases the ib_boundaries_block records. */ void free_block_changes (void) { - cfun->ib_boundaries_block = NULL; + VEC_free (tree, gc, cfun->ib_boundaries_block); } /* Returns the name of the current function. */ @@ -5499,5 +5451,62 @@ current_function_name (void) { return lang_hooks.decl_printable_name (cfun->decl, 2); } + + +static unsigned int +rest_of_handle_check_leaf_regs (void) +{ +#ifdef LEAF_REGISTERS + current_function_uses_only_leaf_regs + = optimize > 0 && only_leaf_regs_used () && leaf_function_p (); +#endif + return 0; +} + +/* Insert a TYPE into the used types hash table of CFUN. */ +static void +used_types_insert_helper (tree type, struct function *func) +{ + if (type != NULL && func != NULL) + { + void **slot; + + if (func->used_types_hash == NULL) + func->used_types_hash = htab_create_ggc (37, htab_hash_pointer, + htab_eq_pointer, NULL); + slot = htab_find_slot (func->used_types_hash, type, INSERT); + if (*slot == NULL) + *slot = type; + } +} + +/* Given a type, insert it into the used hash table in cfun. 
*/ +void +used_types_insert (tree t) +{ + while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) + t = TREE_TYPE (t); + t = TYPE_MAIN_VARIANT (t); + if (debug_info_level > DINFO_LEVEL_NONE) + used_types_insert_helper (t, cfun); +} + +struct tree_opt_pass pass_leaf_regs = +{ + NULL, /* name */ + NULL, /* gate */ + rest_of_handle_check_leaf_regs, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + 0, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + 0, /* todo_flags_finish */ + 0 /* letter */ +}; + #include "gt-function.h"
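
Note on the new overflow check: the patch adds frame_offset_overflow, which rejects a frame whose offset no longer fits in signed target-pointer arithmetic, reserving 64 words for the fixed part of the frame. The following is a minimal standalone sketch of that same arithmetic, for illustration only; POINTER_BITS, UNITS_PER_WORD and FRAME_GROWS_DOWNWARD here are stand-in constants, not the real target macros, and frame_offset_overflows is a hypothetical helper, not the GCC function.

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-ins for the target macros consulted by the real check;
       actual values come from the GCC target headers.  */
    #define POINTER_BITS         32   /* GET_MODE_BITSIZE (Pmode) */
    #define UNITS_PER_WORD       4
    #define FRAME_GROWS_DOWNWARD 1

    /* Return true if OFFSET would overflow signed pointer arithmetic,
       keeping 64 words of room for the fixed part of the frame, in the
       same way as the check added by this patch.  */
    static bool
    frame_offset_overflows (long long offset)
    {
      unsigned long long size
        = FRAME_GROWS_DOWNWARD ? (unsigned long long) -offset
                               : (unsigned long long) offset;

      return size > (((unsigned long long) 1 << (POINTER_BITS - 1))
                     - 64 * UNITS_PER_WORD);
    }

    int
    main (void)
    {
      printf ("%d\n", frame_offset_overflows (-4096));          /* 0: small frame */
      printf ("%d\n", frame_offset_overflows (-2147483647LL));  /* 1: ~2**31 bytes */
      return 0;
    }

In the patch itself the check is called from assign_stack_local_1 after frame_offset is bumped, and the offset is reset to 0 on overflow so compilation can continue after the error is reported.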