X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ffunction.c;h=10fe7febe6cf5e1b745fb65eb11cbcac3b38e0ef;hp=38f56a607e29edba63dd0e08f631f85644508c0f;hb=3072d30e7983a3ca5ad030f1f98a5c39bcc2c07b;hpb=e8d0745df96e4d0f620210f349192f65bdd4f5a5 diff --git a/gcc/function.c b/gcc/function.c index 38f56a607e2..10fe7febe6c 100644 --- a/gcc/function.c +++ b/gcc/function.c @@ -1,6 +1,6 @@ /* Expands front end tree to back end RTL for GCC. Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997, - 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 + 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc. This file is part of GCC. @@ -63,6 +63,9 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA #include "tree-gimple.h" #include "tree-pass.h" #include "predict.h" +#include "df.h" +#include "timevar.h" +#include "vecprim.h" #ifndef LOCAL_ALIGNMENT #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT @@ -99,7 +102,7 @@ int current_function_is_leaf; /* Nonzero if function being compiled doesn't modify the stack pointer (ignoring the prologue and epilogue). This is only valid after - life_analysis has run. */ + pass_stack_ptr_mod has run. */ int current_function_sp_is_unchanging; /* Nonzero if the function being compiled is a leaf function which only @@ -123,9 +126,6 @@ struct machine_function * (*init_machine_status) (void); /* The currently compiled function. */ struct function *cfun = 0; -DEF_VEC_I(int); -DEF_VEC_ALLOC_I(int,heap); - /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */ static VEC(int,heap) *prologue; static VEC(int,heap) *epilogue; @@ -197,7 +197,6 @@ static struct temp_slot *find_temp_slot_from_address (rtx); static void pad_to_arg_alignment (struct args_size *, int, struct args_size *); static void pad_below (struct args_size *, enum machine_mode, tree); static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **); -static void reorder_fix_fragments (tree); static int all_blocks (tree, tree *); static tree *get_block_vector (tree, int *); extern tree debug_find_var_in_block_tree (tree, tree); @@ -206,7 +205,7 @@ extern tree debug_find_var_in_block_tree (tree, tree); static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED; static int contains (rtx, VEC(int,heap) **); #ifdef HAVE_return -static void emit_return_into_block (basic_block, rtx); +static void emit_return_into_block (basic_block); #endif #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX) static rtx keep_stack_depressed (rtx); @@ -331,11 +330,9 @@ free_after_compilation (struct function *f) f->x_return_label = NULL; f->x_naked_return_label = NULL; f->x_stack_slot_list = NULL; - f->x_tail_recursion_reentry = NULL; + f->x_stack_check_probe_note = NULL; f->x_arg_pointer_save_area = NULL; f->x_parm_birth_insn = NULL; - f->original_arg_vector = NULL; - f->original_decl_initial = NULL; f->epilogue_delay_list = NULL; } @@ -358,12 +355,33 @@ get_func_frame_size (struct function *f) /* Return size needed for stack frame based on slots so far allocated. This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY; the caller may have to do that. */ + HOST_WIDE_INT get_frame_size (void) { return get_func_frame_size (cfun); } +/* Issue an error message and return TRUE if frame OFFSET overflows in + the signed target pointer arithmetics for function FUNC. Otherwise + return FALSE. 
*/ + +bool +frame_offset_overflow (HOST_WIDE_INT offset, tree func) +{ + unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset; + + if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1)) + /* Leave room for the fixed part of the frame. */ + - 64 * UNITS_PER_WORD) + { + error ("%Jtotal size of local objects too large", func); + return TRUE; + } + + return FALSE; +} + /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it with machine mode MODE. @@ -454,7 +472,7 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align, /* On a big-endian machine, if we are allocating more space than we will use, use the least significant bytes of those that are allocated. */ - if (BYTES_BIG_ENDIAN && mode != BLKmode) + if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size) bigend_correction = size - GET_MODE_SIZE (mode); /* If we have already instantiated virtual registers, return the actual @@ -474,10 +492,14 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align, function->x_frame_offset += size; x = gen_rtx_MEM (mode, addr); + MEM_NOTRAP_P (x) = 1; function->x_stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list); + if (frame_offset_overflow (function->x_frame_offset, function->decl)) + function->x_frame_offset = 0; + return x; } @@ -523,14 +545,10 @@ insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list) static struct temp_slot ** temp_slots_at_level (int level) { + if (level >= (int) VEC_length (temp_slot_p, used_temp_slots)) + VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1); - if (!used_temp_slots) - VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots"); - - while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots)) - VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL); - - return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level); + return &(VEC_address (temp_slot_p, used_temp_slots)[level]); } /* Returns the maximal temporary slot level. */ @@ -541,7 +559,7 @@ max_slot_level (void) if (!used_temp_slots) return -1; - return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1; + return VEC_length (temp_slot_p, used_temp_slots) - 1; } /* Moves temporary slot TEMP to LEVEL. */ @@ -609,22 +627,30 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, /* Try to find an available, already-allocated temporary of the proper mode which meets the size and alignment requirements. Choose the - smallest one with the closest alignment. */ - for (p = avail_temp_slots; p; p = p->next) + smallest one with the closest alignment. + + If assign_stack_temp is called outside of the tree->rtl expansion, + we cannot reuse the stack slots (that may still refer to + VIRTUAL_STACK_VARS_REGNUM). 
*/ + if (!virtuals_instantiated) { - if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode - && objects_must_conflict_p (p->type, type) - && (best_p == 0 || best_p->size > p->size - || (best_p->size == p->size && best_p->align > p->align))) + for (p = avail_temp_slots; p; p = p->next) { - if (p->align == align && p->size == size) + if (p->align >= align && p->size >= size + && GET_MODE (p->slot) == mode + && objects_must_conflict_p (p->type, type) + && (best_p == 0 || best_p->size > p->size + || (best_p->size == p->size && best_p->align > p->align))) { - selected = p; - cut_slot_from_list (selected, &avail_temp_slots); - best_p = 0; - break; + if (p->align == align && p->size == size) + { + selected = p; + cut_slot_from_list (selected, &avail_temp_slots); + best_p = 0; + break; + } + best_p = p; } - best_p = p; } } @@ -649,9 +675,7 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, p->size = best_p->size - rounded_size; p->base_offset = best_p->base_offset + rounded_size; p->full_size = best_p->full_size - rounded_size; - p->slot = gen_rtx_MEM (BLKmode, - plus_constant (XEXP (best_p->slot, 0), - rounded_size)); + p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size); p->align = best_p->align; p->address = 0; p->type = best_p->type; @@ -741,8 +765,10 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, if (type != 0) { MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type); - MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type)); + MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type) + || TREE_CODE (type) == COMPLEX_TYPE)); } + MEM_NOTRAP_P (slot) = 1; return slot; } @@ -789,7 +815,6 @@ assign_temp (tree type_or_decl, int keep, int memory_required, if (mode == BLKmode || memory_required) { HOST_WIDE_INT size = int_size_in_bytes (type); - tree size_tree; rtx tmp; /* Zero sized arrays are GNU C extension. Set size to 1 to avoid @@ -798,20 +823,10 @@ assign_temp (tree type_or_decl, int keep, int memory_required, size = 1; /* Unfortunately, we don't yet know how to allocate variable-sized - temporaries. However, sometimes we have a fixed upper limit on - the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that - instead. This is the case for Chill variable-sized strings. */ - if (size == -1 && TREE_CODE (type) == ARRAY_TYPE - && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE - && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1)) - size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1); - - /* If we still haven't been able to get a size, see if the language - can compute a maximum size. */ - if (size == -1 - && (size_tree = lang_hooks.types.max_size (type)) != 0 - && host_integerp (size_tree, 1)) - size = tree_low_cst (size_tree, 1); + temporaries. However, sometimes we can find a fixed upper limit on + the size, so try that instead. */ + else if (size == -1) + size = max_int_size_in_bytes (type); /* The size of the temporary may be too large to fit into an integer. */ /* ??? Not sure this should happen except for user silliness, so limit @@ -1198,12 +1213,12 @@ static int cfa_offset; `current_function_outgoing_args_size'. Nevertheless, we must allow for it when allocating stack dynamic objects. */ -#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) +#if defined(REG_PARM_STACK_SPACE) #define STACK_DYNAMIC_OFFSET(FNDECL) \ ((ACCUMULATE_OUTGOING_ARGS \ - ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\ - + (STACK_POINTER_OFFSET)) \ - + ? 
(current_function_outgoing_args_size \ + + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL))) \ + : 0) + (STACK_POINTER_OFFSET)) #else #define STACK_DYNAMIC_OFFSET(FNDECL) \ ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \ @@ -1211,12 +1226,6 @@ static int cfa_offset; #endif #endif -/* On most machines, the CFA coincides with the first incoming parm. */ - -#ifndef ARG_POINTER_CFA_OFFSET -#define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL) -#endif - /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX is a virtual register, return the equivalent hard register and set the @@ -1237,7 +1246,14 @@ instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset) else if (x == virtual_outgoing_args_rtx) new = stack_pointer_rtx, offset = out_arg_offset; else if (x == virtual_cfa_rtx) - new = arg_pointer_rtx, offset = cfa_offset; + { +#ifdef FRAME_POINTER_CFA_OFFSET + new = frame_pointer_rtx; +#else + new = arg_pointer_rtx; +#endif + offset = cfa_offset; + } else return NULL_RTX; @@ -1508,7 +1524,14 @@ instantiate_virtual_regs_in_insn (rtx insn) Validate the new value vs the insn predicate. Note that asm insns will have insn_code -1 here. */ if (!safe_insn_predicate (insn_code, i, x)) - x = force_reg (insn_data[insn_code].operand[i].mode, x); + { + start_sequence (); + x = force_reg (insn_data[insn_code].operand[i].mode, x); + seq = get_insns (); + end_sequence (); + if (seq) + emit_insn_before (seq, insn); + } *recog_data.operand_loc[i] = recog_data.operand[i] = x; any_change = true; @@ -1519,7 +1542,7 @@ instantiate_virtual_regs_in_insn (rtx insn) /* Propagate operand changes into the duplicates. */ for (i = 0; i < recog_data.n_dups; ++i) *recog_data.dup_loc[i] - = recog_data.operand[(unsigned)recog_data.dup_num[i]]; + = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]); /* Force re-recognition of the instruction for validation. */ INSN_CODE (insn) = -1; @@ -1574,6 +1597,22 @@ instantiate_decl (rtx x) for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL); } +/* Helper for instantiate_decls called via walk_tree: Process all decls + in the given DECL_VALUE_EXPR. */ + +static tree +instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) +{ + tree t = *tp; + if (! EXPR_P (t) && ! GIMPLE_STMT_P (t)) + { + *walk_subtrees = 0; + if (DECL_P (t) && DECL_RTL_SET_P (t)) + instantiate_decl (DECL_RTL (t)); + } + return NULL; +} + /* Subroutine of instantiate_decls: Process all decls in the given BLOCK node and all its subblocks. */ @@ -1583,8 +1622,15 @@ instantiate_decls_1 (tree let) tree t; for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) - if (DECL_RTL_SET_P (t)) - instantiate_decl (DECL_RTL (t)); + { + if (DECL_RTL_SET_P (t)) + instantiate_decl (DECL_RTL (t)); + if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t)) + { + tree v = DECL_VALUE_EXPR (t); + walk_tree (&v, instantiate_expr, NULL, NULL); + } + } /* Process all subblocks. */ for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) @@ -1604,6 +1650,11 @@ instantiate_decls (tree fndecl) { instantiate_decl (DECL_RTL (decl)); instantiate_decl (DECL_INCOMING_RTL (decl)); + if (DECL_HAS_VALUE_EXPR_P (decl)) + { + tree v = DECL_VALUE_EXPR (decl); + walk_tree (&v, instantiate_expr, NULL, NULL); + } } /* Now process all variables defined in the function or its subblocks. */ @@ -1613,7 +1664,7 @@ instantiate_decls (tree fndecl) /* Pass through the INSNS of function FNDECL and convert virtual register references to hard register references. 
*/ -void +static unsigned int instantiate_virtual_regs (void) { rtx insn; @@ -1623,7 +1674,11 @@ instantiate_virtual_regs (void) var_offset = STARTING_FRAME_OFFSET; dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl); out_arg_offset = STACK_POINTER_OFFSET; +#ifdef FRAME_POINTER_CFA_OFFSET + cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); +#else cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); +#endif /* Initialize recognition, indicating that volatile is OK. */ init_recog (); @@ -1661,11 +1716,12 @@ instantiate_virtual_regs (void) /* Indicate that, from now on, assign_stack_local should use frame_pointer_rtx. */ virtuals_instantiated = 1; + return 0; } struct tree_opt_pass pass_instantiate_virtual_regs = { - NULL, /* name */ + "vregs", /* name */ NULL, /* gate */ instantiate_virtual_regs, /* execute */ NULL, /* sub */ @@ -1676,7 +1732,7 @@ struct tree_opt_pass pass_instantiate_virtual_regs = 0, /* properties_provided */ 0, /* properties_destroyed */ 0, /* todo_flags_start */ - 0, /* todo_flags_finish */ + TODO_dump_func, /* todo_flags_finish */ 0 /* letter */ }; @@ -1694,15 +1750,21 @@ aggregate_value_p (tree exp, tree fntype) tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); + /* DECL node associated with FNTYPE when relevant, which we might need to + check for by-invisible-reference returns, typically for CALL_EXPR input + EXPressions. */ + tree fndecl = NULL_TREE; + if (fntype) switch (TREE_CODE (fntype)) { case CALL_EXPR: - fntype = get_callee_fndecl (fntype); - fntype = fntype ? TREE_TYPE (fntype) : 0; + fndecl = get_callee_fndecl (fntype); + fntype = fndecl ? TREE_TYPE (fndecl) : 0; break; case FUNCTION_DECL: - fntype = TREE_TYPE (fntype); + fndecl = fntype; + fntype = TREE_TYPE (fndecl); break; case FUNCTION_TYPE: case METHOD_TYPE: @@ -1717,11 +1779,23 @@ aggregate_value_p (tree exp, tree fntype) if (TREE_CODE (type) == VOID_TYPE) return 0; + /* If the front end has decided that this needs to be passed by reference, do so. */ if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL) && DECL_BY_REFERENCE (exp)) return 1; + + /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the + called function RESULT_DECL, meaning the function returns in memory by + invisible reference. This check lets front-ends not set TREE_ADDRESSABLE + on the function type, which used to be the way to request such a return + mechanism but might now be causing troubles at gimplification time if + temporaries with the function type need to be created. */ + if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl) + && DECL_BY_REFERENCE (DECL_RESULT (fndecl))) + return 1; + if (targetm.calls.return_in_memory (type, fntype)) return 1; /* Types that are TREE_ADDRESSABLE must be constructed in memory, @@ -1823,7 +1897,8 @@ struct assign_parm_data_all struct args_size stack_args_size; tree function_result_decl; tree orig_fnargs; - rtx conversion_insns; + rtx first_conversion_insn; + rtx last_conversion_insn; HOST_WIDE_INT pretend_args_size; HOST_WIDE_INT extra_pretend_bytes; int reg_parm_stack_space; @@ -2019,9 +2094,8 @@ assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, /* If the parm is to be passed as a transparent union, use the type of the first field for the tests below. We have already verified that the modes are the same. 
*/ - if (DECL_TRANSPARENT_UNION (parm) - || (TREE_CODE (passed_type) == UNION_TYPE - && TYPE_TRANSPARENT_UNION (passed_type))) + if (TREE_CODE (passed_type) == UNION_TYPE + && TYPE_TRANSPARENT_UNION (passed_type)) passed_type = TREE_TYPE (TYPE_FIELDS (passed_type)); /* See if this arg was passed by invisible reference. */ @@ -2418,7 +2492,8 @@ assign_parm_setup_block (struct assign_parm_data_all *all, { rtx parmreg = gen_reg_rtx (data->nominal_mode); - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, + all->last_conversion_insn); /* For values returned in multiple registers, handle possible incompatible calls to emit_group_store. @@ -2443,7 +2518,8 @@ assign_parm_setup_block (struct assign_parm_data_all *all, emit_group_store (parmreg, entry_parm, data->nominal_type, int_size_in_bytes (data->nominal_type)); - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); SET_DECL_RTL (parm, parmreg); @@ -2490,9 +2566,11 @@ assign_parm_setup_block (struct assign_parm_data_all *all, /* Handle values in multiple non-contiguous locations. */ if (GET_CODE (entry_parm) == PARALLEL) { - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, + all->last_conversion_insn); emit_group_store (mem, entry_parm, data->passed_type, size); - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); } @@ -2551,10 +2629,11 @@ assign_parm_setup_block (struct assign_parm_data_all *all, } else if (data->stack_parm == 0) { - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); emit_block_move (stack_parm, data->entry_parm, GEN_INT (size), BLOCK_OP_NORMAL); - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); } @@ -2577,8 +2656,10 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, /* Store the parm in a pseudoregister during the function, but we may need to do it in a wider mode. */ + /* This is not really promoting for a call. However we need to be + consistent with assign_parm_find_data_types and expand_expr_real_1. */ promoted_nominal_mode - = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0); + = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1); parmreg = gen_reg_rtx (promoted_nominal_mode); @@ -2625,7 +2706,7 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, emit_move_insn (tempreg, validize_mem (data->entry_parm)); - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp); if (GET_CODE (tempreg) == SUBREG @@ -2644,9 +2725,10 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, /* TREE_USED gets set erroneously during expand_assignment. 
*/ save_tree_used = TREE_USED (parm); - expand_assignment (parm, make_tree (data->nominal_type, tempreg)); + expand_assignment (parm, make_tree (data->nominal_type, tempreg), false); TREE_USED (parm) = save_tree_used; - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); did_conversion = true; @@ -2672,11 +2754,13 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm))); int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm)); - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, + all->last_conversion_insn); emit_move_insn (tempreg, DECL_RTL (parm)); tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p); emit_move_insn (parmreg, tempreg); - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); did_conversion = true; @@ -2728,20 +2812,14 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, continue; if (SET_DEST (set) == regno_reg_rtx [regnoi]) - REG_NOTES (sinsn) - = gen_rtx_EXPR_LIST (REG_EQUIV, stacki, - REG_NOTES (sinsn)); + set_unique_reg_note (sinsn, REG_EQUIV, stacki); else if (SET_DEST (set) == regno_reg_rtx [regnor]) - REG_NOTES (sinsn) - = gen_rtx_EXPR_LIST (REG_EQUIV, stackr, - REG_NOTES (sinsn)); + set_unique_reg_note (sinsn, REG_EQUIV, stackr); } } else if ((set = single_set (linsn)) != 0 && SET_DEST (set) == parmreg) - REG_NOTES (linsn) - = gen_rtx_EXPR_LIST (REG_EQUIV, - data->stack_parm, REG_NOTES (linsn)); + set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm); } /* For pointer data type, suggest pointer register. */ @@ -2768,7 +2846,7 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, emit_move_insn (tempreg, validize_mem (data->entry_parm)); - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn); to_conversion = true; data->entry_parm = convert_to_mode (data->nominal_mode, tempreg, @@ -2800,7 +2878,8 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, { /* Use a block move to handle potentially misaligned entry_parm. 
*/ if (!to_conversion) - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, + all->last_conversion_insn); to_conversion = true; emit_block_move (dest, src, @@ -2813,7 +2892,8 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, if (to_conversion) { - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); } @@ -2857,10 +2937,12 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs) set_mem_attributes (tmp, parm, 1); rmem = adjust_address_nv (tmp, inner, 0); imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner)); - push_to_sequence (all->conversion_insns); + push_to_sequence2 (all->first_conversion_insn, + all->last_conversion_insn); emit_move_insn (rmem, real); emit_move_insn (imem, imag); - all->conversion_insns = get_insns (); + all->first_conversion_insn = get_insns (); + all->last_conversion_insn = get_last_insn (); end_sequence (); } else @@ -2901,22 +2983,9 @@ assign_parms (tree fndecl) { struct assign_parm_data_all all; tree fnargs, parm; - rtx internal_arg_pointer; - - /* If the reg that the virtual arg pointer will be translated into is - not a fixed reg or is the stack pointer, make a copy of the virtual - arg pointer, and address parms via the copy. The frame pointer is - considered fixed even though it is not marked as such. - - The second time through, simply use ap to avoid generating rtx. */ - if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM - || ! (fixed_regs[ARG_POINTER_REGNUM] - || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))) - internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx); - else - internal_arg_pointer = virtual_incoming_args_rtx; - current_function_internal_arg_pointer = internal_arg_pointer; + current_function_internal_arg_pointer + = targetm.calls.internal_arg_pointer (); assign_parms_initialize_all (&all); fnargs = assign_parms_augmented_arg_list (&all); @@ -2971,7 +3040,7 @@ assign_parms (tree fndecl) /* Output all parameter conversion instructions (possibly including calls) now that all parameters have been copied out of hard registers. */ - emit_insn (all.conversion_insns); + emit_insn (all.first_conversion_insn); /* If we are receiving a struct value address as the first argument, set up the RTL for the function result. 
As this might require code to convert @@ -3007,9 +3076,8 @@ assign_parms (tree fndecl) REG_PARM_STACK_SPACE (fndecl)); #endif - current_function_args_size - = ((current_function_args_size + STACK_BYTES - 1) - / STACK_BYTES) * STACK_BYTES; + current_function_args_size = CEIL_ROUND (current_function_args_size, + PARM_BOUNDARY / BITS_PER_UNIT); #ifdef ARGS_GROW_DOWNWARD current_function_arg_offset_rtx @@ -3150,22 +3218,21 @@ gimplify_parameters (void) } else { - tree ptr_type, addr, args; + tree ptr_type, addr; ptr_type = build_pointer_type (type); addr = create_tmp_var (ptr_type, get_name (parm)); DECL_IGNORED_P (addr) = 0; local = build_fold_indirect_ref (addr); - args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL); t = built_in_decls[BUILT_IN_ALLOCA]; - t = build_function_call_expr (t, args); + t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm)); t = fold_convert (ptr_type, t); - t = build2 (MODIFY_EXPR, void_type_node, addr, t); + t = build_gimple_modify_stmt (addr, t); gimplify_and_add (t, &stmts); } - t = build2 (MODIFY_EXPR, void_type_node, local, parm); + t = build_gimple_modify_stmt (local, parm); gimplify_and_add (t, &stmts); SET_DECL_VALUE_EXPR (parm, local); @@ -3177,40 +3244,6 @@ gimplify_parameters (void) return stmts; } -/* Indicate whether REGNO is an incoming argument to the current function - that was promoted to a wider mode. If so, return the RTX for the - register (to get its mode). PMODE and PUNSIGNEDP are set to the mode - that REGNO is promoted from and whether the promotion was signed or - unsigned. */ - -rtx -promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp) -{ - tree arg; - - for (arg = DECL_ARGUMENTS (current_function_decl); arg; - arg = TREE_CHAIN (arg)) - if (REG_P (DECL_INCOMING_RTL (arg)) - && REGNO (DECL_INCOMING_RTL (arg)) == regno - && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg))) - { - enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg)); - int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg)); - - mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1); - if (mode == GET_MODE (DECL_INCOMING_RTL (arg)) - && mode != DECL_MODE (arg)) - { - *pmode = DECL_MODE (arg); - *punsignedp = unsignedp; - return DECL_INCOMING_RTL (arg); - } - } - - return 0; -} - - /* Compute the size and offset from the start of the stacked arguments for a parm passed in mode PASSED_MODE and with type TYPE. @@ -3378,10 +3411,9 @@ pad_to_arg_alignment (struct args_size *offset_ptr, int boundary, HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET; #ifdef SPARC_STACK_BOUNDARY_HACK - /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY - higher than the real alignment of %sp. However, when it does this, - the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY. - This is a temporary hack while the sparc port is fixed. */ + /* ??? The SPARC port may claim a STACK_BOUNDARY higher than + the real alignment of %sp. However, when it does this, the + alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ if (SPARC_STACK_BOUNDARY_HACK) sp_offset = 0; #endif @@ -3455,13 +3487,32 @@ pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree siz } } -/* Walk the tree of blocks describing the binding levels within a function - and warn about variables the might be killed by setjmp or vfork. - This is done after calling flow_analysis and before global_alloc - clobbers the pseudo-regs to hard regs. 
*/ -void -setjmp_vars_warning (tree block) +/* True if register REGNO was alive at a place where `setjmp' was + called and was set more than once or is an argument. Such regs may + be clobbered by `longjmp'. */ + +static bool +regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno) +{ + /* There appear to be cases where some local vars never reach the + backend but have bogus regnos. */ + if (regno >= max_reg_num ()) + return false; + + return ((REG_N_SETS (regno) > 1 + || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno)) + && REGNO_REG_SET_P (setjmp_crosses, regno)); +} + +/* Walk the tree of blocks describing the binding levels within a + function and warn about variables the might be killed by setjmp or + vfork. This is done after calling flow_analysis before register + allocation since that will clobber the pseudo-regs to hard + regs. */ + +static void +setjmp_vars_warning (bitmap setjmp_crosses, tree block) { tree decl, sub; @@ -3470,32 +3521,47 @@ setjmp_vars_warning (tree block) if (TREE_CODE (decl) == VAR_DECL && DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)) - && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) - warning (0, "variable %q+D might be clobbered by %" - " or %", - decl); + && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) + warning (OPT_Wclobbered, "variable %q+D might be clobbered by" + " % or %", decl); } for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) - setjmp_vars_warning (sub); + setjmp_vars_warning (setjmp_crosses, sub); } /* Do the appropriate part of setjmp_vars_warning but for arguments instead of local variables. */ -void -setjmp_args_warning (void) +static void +setjmp_args_warning (bitmap setjmp_crosses) { tree decl; for (decl = DECL_ARGUMENTS (current_function_decl); decl; decl = TREE_CHAIN (decl)) if (DECL_RTL (decl) != 0 && REG_P (DECL_RTL (decl)) - && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) - warning (0, "argument %q+D might be clobbered by % or %", + && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) + warning (OPT_Wclobbered, + "argument %q+D might be clobbered by % or %", decl); } +/* Generate warning messages for variables live across setjmp. */ + +void +generate_setjmp_warnings (void) +{ + bitmap setjmp_crosses = regstat_get_setjmp_crosses (); + + if (n_basic_blocks == NUM_FIXED_BLOCKS + || bitmap_empty_p (setjmp_crosses)) + return; + + setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl)); + setjmp_args_warning (setjmp_crosses); +} + /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END}, and create duplicate blocks. */ @@ -3525,9 +3591,6 @@ reorder_blocks (void) reorder_blocks_1 (get_insns (), block, &block_stack); BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block)); - /* Remove deleted blocks from the block fragment chains. */ - reorder_fix_fragments (block); - VEC_free (tree, heap, block_stack); } @@ -3553,20 +3616,21 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) { if (NOTE_P (insn)) { - if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) + if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG) { tree block = NOTE_BLOCK (insn); + tree origin; + + origin = (BLOCK_FRAGMENT_ORIGIN (block) + ? BLOCK_FRAGMENT_ORIGIN (block) + : block); /* If we have seen this block before, that means it now spans multiple address regions. Create a new fragment. */ if (TREE_ASM_WRITTEN (block)) { tree new_block = copy_node (block); - tree origin; - origin = (BLOCK_FRAGMENT_ORIGIN (block) - ? 
BLOCK_FRAGMENT_ORIGIN (block) - : block); BLOCK_FRAGMENT_ORIGIN (new_block) = origin; BLOCK_FRAGMENT_CHAIN (new_block) = BLOCK_FRAGMENT_CHAIN (origin); @@ -3583,14 +3647,17 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) will cause infinite recursion. */ if (block != current_block) { + if (block != origin) + gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block); + BLOCK_SUPERCONTEXT (block) = current_block; BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block); BLOCK_SUBBLOCKS (current_block) = block; - current_block = block; + current_block = origin; } VEC_safe_push (tree, heap, *p_block_stack, block); } - else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END) + else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END) { NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack); BLOCK_SUBBLOCKS (current_block) @@ -3601,61 +3668,6 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack) } } -/* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer - appears in the block tree, select one of the fragments to become - the new origin block. */ - -static void -reorder_fix_fragments (tree block) -{ - while (block) - { - tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block); - tree new_origin = NULL_TREE; - - if (dup_origin) - { - if (! TREE_ASM_WRITTEN (dup_origin)) - { - new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin); - - /* Find the first of the remaining fragments. There must - be at least one -- the current block. */ - while (! TREE_ASM_WRITTEN (new_origin)) - new_origin = BLOCK_FRAGMENT_CHAIN (new_origin); - BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE; - } - } - else if (! dup_origin) - new_origin = block; - - /* Re-root the rest of the fragments to the new origin. In the - case that DUP_ORIGIN was null, that means BLOCK was the origin - of a chain of fragments and we want to remove those fragments - that didn't make it to the output. */ - if (new_origin) - { - tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin); - tree chain = *pp; - - while (chain) - { - if (TREE_ASM_WRITTEN (chain)) - { - BLOCK_FRAGMENT_ORIGIN (chain) = new_origin; - *pp = chain; - pp = &BLOCK_FRAGMENT_CHAIN (chain); - } - chain = BLOCK_FRAGMENT_CHAIN (chain); - } - *pp = NULL_TREE; - } - - reorder_fix_fragments (BLOCK_SUBBLOCKS (block)); - block = BLOCK_CHAIN (block); - } -} - /* Reverse the order of elements in the chain T of blocks, and return the new head of the chain (old last element). */ @@ -3712,7 +3724,7 @@ get_block_vector (tree block, int *n_blocks_p) tree *block_vector; *n_blocks_p = all_blocks (block, NULL); - block_vector = xmalloc (*n_blocks_p * sizeof (tree)); + block_vector = XNEWVEC (tree, *n_blocks_p); all_blocks (block, block_vector); return block_vector; @@ -3770,6 +3782,14 @@ debug_find_var_in_block_tree (tree var, tree block) return NULL_TREE; } + +/* Return value of funcdef and increase it. */ +int +get_next_funcdef_no (void) +{ + return funcdef_no++; +} + /* Allocate a function structure for FNDECL and set its contents to the defaults. */ @@ -3784,7 +3804,7 @@ allocate_struct_function (tree fndecl) cfun->stack_alignment_needed = STACK_BOUNDARY; cfun->preferred_stack_boundary = STACK_BOUNDARY; - current_function_funcdef_no = funcdef_no++; + current_function_funcdef_no = get_next_funcdef_no (); cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL; @@ -3872,18 +3892,6 @@ init_function_start (tree subr) { prepare_function_start (subr); - /* Prevent ever trying to delete the first instruction of a - function. 
Also tell final how to output a linenum before the - function prologue. Note linenums could be missing, e.g. when - compiling a Java .class file. */ - if (! DECL_IS_BUILTIN (subr)) - emit_line_note (DECL_SOURCE_LOCATION (subr)); - - /* Make sure first insn is a note even if we don't want linenums. - This makes sure the first insn will never be deleted. - Also, final expects a note to appear there. */ - emit_note (NOTE_INSN_DELETED); - /* Warn if this value is an aggregate type, regardless of which calling convention we are using for it. */ if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)))) @@ -3892,7 +3900,7 @@ init_function_start (tree subr) /* Make sure all values used by the optimization passes have sane defaults. */ -void +unsigned int init_function_for_compilation (void) { reg_renumber = 0; @@ -3902,6 +3910,7 @@ init_function_for_compilation (void) gcc_assert (VEC_length (int, prologue) == 0); gcc_assert (VEC_length (int, epilogue) == 0); gcc_assert (VEC_length (int, sibcall_epilogue) == 0); + return 0; } struct tree_opt_pass pass_init_function = @@ -3925,42 +3934,6 @@ struct tree_opt_pass pass_init_function = void expand_main_function (void) { -#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN - if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN) - { - int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; - rtx tmp, seq; - - start_sequence (); - /* Forcibly align the stack. */ -#ifdef STACK_GROWS_DOWNWARD - tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align), - stack_pointer_rtx, 1, OPTAB_WIDEN); -#else - tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx, - GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN); - tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align), - stack_pointer_rtx, 1, OPTAB_WIDEN); -#endif - if (tmp != stack_pointer_rtx) - emit_move_insn (stack_pointer_rtx, tmp); - - /* Enlist allocate_dynamic_stack_space to pick up the pieces. */ - tmp = force_reg (Pmode, const0_rtx); - allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT); - seq = get_insns (); - end_sequence (); - - for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp)) - if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG) - break; - if (tmp) - emit_insn_before (seq, tmp); - else - emit_insn (seq); - } -#endif - #if (defined(INVOKE__main) \ || (!defined(HAS_INIT_SECTION) \ && !defined(INIT_SECTION_ASM_OP) \ @@ -4014,7 +3987,7 @@ stack_protect_prologue (void) # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX) #endif -static void +void stack_protect_epilogue (void) { tree guard_decl = targetm.stack_protect_guard (); @@ -4103,7 +4076,7 @@ expand_function_start (tree subr) else #endif { - rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1); + rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2); /* Expect to be passed the address of a place to store the value. If it is passed as an argument, assign_parms will take care of it. */ @@ -4207,8 +4180,8 @@ expand_function_start (tree subr) as opposed to parm setup. */ emit_note (NOTE_INSN_FUNCTION_BEG); - if (!NOTE_P (get_last_insn ())) - emit_note (NOTE_INSN_DELETED); + gcc_assert (NOTE_P (get_last_insn ())); + parm_birth_insn = get_last_insn (); if (current_function_profile) @@ -4218,10 +4191,10 @@ expand_function_start (tree subr) #endif } - /* After the display initializations is where the tail-recursion label - should go, if we end up needing one. Ensure we have a NOTE here - since some things (like trampolines) get placed before this. 
*/ - tail_recursion_reentry = emit_note (NOTE_INSN_DELETED); + /* After the display initializations is where the stack checking + probe should go. */ + if(flag_stack_check) + stack_check_probe_note = emit_note (NOTE_INSN_DELETED); /* Make sure there is a line number after the function entry setup code. */ force_next_line_note (); @@ -4299,7 +4272,7 @@ do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED) emit_insn (gen_rtx_USE (VOIDmode, reg)); } -void +static void use_return_register (void) { diddle_return_value (do_use_return_reg, NULL); @@ -4347,7 +4320,7 @@ expand_function_end (void) GEN_INT (STACK_CHECK_MAX_FRAME_SIZE)); seq = get_insns (); end_sequence (); - emit_insn_before (seq, tail_recursion_reentry); + emit_insn_before (seq, stack_check_probe_note); break; } } @@ -4366,31 +4339,10 @@ expand_function_end (void) clear_pending_stack_adjust (); do_pending_stack_adjust (); - /* @@@ This is a kludge. We want to ensure that instructions that - may trap are not moved into the epilogue by scheduling, because - we don't always emit unwind information for the epilogue. - However, not all machine descriptions define a blockage insn, so - emit an ASM_INPUT to act as one. */ - if (flag_non_call_exceptions) - emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); - - /* Mark the end of the function body. - If control reaches this insn, the function can drop through - without returning a value. */ - emit_note (NOTE_INSN_FUNCTION_END); - - /* Must mark the last line number note in the function, so that the test - coverage code can avoid counting the last line twice. This just tells - the code to ignore the immediately following line note, since there - already exists a copy of this note somewhere above. This line number - note is still needed for debugging though, so we can't delete it. */ - if (flag_test_coverage) - emit_note (NOTE_INSN_REPEATED_LINE_NUMBER); - /* Output a linenumber for the end of the function. SDB depends on this. */ force_next_line_note (); - emit_line_note (input_location); + set_curr_insn_source_location (input_location); /* Before the return label (if any), clobber the return registers so that they are not propagated live to the rest of @@ -4405,10 +4357,25 @@ expand_function_end (void) /* Output the label for the actual return from the function. */ emit_label (return_label); - /* Let except.c know where it should emit the call to unregister - the function context for sjlj exceptions. */ - if (flag_exceptions && USING_SJLJ_EXCEPTIONS) - sjlj_emit_function_exit_after (get_last_insn ()); + if (USING_SJLJ_EXCEPTIONS) + { + /* Let except.c know where it should emit the call to unregister + the function context for sjlj exceptions. */ + if (flag_exceptions) + sjlj_emit_function_exit_after (get_last_insn ()); + } + else + { + /* We want to ensure that instructions that may trap are not + moved into the epilogue by scheduling, because we don't + always emit unwind information for the epilogue. */ + if (flag_non_call_exceptions) + emit_insn (gen_blockage ()); + } + + /* If this is an implementation of throw, do what's necessary to + communicate between __builtin_eh_return and the epilogue. 
*/ + expand_eh_return (); /* If scalar return value was computed in a pseudo-reg, or was a named return value that got dumped to the stack, copy that to the hard @@ -4471,6 +4438,24 @@ expand_function_end (void) TREE_TYPE (decl_result), int_size_in_bytes (TREE_TYPE (decl_result))); } + /* In the case of complex integer modes smaller than a word, we'll + need to generate some non-trivial bitfield insertions. Do that + on a pseudo and not the hard register. */ + else if (GET_CODE (decl_rtl) == CONCAT + && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT + && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD) + { + int old_generating_concat_p; + rtx tmp; + + old_generating_concat_p = generating_concat_p; + generating_concat_p = 0; + tmp = gen_reg_rtx (GET_MODE (decl_rtl)); + generating_concat_p = old_generating_concat_p; + + emit_move_insn (tmp, decl_rtl); + emit_move_insn (real_decl_rtl, tmp); + } else emit_move_insn (real_decl_rtl, decl_rtl); } @@ -4512,10 +4497,6 @@ expand_function_end (void) current_function_return_rtx = outgoing; } - /* If this is an implementation of throw, do what's necessary to - communicate between __builtin_eh_return and the epilogue. */ - expand_eh_return (); - /* Emit the actual code to clobber return register. */ { rtx seq; @@ -4532,6 +4513,14 @@ expand_function_end (void) /* Output the label for the naked return from the function. */ emit_label (naked_return_label); + /* @@@ This is a kludge. We want to ensure that instructions that + may trap are not moved into the epilogue by scheduling, because + we don't always emit unwind information for the epilogue. + However, not all machine descriptions define a blockage insn, so + emit an ASM_INPUT to act as one. */ + if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions) + emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); + /* If stack protection is enabled for this function, check the guard. */ if (cfun->stack_protect_guard) stack_protect_epilogue (); @@ -4661,11 +4650,9 @@ sibcall_epilogue_contains (rtx insn) block_for_insn appropriately. */ static void -emit_return_into_block (basic_block bb, rtx line_note) +emit_return_into_block (basic_block bb) { emit_jump_insn_after (gen_return (), BB_END (bb)); - if (line_note) - emit_note_copy_after (line_note, PREV_INSN (BB_END (bb))); } #endif /* HAVE_return */ @@ -4823,6 +4810,7 @@ keep_stack_depressed (rtx insns) info.sp_offset)); retaddr = gen_rtx_MEM (Pmode, retaddr); + MEM_NOTRAP_P (retaddr) = 1; /* If there is a pending load to the equivalent register for SP and we reference that register, we must load our address into @@ -4838,10 +4826,9 @@ keep_stack_depressed (rtx insns) && !fixed_regs[regno] && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno) && !REGNO_REG_SET_P - (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno) + (DF_LR_IN (EXIT_BLOCK_PTR), regno) && !refers_to_regno_p (regno, - regno + hard_regno_nregs[regno] - [Pmode], + end_hard_regno (Pmode, regno), info.equiv_reg_src, NULL) && info.const_equiv[regno] == 0) break; @@ -5050,8 +5037,8 @@ emit_equiv_load (struct epi_info *p) this into place with notes indicating where the prologue ends and where the epilogue begins. Update the basic block information when possible. 
*/ -void -thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) +static void +thread_prologue_and_epilogue_insns (void) { int inserted = 0; edge e; @@ -5073,6 +5060,11 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) seq = gen_prologue (); emit_insn (seq); + /* Insert an explicit USE for the frame pointer + if the profiling is on and the frame pointer is required. */ + if (current_function_profile && frame_pointer_needed) + emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); + /* Retain a map of the prologue insns. */ record_insns (seq, &prologue); prologue_end = emit_note (NOTE_INSN_PROLOGUE_END); @@ -5130,18 +5122,6 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) if (BB_HEAD (last) == label && LABEL_P (label)) { edge_iterator ei2; - rtx epilogue_line_note = NULL_RTX; - - /* Locate the line number associated with the closing brace, - if we can find one. */ - for (seq = get_last_insn (); - seq && ! active_insn_p (seq); - seq = PREV_INSN (seq)) - if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0) - { - epilogue_line_note = seq; - break; - } for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); ) { @@ -5165,7 +5145,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) with a simple return instruction. */ if (simplejump_p (jump)) { - emit_return_into_block (bb, epilogue_line_note); + emit_return_into_block (bb); delete_insn (jump); } @@ -5202,7 +5182,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) this is still reachable will be determined later. */ emit_barrier_after (BB_END (last)); - emit_return_into_block (last, epilogue_line_note); + emit_return_into_block (last); epilogue_end = BB_END (last); single_succ_edge (last)->flags &= ~EDGE_FALLTHRU; goto epilogue_done; @@ -5264,7 +5244,8 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED) fixup_fallthru_exit_predecessor. */ cfg_layout_initialize (0); FOR_EACH_BB (cur_bb) - if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0) + if (cur_bb->index >= NUM_FIXED_BLOCKS + && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS) cur_bb->aux = cur_bb->next_bb; cfg_layout_finalize (); } @@ -5303,61 +5284,6 @@ epilogue_done: } #endif -#ifdef HAVE_prologue - /* This is probably all useless now that we use locators. */ - if (prologue_end) - { - rtx insn, prev; - - /* GDB handles `break f' by setting a breakpoint on the first - line note after the prologue. Which means (1) that if - there are line number notes before where we inserted the - prologue we should move them, and (2) we should generate a - note before the end of the first basic block, if there isn't - one already there. - - ??? This behavior is completely broken when dealing with - multiple entry functions. We simply place the note always - into first basic block and let alternate entry points - to be missed. - */ - - for (insn = prologue_end; insn; insn = prev) - { - prev = PREV_INSN (insn); - if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0) - { - /* Note that we cannot reorder the first insn in the - chain, since rest_of_compilation relies on that - remaining constant. */ - if (prev == NULL) - break; - reorder_insns (insn, insn, prologue_end); - } - } - - /* Find the last line number note in the first block. */ - for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb); - insn != prologue_end && insn; - insn = PREV_INSN (insn)) - if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0) - break; - - /* If we didn't find one, make a copy of the first line number - we run across. */ - if (! 
insn) - { - for (insn = next_active_insn (prologue_end); - insn; - insn = PREV_INSN (insn)) - if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0) - { - emit_note_copy_after (insn, prologue_end); - break; - } - } - } -#endif #ifdef HAVE_epilogue if (epilogue_end) { @@ -5365,27 +5291,30 @@ epilogue_done: /* Similarly, move any line notes that appear after the epilogue. There is no need, however, to be quite so anal about the existence - of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly) + of such a note. Also possibly move NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug info generation. */ for (insn = epilogue_end; insn; insn = next) { next = NEXT_INSN (insn); if (NOTE_P (insn) - && (NOTE_LINE_NUMBER (insn) > 0 - || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG - || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)) + && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)) reorder_insns (insn, insn, PREV_INSN (epilogue_end)); } } #endif + + /* Threading the prologue and epilogue changes the artificial refs + in the entry and exit blocks. */ + epilogue_completed = 1; + df_update_entry_exit_and_calls (); } /* Reposition the prologue-end and epilogue-begin notes after instruction scheduling and delayed branch scheduling. */ void -reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) +reposition_prologue_and_epilogue_notes (void) { #if defined (HAVE_prologue) || defined (HAVE_epilogue) rtx insn, last, note; @@ -5398,11 +5327,11 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) /* Scan from the beginning until we reach the last prologue insn. We apparently can't depend on basic_block_{head,end} after reorg has run. */ - for (insn = f; insn; insn = NEXT_INSN (insn)) + for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) { if (NOTE_P (insn)) { - if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END) + if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END) note = insn; } else if (contains (insn, &prologue)) @@ -5421,7 +5350,7 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) { for (note = last; (note = NEXT_INSN (note));) if (NOTE_P (note) - && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END) + && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END) break; } @@ -5443,7 +5372,7 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) { if (NOTE_P (insn)) { - if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG) + if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG) note = insn; } else if (contains (insn, &epilogue)) @@ -5462,7 +5391,7 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) { for (note = insn; (note = PREV_INSN (note));) if (NOTE_P (note) - && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG) + && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG) break; } @@ -5473,74 +5402,57 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED) #endif /* HAVE_prologue or HAVE_epilogue */ } -/* Resets insn_block_boundaries array. */ - -void -reset_block_changes (void) -{ - VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block"); - VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE); -} - -/* Record the boundary for BLOCK. 
*/ -void -record_block_change (tree block) -{ - int i, n; - tree last_block; - - if (!block) - return; - - last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block); - VARRAY_POP (cfun->ib_boundaries_block); - n = get_max_uid (); - for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++) - VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block); - - VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block); -} - -/* Finishes record of boundaries. */ -void finalize_block_changes (void) -{ - record_block_change (DECL_INITIAL (current_function_decl)); -} - -/* For INSN return the BLOCK it belongs to. */ -void -check_block_change (rtx insn, tree *block) -{ - unsigned uid = INSN_UID (insn); - - if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block)) - return; - - *block = VARRAY_TREE (cfun->ib_boundaries_block, uid); -} - -/* Releases the ib_boundaries_block records. */ -void -free_block_changes (void) -{ - cfun->ib_boundaries_block = NULL; -} - /* Returns the name of the current function. */ const char * current_function_name (void) { return lang_hooks.decl_printable_name (cfun->decl, 2); } + +/* Returns the raw (mangled) name of the current function. */ +const char * +current_function_assembler_name (void) +{ + return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl)); +} -static void +static unsigned int rest_of_handle_check_leaf_regs (void) { #ifdef LEAF_REGISTERS current_function_uses_only_leaf_regs = optimize > 0 && only_leaf_regs_used () && leaf_function_p (); #endif + return 0; +} + +/* Insert a TYPE into the used types hash table of CFUN. */ +static void +used_types_insert_helper (tree type, struct function *func) +{ + if (type != NULL && func != NULL) + { + void **slot; + + if (func->used_types_hash == NULL) + func->used_types_hash = htab_create_ggc (37, htab_hash_pointer, + htab_eq_pointer, NULL); + slot = htab_find_slot (func->used_types_hash, type, INSERT); + if (*slot == NULL) + *slot = type; + } +} + +/* Given a type, insert it into the used hash table in cfun. */ +void +used_types_insert (tree t) +{ + while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE) + t = TREE_TYPE (t); + t = TYPE_MAIN_VARIANT (t); + if (debug_info_level > DINFO_LEVEL_NONE) + used_types_insert_helper (t, cfun); } struct tree_opt_pass pass_leaf_regs = @@ -5560,5 +5472,38 @@ struct tree_opt_pass pass_leaf_regs = 0 /* letter */ }; +static unsigned int +rest_of_handle_thread_prologue_and_epilogue (void) +{ + if (optimize) + cleanup_cfg (CLEANUP_EXPENSIVE); + /* On some machines, the prologue and epilogue code, or parts thereof, + can be represented as RTL. Doing so lets us schedule insns between + it and the rest of the code and also allows delayed branch + scheduling to operate in the epilogue. */ + + thread_prologue_and_epilogue_insns (); + return 0; +} + +struct tree_opt_pass pass_thread_prologue_and_epilogue = +{ + "pro_and_epilogue", /* name */ + NULL, /* gate */ + rest_of_handle_thread_prologue_and_epilogue, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + TODO_verify_flow, /* todo_flags_start */ + TODO_dump_func | + TODO_df_finish | + TODO_ggc_collect, /* todo_flags_finish */ + 'w' /* letter */ +}; + #include "gt-function.h"
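
A minimal standalone sketch of the overflow check this patch introduces as frame_offset_overflow earlier in the diff. The target macros it really depends on (FRAME_GROWS_DOWNWARD, GET_MODE_BITSIZE (Pmode), UNITS_PER_WORD) are replaced by illustrative constants for an assumed 32-bit target with a downward-growing frame; the SKETCH_* names and sketch_frame_offset_overflow are hypothetical, for reading alongside the patch rather than a drop-in implementation.

#include <stdio.h>
#include <stdbool.h>

/* Assumptions standing in for the target macros (hypothetical values):
   downward-growing frame, 32-bit pointers, 4-byte words.  */
#define SKETCH_FRAME_GROWS_DOWNWARD 1
#define SKETCH_POINTER_BITS 32
#define SKETCH_UNITS_PER_WORD 4

/* Mirror of the patch's check: the accumulated frame offset must fit
   in the signed half of the pointer range, leaving 64 words of room
   for the fixed part of the frame.  */
static bool
sketch_frame_offset_overflow (long long offset)
{
  unsigned long long size = SKETCH_FRAME_GROWS_DOWNWARD
			    ? (unsigned long long) -offset
			    : (unsigned long long) offset;
  unsigned long long limit
    = ((unsigned long long) 1 << (SKETCH_POINTER_BITS - 1))
      - 64 * SKETCH_UNITS_PER_WORD;

  return size > limit;
}

int
main (void)
{
  /* A frame just under 2GB trips the check once the 64-word headroom
     is counted; a small frame does not.  */
  printf ("%d\n", sketch_frame_offset_overflow (-0x7FFFFFF0LL)); /* 1 */
  printf ("%d\n", sketch_frame_offset_overflow (-4096));         /* 0 */
  return 0;
}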