X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fcalls.c;h=657439a60eb6c44e492e7aa51eefcd54397e6927;hb=6d82c0878e869e6a0ef67a26f958f3944e4974be;hp=d84eb51aa3631a1dea087021cbbed626bcee814b;hpb=c2f47e150f3c68a813f92460462c2e70155f2c67;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/calls.c b/gcc/calls.c index d84eb51aa36..657439a60eb 100644 --- a/gcc/calls.c +++ b/gcc/calls.c @@ -7,7 +7,7 @@ This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free -Software Foundation; either version 2, or (at your option) any later +Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY @@ -16,9 +16,8 @@ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA -02110-1301, USA. */ +along with GCC; see the file COPYING3. If not see +. */ #include "config.h" #include "system.h" @@ -41,6 +40,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA #include "target.h" #include "cgraph.h" #include "except.h" +#include "dbgcnt.h" /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */ #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT) @@ -132,7 +132,8 @@ static int finalize_must_preallocate (int, int, struct arg_data *, static void precompute_arguments (int, int, struct arg_data *); static int compute_argument_block_size (int, struct args_size *, int); static void initialize_argument_information (int, struct arg_data *, - struct args_size *, int, tree, + struct args_size *, int, + tree, tree, tree, CUMULATIVE_ARGS *, int, rtx *, int *, int *, int *, bool *, bool); @@ -142,13 +143,12 @@ static void load_register_parameters (struct arg_data *, int, rtx *, int, int, int *); static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type, enum machine_mode, int, va_list); -static int special_function_p (tree, int); +static int special_function_p (const_tree, int); static int check_sibcall_argument_overlap_1 (rtx); static int check_sibcall_argument_overlap (rtx, struct arg_data *, int); static int combine_pending_stack_adjustment_and_call (int, struct args_size *, unsigned int); -static tree split_complex_values (tree); static tree split_complex_types (tree); #ifdef REG_PARM_STACK_SPACE @@ -469,7 +469,7 @@ emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED, space from the stack such as alloca. */ static int -special_function_p (tree fndecl, int flags) +special_function_p (const_tree fndecl, int flags) { if (fndecl && DECL_NAME (fndecl) && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17 @@ -543,14 +543,14 @@ special_function_p (tree fndecl, int flags) /* Return nonzero when FNDECL represents a call to setjmp. */ int -setjmp_call_p (tree fndecl) +setjmp_call_p (const_tree fndecl) { return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE; } /* Return true when exp contains alloca call. */ bool -alloca_call_p (tree exp) +alloca_call_p (const_tree exp) { if (TREE_CODE (exp) == CALL_EXPR && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR @@ -564,10 +564,10 @@ alloca_call_p (tree exp) /* Detect flags (function attributes) from the function decl or type node. 
*/ int -flags_from_decl_or_type (tree exp) +flags_from_decl_or_type (const_tree exp) { int flags = 0; - tree type = exp; + const_tree type = exp; if (DECL_P (exp)) { @@ -616,7 +616,7 @@ flags_from_decl_or_type (tree exp) /* Detect flags from a CALL_EXPR. */ int -call_expr_flags (tree t) +call_expr_flags (const_tree t) { int flags; tree decl = get_callee_fndecl (t); @@ -889,12 +889,15 @@ store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals) } /* Fill in ARGS_SIZE and ARGS array based on the parameters found in - ACTPARMS. + CALL_EXPR EXP. NUM_ACTUALS is the total number of parameters. N_NAMED_ARGS is the total number of named arguments. + STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return + value, or null. + FNDECL is the tree code for the target of this call (if known) ARGS_SO_FAR holds state needed by the target to know where to place @@ -920,7 +923,8 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, struct arg_data *args, struct args_size *args_size, int n_named_args ATTRIBUTE_UNUSED, - tree actparms, tree fndecl, + tree exp, tree struct_value_addr_value, + tree fndecl, CUMULATIVE_ARGS *args_so_far, int reg_parm_stack_space, rtx *old_stack_level, int *old_pending_adj, @@ -934,7 +938,6 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, int argpos; int i; - tree p; args_size->constant = 0; args_size->var = 0; @@ -954,15 +957,45 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, i = 0, inc = 1; } + /* First fill in the actual arguments in the ARGS array, splitting + complex arguments if necessary. */ + { + int j = i; + call_expr_arg_iterator iter; + tree arg; + + if (struct_value_addr_value) + { + args[j].tree_value = struct_value_addr_value; + j += inc; + } + FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) + { + tree argtype = TREE_TYPE (arg); + if (targetm.calls.split_complex_arg + && argtype + && TREE_CODE (argtype) == COMPLEX_TYPE + && targetm.calls.split_complex_arg (argtype)) + { + tree subtype = TREE_TYPE (argtype); + arg = save_expr (arg); + args[j].tree_value = build1 (REALPART_EXPR, subtype, arg); + j += inc; + args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg); + } + else + args[j].tree_value = arg; + j += inc; + } + } + /* I counts args in order (to be) pushed; ARGPOS counts in order written. */ - for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++) + for (argpos = 0; argpos < num_actuals; i += inc, argpos++) { - tree type = TREE_TYPE (TREE_VALUE (p)); + tree type = TREE_TYPE (args[i].tree_value); int unsignedp; enum machine_mode mode; - args[i].tree_value = TREE_VALUE (p); - /* Replace erroneous argument with constant zero. */ if (type == error_mark_node || !COMPLETE_TYPE_P (type)) args[i].tree_value = integer_zero_node, type = integer_type_node; @@ -1030,7 +1063,7 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, { /* This is a variable-sized object. Make space on the stack for it. 
*/ - rtx size_rtx = expr_size (TREE_VALUE (p)); + rtx size_rtx = expr_size (args[i].tree_value); if (*old_stack_level == 0) { @@ -1047,7 +1080,7 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED, else copy = assign_temp (type, 0, 1, 0); - store_expr (args[i].tree_value, copy, 0); + store_expr (args[i].tree_value, copy, 0, false); if (callee_copies) *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK); @@ -1189,13 +1222,12 @@ compute_argument_block_size (int reg_parm_stack_space, = size_binop (MAX_EXPR, args_size->var, ssize_int (reg_parm_stack_space)); -#ifndef OUTGOING_REG_PARM_STACK_SPACE /* The area corresponding to register parameters is not to count in the size of the block we need. So make the adjustment. */ - args_size->var - = size_binop (MINUS_EXPR, args_size->var, - ssize_int (reg_parm_stack_space)); -#endif + if (!OUTGOING_REG_PARM_STACK_SPACE) + args_size->var + = size_binop (MINUS_EXPR, args_size->var, + ssize_int (reg_parm_stack_space)); } } else @@ -1213,9 +1245,8 @@ compute_argument_block_size (int reg_parm_stack_space, args_size->constant = MAX (args_size->constant, reg_parm_stack_space); -#ifndef OUTGOING_REG_PARM_STACK_SPACE - args_size->constant -= reg_parm_stack_space; -#endif + if (!OUTGOING_REG_PARM_STACK_SPACE) + args_size->constant -= reg_parm_stack_space; } return unadjusted_args_size; } @@ -1237,13 +1268,25 @@ precompute_arguments (int flags, int num_actuals, struct arg_data *args) /* If this is a libcall, then precompute all arguments so that we do not get extraneous instructions emitted as part of the libcall sequence. */ - if ((flags & ECF_LIBCALL_BLOCK) == 0) + + /* If we preallocated the stack space, and some arguments must be passed + on the stack, then we must precompute any parameter which contains a + function call which will store arguments on the stack. + Otherwise, evaluating the parameter may clobber previous parameters + which have already been stored into the stack. (we have code to avoid + such case by saving the outgoing stack arguments, but it results in + worse code) */ + if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS) return; for (i = 0; i < num_actuals; i++) { enum machine_mode mode; + if ((flags & ECF_LIBCALL_BLOCK) == 0 + && TREE_CODE (args[i].tree_value) != CALL_EXPR) + continue; + /* If this is an addressable type, we cannot pre-evaluate it. */ gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))); @@ -1449,7 +1492,7 @@ rtx_for_function_call (tree fndecl, tree addr) { /* If this is the first use of the function, see if we need to make an external definition for it. */ - if (! TREE_USED (fndecl)) + if (!TREE_USED (fndecl) && fndecl != current_function_decl) { assemble_external (fndecl); TREE_USED (fndecl) = 1; @@ -1481,10 +1524,14 @@ mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size) if (addr == current_function_internal_arg_pointer) i = 0; else if (GET_CODE (addr) == PLUS - && (XEXP (addr, 0) - == current_function_internal_arg_pointer) + && XEXP (addr, 0) == current_function_internal_arg_pointer && GET_CODE (XEXP (addr, 1)) == CONST_INT) i = INTVAL (XEXP (addr, 1)); + /* Return true for arg pointer based indexed addressing. 
*/ + else if (GET_CODE (addr) == PLUS + && (XEXP (addr, 0) == current_function_internal_arg_pointer + || XEXP (addr, 1) == current_function_internal_arg_pointer)) + return true; else return false; @@ -1809,6 +1856,31 @@ shift_return_value (enum machine_mode mode, bool left_p, rtx value) return true; } +/* If X is a likely-spilled register value, copy it to a pseudo + register and return that register. Return X otherwise. */ + +static rtx +avoid_likely_spilled_reg (rtx x) +{ + rtx new; + + if (REG_P (x) + && HARD_REGISTER_P (x) + && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x)))) + { + /* Make sure that we generate a REG rather than a CONCAT. + Moves into CONCATs can need nontrivial instructions, + and the whole point of this function is to avoid + using the hard register directly in such a situation. */ + generating_concat_p = 0; + new = gen_reg_rtx (GET_MODE (x)); + generating_concat_p = 1; + emit_move_insn (new, x); + return new; + } + return x; +} + /* Generate all the code for a CALL_EXPR exp and return an rtx for its value. Store the value in TARGET (specified as an rtx) if convenient. @@ -1821,9 +1893,6 @@ expand_call (tree exp, rtx target, int ignore) /* Nonzero if we are currently expanding a call. */ static int currently_expanding_call = 0; - /* List of actual parameters. */ - /* FIXME: rewrite this so that it doesn't cons up a TREE_LIST. */ - tree actparms = CALL_EXPR_ARGS (exp); /* RTX for the function to be called. */ rtx funexp; /* Sequence of insns to perform a normal "call". */ @@ -1851,6 +1920,8 @@ expand_call (tree exp, rtx target, int ignore) an extra, implicit first parameter. Otherwise, it is passed by being copied directly into struct_value_rtx. */ int structure_value_addr_parm = 0; + /* Holds the value of implicit argument for the struct value. */ + tree structure_value_addr_value = NULL_TREE; /* Size of aggregate value wanted, or zero if none wanted or if we are using the non-reentrant PCC calling convention or expecting the value in registers. */ @@ -1865,6 +1936,8 @@ expand_call (tree exp, rtx target, int ignore) /* Number of named args. Args after this are anonymous ones and they must all go on the stack. */ int n_named_args; + /* Number of complex actual arguments that need to be split. */ + int num_complex_actuals = 0; /* Vector of information about each argument. Arguments are numbered in the order they will be pushed, @@ -1967,9 +2040,10 @@ expand_call (tree exp, rtx target, int ignore) { bool volatilep = false; tree arg; + call_expr_arg_iterator iter; - for (arg = actparms; arg; arg = TREE_CHAIN (arg)) - if (TREE_THIS_VOLATILE (TREE_VALUE (arg))) + FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) + if (TREE_THIS_VOLATILE (arg)) { volatilep = true; break; @@ -1977,9 +2051,8 @@ expand_call (tree exp, rtx target, int ignore) if (! volatilep) { - for (arg = actparms; arg; arg = TREE_CHAIN (arg)) - expand_expr (TREE_VALUE (arg), const0_rtx, - VOIDmode, EXPAND_NORMAL); + FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) + expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL); return const0_rtx; } } @@ -1988,10 +2061,8 @@ expand_call (tree exp, rtx target, int ignore) reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); #endif -#ifndef OUTGOING_REG_PARM_STACK_SPACE - if (reg_parm_stack_space > 0 && PUSH_ARGS) + if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS) must_preallocate = 1; -#endif /* Set up a place to return a structure. 
*/ @@ -2040,12 +2111,21 @@ expand_call (tree exp, rtx target, int ignore) gcc_assert (POINTER_TYPE_P (funtype)); funtype = TREE_TYPE (funtype); - /* Munge the tree to split complex arguments into their imaginary - and real parts. */ + /* Count whether there are actual complex arguments that need to be split + into their real and imaginary parts. Munge the type_arg_types + appropriately here as well. */ if (targetm.calls.split_complex_arg) { + call_expr_arg_iterator iter; + tree arg; + FOR_EACH_CALL_EXPR_ARG (arg, iter, exp) + { + tree type = TREE_TYPE (arg); + if (type && TREE_CODE (type) == COMPLEX_TYPE + && targetm.calls.split_complex_arg (type)) + num_complex_actuals++; + } type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype)); - actparms = split_complex_values (actparms); } else type_arg_types = TYPE_ARG_TYPES (funtype); @@ -2054,7 +2134,8 @@ expand_call (tree exp, rtx target, int ignore) current_function_calls_alloca = 1; /* If struct_value_rtx is 0, it means pass the address - as if it were an extra parameter. */ + as if it were an extra parameter. Put the argument expression + in structure_value_addr_value. */ if (structure_value_addr && struct_value == 0) { /* If structure_value_addr is a REG other than @@ -2070,17 +2151,14 @@ expand_call (tree exp, rtx target, int ignore) (Pmode, structure_value_addr)) : structure_value_addr); - actparms - = tree_cons (error_mark_node, - make_tree (build_pointer_type (TREE_TYPE (funtype)), - temp), - actparms); + structure_value_addr_value = + make_tree (build_pointer_type (TREE_TYPE (funtype)), temp); structure_value_addr_parm = 1; } /* Count the arguments and set NUM_ACTUALS. */ - for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p)) - num_actuals++; + num_actuals = + call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm; /* Compute number of named args. First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */ @@ -2138,7 +2216,8 @@ expand_call (tree exp, rtx target, int ignore) /* Build up entries in the ARGS array, compute the size of the arguments into ARGS_SIZE, etc. */ initialize_argument_information (num_actuals, args, &args_size, - n_named_args, actparms, fndecl, + n_named_args, exp, + structure_value_addr_value, fndecl, &args_so_far, reg_parm_stack_space, &old_stack_level, &old_pending_adj, &must_preallocate, &flags, @@ -2180,7 +2259,8 @@ expand_call (tree exp, rtx target, int ignore) if (currently_expanding_call++ != 0 || !flag_optimize_sibling_calls || args_size.var - || lookup_stmt_eh_region (exp) >= 0) + || lookup_stmt_eh_region (exp) >= 0 + || dbg_cnt (tail_call) == false) try_tail_call = 0; /* Rest of purposes for tail call optimizations to fail. */ @@ -2384,12 +2464,11 @@ expand_call (tree exp, rtx target, int ignore) Another approach might be to try to reorder the argument evaluations to avoid this conflicting stack usage. */ -#ifndef OUTGOING_REG_PARM_STACK_SPACE /* Since we will be writing into the entire argument area, the map must be allocated for its entire size, not just the part that is the responsibility of the caller. */ - needed += reg_parm_stack_space; -#endif + if (!OUTGOING_REG_PARM_STACK_SPACE) + needed += reg_parm_stack_space; #ifdef ARGS_GROW_DOWNWARD highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, @@ -2485,12 +2564,10 @@ expand_call (tree exp, rtx target, int ignore) an argument. 
*/ if (stack_arg_under_construction) { -#ifndef OUTGOING_REG_PARM_STACK_SPACE - rtx push_size = GEN_INT (reg_parm_stack_space - + adjusted_args_size.constant); -#else - rtx push_size = GEN_INT (adjusted_args_size.constant); -#endif + rtx push_size + = GEN_INT (adjusted_args_size.constant + + (OUTGOING_REG_PARM_STACK_SPACE ? 0 + : reg_parm_stack_space)); if (old_stack_level == 0) { emit_stack_save (SAVE_BLOCK, &old_stack_level, @@ -2660,11 +2737,9 @@ expand_call (tree exp, rtx target, int ignore) /* If register arguments require space on the stack and stack space was not preallocated, allocate stack space here for arguments passed in registers. */ -#ifdef OUTGOING_REG_PARM_STACK_SPACE - if (!ACCUMULATE_OUTGOING_ARGS + if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS && must_preallocate == 0 && reg_parm_stack_space > 0) anti_adjust_stack (GEN_INT (reg_parm_stack_space)); -#endif /* Pass the function the address in which to return a structure value. */ @@ -2818,9 +2893,10 @@ expand_call (tree exp, rtx target, int ignore) valreg = temp; } - /* For calls to `setjmp', etc., inform flow.c it should complain - if nonvolatile values are live. For functions that cannot return, - inform flow that control does not fall through. */ + /* For calls to `setjmp', etc., inform + function.c:setjmp_warnings that it should complain if + nonvolatile values are live. For functions that cannot + return, inform flow that control does not fall through. */ if ((flags & ECF_NORETURN) || pass == 0) { @@ -2902,11 +2978,8 @@ expand_call (tree exp, rtx target, int ignore) /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard reg to a plain register. */ - if (REG_P (valreg) - && HARD_REGISTER_P (valreg) - && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg))) - && !(REG_P (target) && !HARD_REGISTER_P (target))) - valreg = copy_to_reg (valreg); + if (!REG_P (target) || HARD_REGISTER_P (target)) + valreg = avoid_likely_spilled_reg (valreg); /* If TARGET is a MEM in the argument area, and we have saved part of the argument area, then we can't store @@ -2951,7 +3024,7 @@ expand_call (tree exp, rtx target, int ignore) sibcall_failure = 1; } else - target = copy_to_reg (valreg); + target = copy_to_reg (avoid_likely_spilled_reg (valreg)); if (targetm.calls.promote_function_return(funtype)) { @@ -3136,7 +3209,7 @@ fixup_tail_calls (void) /* There are never REG_EQUIV notes for the incoming arguments after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */ if (NOTE_P (insn) - && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG) + && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG) break; note = find_reg_note (insn, REG_EQUIV, 0); @@ -3147,60 +3220,6 @@ fixup_tail_calls (void) } } -/* Traverse an argument list in VALUES and expand all complex - arguments into their components. */ -static tree -split_complex_values (tree values) -{ - tree p; - - /* Before allocating memory, check for the common case of no complex. 
*/ - for (p = values; p; p = TREE_CHAIN (p)) - { - tree type = TREE_TYPE (TREE_VALUE (p)); - if (type && TREE_CODE (type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (type)) - goto found; - } - return values; - - found: - values = copy_list (values); - - for (p = values; p; p = TREE_CHAIN (p)) - { - tree complex_value = TREE_VALUE (p); - tree complex_type; - - complex_type = TREE_TYPE (complex_value); - if (!complex_type) - continue; - - if (TREE_CODE (complex_type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (complex_type)) - { - tree subtype; - tree real, imag, next; - - subtype = TREE_TYPE (complex_type); - complex_value = save_expr (complex_value); - real = build1 (REALPART_EXPR, subtype, complex_value); - imag = build1 (IMAGPART_EXPR, subtype, complex_value); - - TREE_VALUE (p) = real; - next = TREE_CHAIN (p); - imag = build_tree_list (NULL_TREE, imag); - TREE_CHAIN (p) = imag; - TREE_CHAIN (imag) = next; - - /* Skip the newly created node. */ - p = TREE_CHAIN (p); - } - } - - return values; -} - /* Traverse a list of TYPES and expand all complex types into their components. */ static tree @@ -3540,9 +3559,8 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value, args_size.constant = MAX (args_size.constant, reg_parm_stack_space); -#ifndef OUTGOING_REG_PARM_STACK_SPACE - args_size.constant -= reg_parm_stack_space; -#endif + if (!OUTGOING_REG_PARM_STACK_SPACE) + args_size.constant -= reg_parm_stack_space; if (args_size.constant > current_function_outgoing_args_size) current_function_outgoing_args_size = args_size.constant; @@ -3563,12 +3581,11 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value, needed = args_size.constant; -#ifndef OUTGOING_REG_PARM_STACK_SPACE /* Since we will be writing into the entire argument area, the map must be allocated for its entire size, not just the part that is the responsibility of the caller. */ - needed += reg_parm_stack_space; -#endif + if (!OUTGOING_REG_PARM_STACK_SPACE) + needed += reg_parm_stack_space; #ifdef ARGS_GROW_DOWNWARD highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, @@ -3782,7 +3799,18 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value, if (reg != 0 && GET_CODE (reg) == PARALLEL) use_group_regs (&call_fusage, reg); else if (reg != 0) - use_reg (&call_fusage, reg); + { + int partial = argvec[count].partial; + if (partial) + { + int nregs; + gcc_assert (partial % UNITS_PER_WORD == 0); + nregs = partial / UNITS_PER_WORD; + use_regs (&call_fusage, REGNO (reg), nregs); + } + else + use_reg (&call_fusage, reg); + } } /* Pass the function the address in which to return a structure value. */ @@ -3824,9 +3852,10 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value, valreg, old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far); - /* For calls to `setjmp', etc., inform flow.c it should complain - if nonvolatile values are live. For functions that cannot return, - inform flow that control does not fall through. */ + /* For calls to `setjmp', etc., inform function.c:setjmp_warnings + that it should complain if nonvolatile values are live. For + functions that cannot return, inform flow that control does not + fall through. */ if (flags & ECF_NORETURN) { @@ -4318,6 +4347,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags, /* expand_call should ensure this. 
*/ gcc_assert (!arg->locate.offset.var + && arg->locate.size.var == 0 && GET_CODE (size_rtx) == CONST_INT); if (arg->locate.offset.constant > i) @@ -4327,7 +4357,21 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags, } else if (arg->locate.offset.constant < i) { - if (i < arg->locate.offset.constant + INTVAL (size_rtx)) + /* Use arg->locate.size.constant instead of size_rtx + because we only care about the part of the argument + on the stack. */ + if (i < (arg->locate.offset.constant + + arg->locate.size.constant)) + sibcall_failure = 1; + } + else + { + /* Even though they appear to be at the same location, + if part of the outgoing argument is in registers, + they aren't really at the same location. Check for + this by making sure that the incoming size is the + same as the outgoing size. */ + if (arg->locate.size.constant != INTVAL (size_rtx)) sibcall_failure = 1; } } @@ -4382,7 +4426,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags, bool must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED, - tree type) + const_tree type) { if (!type) return false; @@ -4404,7 +4448,7 @@ must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED, /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */ bool -must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type) +must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type) { if (!type) return false;
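
For reference, a minimal sketch of the argument-iteration idiom the patch switches to: the old TREE_LIST walk over actparms ("for (p = actparms; p; p = TREE_CHAIN (p))" reading TREE_VALUE (p)) is replaced by the flat CALL_EXPR argument iterator.  The helper below is hypothetical and only restates, in isolation, the complex-argument counting loop the patch adds to expand_call; FOR_EACH_CALL_EXPR_ARG, call_expr_arg_iterator and targetm.calls.split_complex_arg are the GCC internals already used in the hunks above, while the helper name itself is invented for illustration.

/* Hypothetical helper (not part of the patch): count the actual
   arguments of CALL_EXPR EXP whose complex type the target wants
   passed as separate real and imaginary parts, mirroring the loop
   added to expand_call above.  */

static int
count_split_complex_args (tree exp)
{
  call_expr_arg_iterator iter;
  tree arg;
  int n = 0;

  /* The split_complex_arg target hook may be null.  */
  if (!targetm.calls.split_complex_arg)
    return 0;

  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      tree type = TREE_TYPE (arg);
      if (type
          && TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        n++;
    }
  return n;
}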