X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fcalls.c;h=482d4876f98093348029623e3e8d88f1eb333aa9;hb=11a8ca07364cc782101c13138a71707359bc229b;hp=abd88fe6c72b37a46c2e80b4945df3dbe383b4b5;hpb=a35a63fff496e284f010031e5d6d3451a9a26579;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/calls.c b/gcc/calls.c index abd88fe6c72..482d4876f98 100644 --- a/gcc/calls.c +++ b/gcc/calls.c @@ -1,5 +1,5 @@ /* Convert function calls to rtl insns, for GNU C compiler. - Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998 + Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc. This file is part of GCC. @@ -27,6 +27,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "tree.h" #include "flags.h" #include "expr.h" +#include "optabs.h" #include "libfuncs.h" #include "function.h" #include "regs.h" @@ -37,26 +38,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "sbitmap.h" #include "langhooks.h" #include "target.h" - -/* Decide whether a function's arguments should be processed - from first to last or from last to first. - - They should if the stack and args grow in opposite directions, but - only if we have push insns. */ - -#ifdef PUSH_ROUNDING - -#ifndef PUSH_ARGS_REVERSED -#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD) -#define PUSH_ARGS_REVERSED PUSH_ARGS -#endif -#endif - -#endif - -#ifndef PUSH_ARGS_REVERSED -#define PUSH_ARGS_REVERSED 0 -#endif +#include "cgraph.h" +#include "except.h" #ifndef STACK_POINTER_OFFSET #define STACK_POINTER_OFFSET 0 @@ -96,16 +79,8 @@ struct arg_data even though pass_on_stack is zero, just because FUNCTION_ARG says so. pass_on_stack identifies arguments that *cannot* go in registers. */ int pass_on_stack; - /* Offset of this argument from beginning of stack-args. */ - struct args_size offset; - /* Similar, but offset to the start of the stack slot. Different from - OFFSET if this arg pads downward. */ - struct args_size slot_offset; - /* Size of this argument on the stack, rounded up for any padding it gets, - parts of the argument passed in registers do not count. - If REG_PARM_STACK_SPACE is defined, then register parms - are counted here as well. */ - struct args_size size; + /* Some fields packaged up for locate_and_pad_parm. */ + struct locate_and_pad_arg_data locate; /* Location on the stack at which parameter should be stored. The store has already been done if STACK == VALUE. */ rtx stack; @@ -121,9 +96,6 @@ struct arg_data word-sized pseudos we made. */ rtx *aligned_regs; int n_aligned_regs; - /* The amount that the stack pointer needs to be adjusted to - force alignment for the next argument. */ - struct args_size alignment_pad; }; /* A vector of one char per byte of stack space. A byte if nonzero if @@ -149,59 +121,41 @@ static sbitmap stored_args_map; argument list for the constructor call. 
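
   As an illustrative sketch (an editor's addition, not part of the
   original sources; all names here are hypothetical): in C code such as

     struct big { int x[100]; };
     extern struct big make_big (void);
     extern void use_big (struct big);

     void call_it (void) { use_big (make_big ()); }

   the BLKmode value returned by make_big may be constructed directly
   on the stack while the argument block for the call to use_big is
   being laid out, which is exactly the overlap this flag guards
   against.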
*/ int stack_arg_under_construction; -static int calls_function PARAMS ((tree, int)); -static int calls_function_1 PARAMS ((tree, int)); - -static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT, - HOST_WIDE_INT, HOST_WIDE_INT, rtx, - rtx, int, rtx, int, - CUMULATIVE_ARGS *)); -static void precompute_register_parameters PARAMS ((int, - struct arg_data *, - int *)); -static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int, - int)); -static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *, - int)); -static int finalize_must_preallocate PARAMS ((int, int, - struct arg_data *, - struct args_size *)); -static void precompute_arguments PARAMS ((int, int, - struct arg_data *)); -static int compute_argument_block_size PARAMS ((int, - struct args_size *, - int)); -static void initialize_argument_information PARAMS ((int, - struct arg_data *, - struct args_size *, - int, tree, tree, - CUMULATIVE_ARGS *, - int, rtx *, int *, - int *, int *)); -static void compute_argument_addresses PARAMS ((struct arg_data *, - rtx, int)); -static rtx rtx_for_function_call PARAMS ((tree, tree)); -static void load_register_parameters PARAMS ((struct arg_data *, - int, rtx *, int, - int, int *)); -static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, - enum libcall_type, - enum machine_mode, - int, va_list)); -static int special_function_p PARAMS ((tree, int)); -static rtx try_to_integrate PARAMS ((tree, tree, rtx, - int, tree, rtx)); -static int check_sibcall_argument_overlap_1 PARAMS ((rtx)); -static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *, - int)); - -static int combine_pending_stack_adjustment_and_call - PARAMS ((int, struct args_size *, int)); -static tree fix_unsafe_tree PARAMS ((tree)); +static int calls_function (tree, int); +static int calls_function_1 (tree, int); + +static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, + HOST_WIDE_INT, rtx, rtx, int, rtx, int, + CUMULATIVE_ARGS *); +static void precompute_register_parameters (int, struct arg_data *, int *); +static int store_one_arg (struct arg_data *, rtx, int, int, int); +static void store_unaligned_arguments_into_pseudos (struct arg_data *, int); +static int finalize_must_preallocate (int, int, struct arg_data *, + struct args_size *); +static void precompute_arguments (int, int, struct arg_data *); +static int compute_argument_block_size (int, struct args_size *, int); +static void initialize_argument_information (int, struct arg_data *, + struct args_size *, int, tree, + tree, CUMULATIVE_ARGS *, int, + rtx *, int *, int *, int *); +static void compute_argument_addresses (struct arg_data *, rtx, int); +static rtx rtx_for_function_call (tree, tree); +static void load_register_parameters (struct arg_data *, int, rtx *, int, + int, int *); +static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type, + enum machine_mode, int, va_list); +static int special_function_p (tree, int); +static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx); +static int check_sibcall_argument_overlap_1 (rtx); +static int check_sibcall_argument_overlap (rtx, struct arg_data *, int); + +static int combine_pending_stack_adjustment_and_call (int, struct args_size *, + int); +static tree fix_unsafe_tree (tree); #ifdef REG_PARM_STACK_SPACE -static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *)); -static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int)); +static rtx save_fixed_argument_area (int, rtx, int *, int *); +static void 
restore_fixed_argument_area (rtx, rtx, int, int); #endif /* If WHICH is 1, return 1 if EXP contains a call to the built-in function @@ -215,9 +169,7 @@ static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int)); static tree calls_function_save_exprs; static int -calls_function (exp, which) - tree exp; - int which; +calls_function (tree exp, int which) { int val; @@ -230,9 +182,7 @@ calls_function (exp, which) /* Recursive function to do the work of above function. */ static int -calls_function_1 (exp, which) - tree exp; - int which; +calls_function_1 (tree exp, int which) { int i; enum tree_code code = TREE_CODE (exp); @@ -312,8 +262,8 @@ calls_function_1 (exp, which) break; } - /* Only expressions, references, and blocks can contain calls. */ - if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b') + /* Only expressions and blocks can contain calls. */ + if (! IS_EXPR_CODE_CLASS (class) && class != 'b') return 0; for (i = 0; i < length; i++) @@ -332,12 +282,8 @@ calls_function_1 (exp, which) CALL_INSN_FUNCTION_USAGE information. */ rtx -prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp) - rtx funexp; - tree fndecl; - rtx *call_fusage; - int reg_parm_seen; - int sibcallp; +prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage, + int reg_parm_seen, int sibcallp) { rtx static_chain_value = 0; @@ -418,21 +364,13 @@ prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp) denote registers used by the called function. */ static void -emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, - struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop, - call_fusage, ecf_flags, args_so_far) - rtx funexp; - tree fndecl ATTRIBUTE_UNUSED; - tree funtype ATTRIBUTE_UNUSED; - HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED; - HOST_WIDE_INT rounded_stack_size; - HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED; - rtx next_arg_reg ATTRIBUTE_UNUSED; - rtx valreg; - int old_inhibit_defer_pop; - rtx call_fusage; - int ecf_flags; - CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED; +emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED, + HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED, + HOST_WIDE_INT rounded_stack_size, + HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED, + rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg, + int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags, + CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED) { rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size); rtx call_insn; @@ -446,7 +384,7 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, #ifdef CALL_POPS_ARGS n_popped += CALL_POPS_ARGS (* args_so_far); #endif - + /* Ensure address is valid. SYMBOL_REF is already valid, so no need, and we don't want to load it into a register as an optimization, because prepare_call_address already did it if it should be done. */ @@ -545,14 +483,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, #endif abort (); - /* Find the CALL insn we just emitted. */ - for (call_insn = get_last_insn (); - call_insn && GET_CODE (call_insn) != CALL_INSN; - call_insn = PREV_INSN (call_insn)) - ; - - if (! call_insn) - abort (); + /* Find the call we just emitted. */ + call_insn = last_call_insn (); /* Mark memory as used for "pure" function call. 
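   A "pure" function is allowed to read arbitrary global memory, so the
   call must still be recorded as a use of memory even though it is
   known not to clobber it.  As an illustration (an editor's addition,
   not part of the original sources), a function declared as

     extern int count_nodes (const struct node *head)
          __attribute__ ((pure));

   may walk a list held in memory, and the (use (mem:BLK (scratch)))
   emitted below tells the RTL optimizers that such reads happen.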
*/ if (ecf_flags & ECF_PURE) @@ -563,20 +495,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))), call_fusage); - /* Put the register usage information on the CALL. If there is already - some usage information, put ours at the end. */ - if (CALL_INSN_FUNCTION_USAGE (call_insn)) - { - rtx link; - - for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; - link = XEXP (link, 1)) - ; - - XEXP (link, 1) = call_fusage; - } - else - CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; + /* Put the register usage information there. */ + add_function_usage_to (call_insn, call_fusage); /* If this is a const call, then set the insn's unchanging bit. */ if (ecf_flags & (ECF_CONST | ECF_PURE)) @@ -587,6 +507,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, if (ecf_flags & ECF_NOTHROW) REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx, REG_NOTES (call_insn)); + else + note_eh_region_may_contain_throw (); if (ecf_flags & ECF_NORETURN) REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx, @@ -608,6 +530,10 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, if the context of the call as a whole permits. */ inhibit_defer_pop = old_inhibit_defer_pop; + /* Don't bother cleaning up after a noreturn function. */ + if (ecf_flags & (ECF_NORETURN | ECF_LONGJMP)) + return; + if (n_popped > 0) { if (!already_popped) @@ -664,24 +590,25 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size, Similarly set LONGJMP for if the function is in the longjmp family. - Set MALLOC for any of the standard memory allocation functions which - allocate from the heap. - Set MAY_BE_ALLOCA for any memory allocation function that might allocate space from the stack such as alloca. */ static int -special_function_p (fndecl, flags) - tree fndecl; - int flags; +special_function_p (tree fndecl, int flags) { if (! (flags & ECF_MALLOC) && fndecl && DECL_NAME (fndecl) && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17 /* Exclude functions not at the file scope, or not `extern', since they are not the magic functions we would otherwise - think they are. */ - && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl)) + think they are. + FIXME: this should be handled with attributes, not with this + hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong + because you can declare fork() inside a function if you + wish. */ + && (DECL_CONTEXT (fndecl) == NULL_TREE + || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL) + && TREE_PUBLIC (fndecl)) { const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl)); const char *tname = name; @@ -746,19 +673,6 @@ special_function_p (fndecl, flags) || ((tname[5] == 'p' || tname[5] == 'e') && tname[6] == '\0')))) flags |= ECF_FORK_OR_EXEC; - - /* Do not add any more malloc-like functions to this list, - instead mark them as malloc functions using the malloc attribute. - Note, realloc is not suitable for attribute malloc since - it may return the same address across multiple calls. - C++ operator new is not suitable because it is not required - to return a unique pointer; indeed, the standard placement new - just returns its argument. */ - else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode - && (! strcmp (tname, "malloc") - || ! strcmp (tname, "calloc") - || ! 
strcmp (tname, "strdup"))) - flags |= ECF_MALLOC; } return flags; } @@ -766,16 +680,14 @@ special_function_p (fndecl, flags) /* Return nonzero when tree represent call to longjmp. */ int -setjmp_call_p (fndecl) - tree fndecl; +setjmp_call_p (tree fndecl) { return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE; } /* Return true when exp contains alloca call. */ bool -alloca_call_p (exp) - tree exp; +alloca_call_p (tree exp) { if (TREE_CODE (exp) == CALL_EXPR && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR @@ -790,30 +702,41 @@ alloca_call_p (exp) /* Detect flags (function attributes) from the function decl or type node. */ int -flags_from_decl_or_type (exp) - tree exp; +flags_from_decl_or_type (tree exp) { int flags = 0; tree type = exp; - /* ??? We can't set IS_MALLOC for function types? */ + if (DECL_P (exp)) { + struct cgraph_rtl_info *i = cgraph_rtl_info (exp); type = TREE_TYPE (exp); + if (i) + { + if (i->pure_function) + flags |= ECF_PURE | ECF_LIBCALL_BLOCK; + if (i->const_function) + flags |= ECF_CONST | ECF_LIBCALL_BLOCK; + } + /* The function exp may have the `malloc' attribute. */ - if (DECL_P (exp) && DECL_IS_MALLOC (exp)) + if (DECL_IS_MALLOC (exp)) flags |= ECF_MALLOC; /* The function exp may have the `pure' attribute. */ - if (DECL_P (exp) && DECL_IS_PURE (exp)) + if (DECL_IS_PURE (exp)) flags |= ECF_PURE | ECF_LIBCALL_BLOCK; if (TREE_NOTHROW (exp)) flags |= ECF_NOTHROW; + + if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)) + flags |= ECF_LIBCALL_BLOCK; } if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)) - flags |= ECF_CONST | ECF_LIBCALL_BLOCK; + flags |= ECF_CONST; if (TREE_THIS_VOLATILE (exp)) flags |= ECF_NORETURN; @@ -837,10 +760,7 @@ flags_from_decl_or_type (exp) Set REG_PARM_SEEN if we encounter a register parameter. */ static void -precompute_register_parameters (num_actuals, args, reg_parm_seen) - int num_actuals; - struct arg_data *args; - int *reg_parm_seen; +precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen) { int i; @@ -905,12 +825,7 @@ precompute_register_parameters (num_actuals, args, reg_parm_seen) parameters, we must save and restore it. */ static rtx -save_fixed_argument_area (reg_parm_stack_space, argblock, - low_to_save, high_to_save) - int reg_parm_stack_space; - rtx argblock; - int *low_to_save; - int *high_to_save; +save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save) { int low; int high; @@ -977,11 +892,7 @@ save_fixed_argument_area (reg_parm_stack_space, argblock, } static void -restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save) - rtx save_area; - rtx argblock; - int high_to_save; - int low_to_save; +restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save) { enum machine_mode save_mode = GET_MODE (save_area); int delta; @@ -1016,9 +927,7 @@ restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save) the aligned_regs array if it is nonzero. 
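
   As an illustration (an editor's addition, not part of the original
   sources; the names are hypothetical), a type whose alignment is
   smaller than a word can reach this path:

     struct __attribute__ ((packed)) tag { char kind; short len; };
     extern void send_tag (struct tag t);

   On a target that passes struct tag in a register, its byte alignment
   rules out a simple word load, so the bytes are assembled into
   word-sized pseudos with the bitfield routines below.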
*/

 static void
-store_unaligned_arguments_into_pseudos (args, num_actuals)
-     struct arg_data *args;
-     int num_actuals;
+store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
 {
   int i, j;

@@ -1029,22 +938,26 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
 	< (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
       {
 	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
-	int big_endian_correction = 0;
+	int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+	int endian_correction = 0;

-	args[i].n_aligned_regs
-	  = args[i].partial ? args[i].partial
-	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
+	args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
+	args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

-	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
-						* args[i].n_aligned_regs);
-
-	/* Structures smaller than a word are aligned to the least
-	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
+	/* Structures smaller than a word are normally aligned to the
+	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
 	   this means we must skip the empty high order bytes when
 	   calculating the bit offset.  */
-	if (BYTES_BIG_ENDIAN
-	    && bytes < UNITS_PER_WORD)
-	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
+	if (bytes < UNITS_PER_WORD
+#ifdef BLOCK_REG_PADDING
+	    && (BLOCK_REG_PADDING (args[i].mode,
+				   TREE_TYPE (args[i].tree_value), 1)
+		== downward)
+#else
+	    && BYTES_BIG_ENDIAN
+#endif
+	    )
+	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

 	for (j = 0; j < args[i].n_aligned_regs; j++)
 	  {
@@ -1053,6 +966,8 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
 	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

 	    args[i].aligned_regs[j] = reg;
+	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
+				      word_mode, word_mode, BITS_PER_WORD);

 	    /* There is no need to restrict this code to loading items
 	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
@@ -1068,11 +983,8 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
 	      emit_move_insn (reg, const0_rtx);

 	    bytes -= bitsize / BITS_PER_UNIT;
-	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
-			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
-						word_mode, word_mode,
-						BITS_PER_WORD),
-			     BITS_PER_WORD);
+	    store_bit_field (reg, bitsize, endian_correction, word_mode,
+			     word, BITS_PER_WORD);
 	  }
       }
 }
@@ -1099,23 +1011,15 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
    flags which may be modified by this routine.  */

 static void
-initialize_argument_information (num_actuals, args, args_size, n_named_args,
-				 actparms, fndecl, args_so_far,
-				 reg_parm_stack_space, old_stack_level,
-				 old_pending_adj, must_preallocate,
-				 ecf_flags)
-     int num_actuals ATTRIBUTE_UNUSED;
-     struct arg_data *args;
-     struct args_size *args_size;
-     int n_named_args ATTRIBUTE_UNUSED;
-     tree actparms;
-     tree fndecl;
-     CUMULATIVE_ARGS *args_so_far;
-     int reg_parm_stack_space;
-     rtx *old_stack_level;
-     int *old_pending_adj;
-     int *must_preallocate;
-     int *ecf_flags;
+initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
+				 struct arg_data *args,
+				 struct args_size *args_size,
+				 int n_named_args ATTRIBUTE_UNUSED,
+				 tree actparms, tree fndecl,
+				 CUMULATIVE_ARGS *args_so_far,
+				 int reg_parm_stack_space,
+				 rtx *old_stack_level, int *old_pending_adj,
+				 int *must_preallocate, int *ecf_flags)
 {
   /* 1 if scanning parms front to back, -1 if scanning back to front.
*/ int inc; @@ -1123,7 +1027,6 @@ initialize_argument_information (num_actuals, args, args_size, n_named_args, /* Count arg position in order args appear. */ int argpos; - struct args_size alignment_pad; int i; tree p; @@ -1179,8 +1082,7 @@ initialize_argument_information (num_actuals, args, args_size, n_named_args, with those made by function.c. */ /* See if this argument should be passed by invisible reference. */ - if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST - && contains_placeholder_p (TYPE_SIZE (type))) + if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)) || TREE_ADDRESSABLE (type) #ifdef FUNCTION_ARG_PASS_BY_REFERENCE || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type), @@ -1275,9 +1177,8 @@ initialize_argument_information (num_actuals, args, args_size, n_named_args, mode = TYPE_MODE (type); unsignedp = TREE_UNSIGNED (type); -#ifdef PROMOTE_FUNCTION_ARGS - mode = promote_mode (type, mode, &unsignedp, 1); -#endif + if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0)) + mode = promote_mode (type, mode, &unsignedp, 1); args[i].unsignedp = unsignedp; args[i].mode = mode; @@ -1334,39 +1235,22 @@ initialize_argument_information (num_actuals, args, args_size, n_named_args, #else args[i].reg != 0, #endif - fndecl, args_size, &args[i].offset, - &args[i].size, &alignment_pad); - -#ifndef ARGS_GROW_DOWNWARD - args[i].slot_offset = *args_size; + args[i].pass_on_stack ? 0 : args[i].partial, + fndecl, args_size, &args[i].locate); +#ifdef BLOCK_REG_PADDING + else + /* The argument is passed entirely in registers. See at which + end it should be padded. */ + args[i].locate.where_pad = + BLOCK_REG_PADDING (mode, type, + int_size_in_bytes (type) <= UNITS_PER_WORD); #endif - args[i].alignment_pad = alignment_pad; - - /* If a part of the arg was put into registers, - don't include that part in the amount pushed. */ - if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack) - args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD) - / (PARM_BOUNDARY / BITS_PER_UNIT) - * (PARM_BOUNDARY / BITS_PER_UNIT)); - /* Update ARGS_SIZE, the total stack space for args so far. */ - args_size->constant += args[i].size.constant; - if (args[i].size.var) - { - ADD_PARM_SIZE (*args_size, args[i].size.var); - } - - /* Since the slot offset points to the bottom of the slot, - we must record it after incrementing if the args grow down. */ -#ifdef ARGS_GROW_DOWNWARD - args[i].slot_offset = *args_size; - - args[i].slot_offset.constant = -args_size->constant; - if (args_size->var) - SUB_PARM_SIZE (args[i].slot_offset, args_size->var); -#endif + args_size->constant += args[i].locate.size.constant; + if (args[i].locate.size.var) + ADD_PARM_SIZE (*args_size, args[i].locate.size.var); /* Increment ARGS_SO_FAR, which has info about which arg-registers have been used, etc. */ @@ -1383,11 +1267,9 @@ initialize_argument_information (num_actuals, args, args_size, n_named_args, for arguments passed in registers. */ static int -compute_argument_block_size (reg_parm_stack_space, args_size, - preferred_stack_boundary) - int reg_parm_stack_space; - struct args_size *args_size; - int preferred_stack_boundary ATTRIBUTE_UNUSED; +compute_argument_block_size (int reg_parm_stack_space, + struct args_size *args_size, + int preferred_stack_boundary ATTRIBUTE_UNUSED) { int unadjusted_args_size = args_size->constant; @@ -1470,10 +1352,7 @@ compute_argument_block_size (reg_parm_stack_space, args_size, precomputed argument. 
*/

 static void
-precompute_arguments (flags, num_actuals, args)
-     int flags;
-     int num_actuals;
-     struct arg_data *args;
+precompute_arguments (int flags, int num_actuals, struct arg_data *args)
 {
   int i;

@@ -1541,11 +1420,7 @@ precompute_arguments (flags, num_actuals, args)
    compute and return the final value for MUST_PREALLOCATE.  */

 static int
-finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
-     int must_preallocate;
-     int num_actuals;
-     struct arg_data *args;
-     struct args_size *args_size;
+finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
 {
   /* See if we have or want to preallocate stack space.

@@ -1604,10 +1479,7 @@ finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
    ARGBLOCK is an rtx for the address of the outgoing arguments.  */

 static void
-compute_argument_addresses (args, argblock, num_actuals)
-     struct arg_data *args;
-     rtx argblock;
-     int num_actuals;
+compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
 {
   if (argblock)
     {
@@ -1619,8 +1491,8 @@ compute_argument_addresses (args, argblock, num_actuals)

       for (i = 0; i < num_actuals; i++)
	 {
-	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
-	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
+	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
+	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	   rtx addr;

	   /* Skip this parm if it will not be passed on the stack.  */
@@ -1634,6 +1506,7 @@ compute_argument_addresses (args, argblock, num_actuals)
	       addr = plus_constant (addr, arg_offset);

	       args[i].stack = gen_rtx_MEM (args[i].mode, addr);
+	      set_mem_align (args[i].stack, PARM_BOUNDARY);
	       set_mem_attributes (args[i].stack,
				   TREE_TYPE (args[i].tree_value), 1);
@@ -1644,6 +1517,7 @@ compute_argument_addresses (args, argblock, num_actuals)
	       addr = plus_constant (addr, arg_offset);

	       args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
+	      set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
	       set_mem_attributes (args[i].stack_slot,
				   TREE_TYPE (args[i].tree_value), 1);
@@ -1663,12 +1537,10 @@ compute_argument_addresses (args, argblock, num_actuals)
    FNDECL is the tree node for the target function.  For an indirect call
    FNDECL will be NULL_TREE.

-   EXP is the CALL_EXPR for this call.  */
+   ADDR is the operand 0 of CALL_EXPR for this call.  */

 static rtx
-rtx_for_function_call (fndecl, exp)
-     tree fndecl;
-     tree exp;
+rtx_for_function_call (tree fndecl, tree addr)
 {
   rtx funexp;

@@ -1690,7 +1562,7 @@ rtx_for_function_call (fndecl, exp)
     /* Generate an rtx (probably a pseudo-register) for the address.  */
     {
       push_temp_slots ();
-      funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
       pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
       emit_queue ();
     }
@@ -1702,20 +1574,15 @@ rtx_for_function_call (fndecl, exp)
    expressions were already evaluated.

    Mark all register-parms as living through the call, putting these USE
-   insns in the CALL_INSN_FUNCTION_USAGE field.
-
+   insns in the CALL_INSN_FUNCTION_USAGE field.
+
    When IS_SIBCALL, perform the check_sibcall_argument_overlap
    checking, setting *SIBCALL_FAILURE if appropriate.
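
   The overlap check matters because a sibling call reuses the caller's
   incoming argument area for its own outgoing arguments.  As an
   illustration (an editor's addition, not part of the original
   sources):

     extern int g (int, int);
     int f (int a, int b) { return g (b, a); }

   On a target that passes these arguments on the stack, storing the
   first outgoing argument (b) would overwrite the incoming slot that
   still holds a, so the store is recorded in stored_args_map and the
   sibcall is abandoned when such an overlap is seen.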
*/ static void -load_register_parameters (args, num_actuals, call_fusage, flags, - is_sibcall, sibcall_failure) - struct arg_data *args; - int num_actuals; - rtx *call_fusage; - int flags; - int is_sibcall; - int *sibcall_failure; +load_register_parameters (struct arg_data *args, int num_actuals, + rtx *call_fusage, int flags, int is_sibcall, + int *sibcall_failure) { int i, j; @@ -1727,35 +1594,67 @@ load_register_parameters (args, num_actuals, call_fusage, flags, { rtx reg = ((flags & ECF_SIBCALL) ? args[i].tail_call_reg : args[i].reg); - int partial = args[i].partial; - int nregs; - if (reg) { + int partial = args[i].partial; + int nregs; + int size = 0; rtx before_arg = get_last_insn (); /* Set to non-negative if must move a word at a time, even if just one word (e.g, partial == 1 && mode == DFmode). Set to -1 if we just use a normal move insn. This value can be zero if the argument is a zero size structure with no fields. */ - nregs = (partial ? partial - : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode - ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value)) - + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) - : -1)); + nregs = -1; + if (partial) + nregs = partial; + else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode) + { + size = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); + nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; + } + else + size = GET_MODE_SIZE (args[i].mode); /* Handle calls that pass values in multiple non-contiguous locations. The Irix 6 ABI has examples of this. */ if (GET_CODE (reg) == PARALLEL) - emit_group_load (reg, args[i].value, - int_size_in_bytes (TREE_TYPE (args[i].tree_value))); + { + tree type = TREE_TYPE (args[i].tree_value); + emit_group_load (reg, args[i].value, type, + int_size_in_bytes (type)); + } /* If simple case, just do move. If normal partial, store_one_arg has already loaded the register for us. In all other cases, load the register(s) from memory. */ else if (nregs == -1) - emit_move_insn (reg, args[i].value); + { + emit_move_insn (reg, args[i].value); +#ifdef BLOCK_REG_PADDING + /* Handle case where we have a value that needs shifting + up to the msb. eg. a QImode value and we're padding + upward on a BYTES_BIG_ENDIAN machine. */ + if (size < UNITS_PER_WORD + && (args[i].locate.where_pad + == (BYTES_BIG_ENDIAN ? upward : downward))) + { + rtx x; + int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; + + /* Assigning REG here rather than a temp makes CALL_FUSAGE + report the whole reg as used. Strictly speaking, the + call only uses SIZE bytes at the msb end, but it doesn't + seem worth generating rtl to say that. */ + reg = gen_rtx_REG (word_mode, REGNO (reg)); + x = expand_binop (word_mode, ashl_optab, reg, + GEN_INT (shift), reg, 1, OPTAB_WIDEN); + if (x != reg) + emit_move_insn (reg, x); + } +#endif + } /* If we have pre-computed the values to put in the registers in the case of non-aligned structures, copy them in now. */ @@ -1766,9 +1665,30 @@ load_register_parameters (args, num_actuals, call_fusage, flags, args[i].aligned_regs[j]); else if (partial == 0 || args[i].pass_on_stack) - move_block_to_reg (REGNO (reg), - validize_mem (args[i].value), nregs, - args[i].mode); + { + rtx mem = validize_mem (args[i].value); + +#ifdef BLOCK_REG_PADDING + /* Handle a BLKmode that needs shifting. 
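   As a worked example (an editor's addition, not part of the original
   patch): take UNITS_PER_WORD == 4 and a 3-byte argument padded
   downward.  Loading the enclosing word places the three data bytes at
   the most significant end of the register on a big-endian target, so
   shift = (4 - 3) * BITS_PER_UNIT = 8 and a logical right shift by 8
   right-justifies the value; on a little-endian target the same
   padding calls for a left shift instead, matching the choice of
   lshr_optab or ashl_optab below.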
*/ + if (nregs == 1 && size < UNITS_PER_WORD + && args[i].locate.where_pad == downward) + { + rtx tem = operand_subword_force (mem, 0, args[i].mode); + rtx ri = gen_rtx_REG (word_mode, REGNO (reg)); + rtx x = gen_reg_rtx (word_mode); + int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT; + optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab; + + emit_move_insn (x, tem); + x = expand_binop (word_mode, dir, x, GEN_INT (shift), + ri, 1, OPTAB_WIDEN); + if (x != ri) + emit_move_insn (ri, x); + } + else +#endif + move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode); + } /* When a parameter is a block, and perhaps in other cases, it is possible that it did a load from an argument slot that was @@ -1793,13 +1713,8 @@ load_register_parameters (args, num_actuals, call_fusage, flags, about the parameters. */ static rtx -try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr) - tree fndecl; - tree actparms; - rtx target; - int ignore; - tree type; - rtx structure_value_addr; +try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore, + tree type, rtx structure_value_addr) { rtx temp; rtx before_call; @@ -1904,7 +1819,8 @@ try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr) if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline && optimize > 0 && !TREE_ADDRESSABLE (fndecl)) { - warning_with_decl (fndecl, "inlining failed in call to `%s'"); + warning ("%Hinlining failed in call to '%F'", + &DECL_SOURCE_LOCATION (fndecl), fndecl); warning ("called from here"); } (*lang_hooks.mark_addressable) (fndecl); @@ -1921,12 +1837,9 @@ try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr) be popped after the call. Returns the adjustment. */ static int -combine_pending_stack_adjustment_and_call (unadjusted_args_size, - args_size, - preferred_unit_stack_boundary) - int unadjusted_args_size; - struct args_size *args_size; - int preferred_unit_stack_boundary; +combine_pending_stack_adjustment_and_call (int unadjusted_args_size, + struct args_size *args_size, + int preferred_unit_stack_boundary) { /* The number of bytes to pop so that the stack will be under-aligned by UNADJUSTED_ARGS_SIZE bytes. */ @@ -1977,8 +1890,7 @@ combine_pending_stack_adjustment_and_call (unadjusted_args_size, zero otherwise. */ static int -check_sibcall_argument_overlap_1 (x) - rtx x; +check_sibcall_argument_overlap_1 (rtx x) { RTX_CODE code; int i, j; @@ -2041,10 +1953,7 @@ check_sibcall_argument_overlap_1 (x) slots, zero otherwise. */ static int -check_sibcall_argument_overlap (insn, arg, mark_stored_args_map) - rtx insn; - struct arg_data *arg; - int mark_stored_args_map; +check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map) { int low, high; @@ -2061,20 +1970,19 @@ check_sibcall_argument_overlap (insn, arg, mark_stored_args_map) if (mark_stored_args_map) { #ifdef ARGS_GROW_DOWNWARD - low = -arg->slot_offset.constant - arg->size.constant; + low = -arg->locate.slot_offset.constant - arg->locate.size.constant; #else - low = arg->slot_offset.constant; + low = arg->locate.slot_offset.constant; #endif - for (high = low + arg->size.constant; low < high; low++) + for (high = low + arg->locate.size.constant; low < high; low++) SET_BIT (stored_args_map, low); } return insn != NULL_RTX; } static tree -fix_unsafe_tree (t) - tree t; +fix_unsafe_tree (tree t) { switch (unsafe_for_reeval (t)) { @@ -2108,10 +2016,7 @@ fix_unsafe_tree (t) If IGNORE is nonzero, then we ignore the value of the function call. 
*/
*/ rtx -expand_call (exp, target, ignore) - tree exp; - rtx target; - int ignore; +expand_call (tree exp, rtx target, int ignore) { /* Nonzero if we are currently expanding a call. */ static int currently_expanding_call = 0; @@ -2128,6 +2033,7 @@ expand_call (exp, target, ignore) rtx tail_call_insns = NULL_RTX; /* Data type of the function. */ tree funtype; + tree type_arg_types; /* Declaration of the function being called, or 0 if the function is computed (not known by name). */ tree fndecl = 0; @@ -2153,6 +2059,7 @@ expand_call (exp, target, ignore) /* Nonzero if called function returns an aggregate in memory PCC style, by returning the address of where to find it. */ int pcc_struct_value = 0; + rtx struct_value = 0; /* Number of actual parameters in this call, including struct value addr. */ int num_actuals; @@ -2204,14 +2111,23 @@ expand_call (exp, target, ignore) int initial_highest_arg_in_use = highest_outgoing_arg_in_use; char *initial_stack_usage_map = stack_usage_map; - int old_stack_arg_under_construction = 0; + int old_stack_allocated; + + /* State variables to track stack modifications. */ rtx old_stack_level = 0; + int old_stack_arg_under_construction = 0; int old_pending_adj = 0; int old_inhibit_defer_pop = inhibit_defer_pop; - int old_stack_allocated; + + /* Some stack pointer alterations we make are performed via + allocate_dynamic_stack_space. This modifies the stack_pointer_delta, + which we then also need to save/restore along the way. */ + int old_stack_pointer_delta = 0; + rtx call_fusage; tree p = TREE_OPERAND (exp, 0); + tree addr = TREE_OPERAND (exp, 0); int i; /* The alignment of the stack, in bits. */ HOST_WIDE_INT preferred_stack_boundary; @@ -2244,7 +2160,8 @@ expand_call (exp, target, ignore) if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline && optimize > 0) { - warning_with_decl (fndecl, "can't inline call to `%s'"); + warning ("%Hcan't inline call to '%F'", + &DECL_SOURCE_LOCATION (fndecl), fndecl); warning ("called from here"); } (*lang_hooks.mark_addressable) (fndecl); @@ -2258,6 +2175,39 @@ expand_call (exp, target, ignore) else flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p))); + struct_value = targetm.calls.struct_value_rtx (fndecl ? TREE_TYPE (fndecl) : 0, 0); + + /* Warn if this value is an aggregate type, + regardless of which calling convention we are using for it. */ + if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp))) + warning ("function call has aggregate value"); + + /* If the result of a pure or const function call is ignored (or void), + and none of its arguments are volatile, we can avoid expanding the + call and just evaluate the arguments for side-effects. */ + if ((flags & (ECF_CONST | ECF_PURE)) + && (ignore || target == const0_rtx + || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)) + { + bool volatilep = false; + tree arg; + + for (arg = actparms; arg; arg = TREE_CHAIN (arg)) + if (TREE_THIS_VOLATILE (TREE_VALUE (arg))) + { + volatilep = true; + break; + } + + if (! volatilep) + { + for (arg = actparms; arg; arg = TREE_CHAIN (arg)) + expand_expr (TREE_VALUE (arg), const0_rtx, + VOIDmode, EXPAND_NORMAL); + return const0_rtx; + } + } + #ifdef REG_PARM_STACK_SPACE #ifdef MAYBE_REG_PARM_STACK_SPACE reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; @@ -2271,15 +2221,10 @@ expand_call (exp, target, ignore) must_preallocate = 1; #endif - /* Warn if this value is an aggregate type, - regardless of which calling convention we are using for it. 
*/
-  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
-    warning ("function call has aggregate value");
-
   /* Set up a place to return a structure.  */

   /* Cater to broken compilers.  */
-  if (aggregate_value_p (exp))
+  if (aggregate_value_p (exp, fndecl))
     {
       /* This call returns a big structure.  */
       flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
@@ -2341,13 +2286,29 @@ expand_call (exp, target, ignore)
   /* Figure out the amount to which the stack should be aligned.  */
   preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
+  if (fndecl)
+    {
+      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
+      if (i && i->preferred_incoming_stack_boundary)
+	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
+    }

   /* Operand 0 is a pointer-to-function; get the type of the function.  */
-  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
+  funtype = TREE_TYPE (addr);
   if (! POINTER_TYPE_P (funtype))
     abort ();
   funtype = TREE_TYPE (funtype);

+  /* Munge the tree to split complex arguments into their imaginary
+     and real parts.  */
+  if (SPLIT_COMPLEX_ARGS)
+    {
+      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
+      actparms = split_complex_values (actparms);
+    }
+  else
+    type_arg_types = TYPE_ARG_TYPES (funtype);
+
   /* See if this is a call to a function that can return more than once
      or a call to longjmp or malloc.  */
   flags |= special_function_p (fndecl, flags);
@@ -2357,7 +2318,7 @@ expand_call (exp, target, ignore)

   /* If struct_value_rtx is 0, it means pass the address
      as if it were an extra parameter.  */
-  if (structure_value_addr && struct_value_rtx == 0)
+  if (structure_value_addr && struct_value == 0)
     {
       /* If structure_value_addr is a REG other than
	  virtual_outgoing_args_rtx, we can always use it.  If it
@@ -2383,6 +2344,14 @@ expand_call (exp, target, ignore)
   for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
     num_actuals++;

+  /* Start updating where the next arg would go.
+
+     On some machines (such as the PA) indirect calls have a different
+     calling convention than normal calls.  The last argument in
+     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
+     or not.  */
+  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
+
   /* Compute number of named args.
      Normally, don't include the last named arg if anonymous args follow.
      We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
@@ -2399,30 +2368,22 @@ expand_call (exp, target, ignore)
      reliable way to pass unnamed args in registers, so we must force
      them into memory.  */

-  if ((STRICT_ARGUMENT_NAMING
-       || ! PRETEND_OUTGOING_VARARGS_NAMED)
-      && TYPE_ARG_TYPES (funtype) != 0)
+  if ((targetm.calls.strict_argument_naming (&args_so_far)
+       || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
+      && type_arg_types != 0)
     n_named_args
-      = (list_length (TYPE_ARG_TYPES (funtype))
+      = (list_length (type_arg_types)
	  /* Don't include the last named arg.  */
-	  - (STRICT_ARGUMENT_NAMING ? 0 : 1)
+	  - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
	  /* Count the struct value address, if it is passed as a parm.  */
	  + structure_value_addr_parm);
   else
     /* If we know nothing, treat all args as named.  */
     n_named_args = num_actuals;

-  /* Start updating where the next arg would go.
-
-     On some machines (such as the PA) indirect calls have a different
-     calling convention than normal calls.  The last argument in
-     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
-     or not.
*/ - INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl); - /* Make a vector to hold all the information about each arg. */ - args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data)); - memset ((char *) args, 0, num_actuals * sizeof (struct arg_data)); + args = alloca (num_actuals * sizeof (struct arg_data)); + memset (args, 0, num_actuals * sizeof (struct arg_data)); /* Build up entries in the ARGS array, compute the size of the arguments into ARGS_SIZE, etc. */ @@ -2474,14 +2435,14 @@ expand_call (exp, target, ignore) if (currently_expanding_call++ != 0 || !flag_optimize_sibling_calls || !rtx_equal_function_value_matters - || any_pending_cleanups (1) + || any_pending_cleanups () || args_size.var) try_tail_call = try_tail_recursion = 0; /* Tail recursion fails, when we are not dealing with recursive calls. */ if (!try_tail_recursion - || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR - || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl) + || TREE_CODE (addr) != ADDR_EXPR + || TREE_OPERAND (addr, 0) != current_function_decl) try_tail_recursion = 0; /* Rest of purposes for tail call optimizations to fail. */ @@ -2503,16 +2464,21 @@ expand_call (exp, target, ignore) /* Functions that do not return exactly once may not be sibcall optimized. */ || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN)) - || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))) + || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))) + /* If the called function is nested in the current one, it might access + some of the caller's arguments, but could clobber them beforehand if + the argument areas are shared. */ + || (fndecl && decl_function_context (fndecl) == current_function_decl) /* If this function requires more stack slots than the current function, we cannot change it into a sibling call. */ || args_size.constant > current_function_args_size /* If the callee pops its own arguments, then it must pop exactly the same number of arguments as the current function. */ - || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant) - != RETURN_POPS_ARGS (current_function_decl, - TREE_TYPE (current_function_decl), - current_function_args_size)) + || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant) + != RETURN_POPS_ARGS (current_function_decl, + TREE_TYPE (current_function_decl), + current_function_args_size)) + || !(*lang_hooks.decls.ok_for_sibcall) (fndecl)) try_tail_call = 0; if (try_tail_call || try_tail_recursion) @@ -2556,12 +2522,12 @@ expand_call (exp, target, ignore) if (try_tail_recursion) actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms); } - /* Do the same for the function address if it is an expression. */ + /* Do the same for the function address if it is an expression. */ if (!fndecl) - TREE_OPERAND (exp, 0) = fix_unsafe_tree (TREE_OPERAND (exp, 0)); + addr = fix_unsafe_tree (addr); /* Expanding one of those dangerous arguments could have added cleanups, but otherwise give it a whirl. */ - if (any_pending_cleanups (1)) + if (any_pending_cleanups ()) try_tail_call = try_tail_recursion = 0; } @@ -2593,7 +2559,7 @@ expand_call (exp, target, ignore) expand_start_target_temps (); if (optimize_tail_recursion (actparms, get_last_insn ())) { - if (any_pending_cleanups (1)) + if (any_pending_cleanups ()) try_tail_call = try_tail_recursion = 0; else tail_recursion_insns = get_insns (); @@ -2616,9 +2582,7 @@ expand_call (exp, target, ignore) is subject to race conditions, just as with multithreaded programs. 
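
   As an illustration (an editor's addition, not part of the original
   sources): in a program compiled with -fprofile-arcs, a sequence like

     if (fork () == 0)
       execl ("/bin/true", "true", (char *) 0);

   would lose or double-count buffered profile data if it were not
   flushed before the fork and exec take place, which is why the
   library call below is emitted first.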
*/ - emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__gcov_flush"), - LCT_ALWAYS_RETURN, - VOIDmode, 0); + emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0); } /* Ensure current function's preferred stack boundary is at least @@ -2627,6 +2591,8 @@ expand_call (exp, target, ignore) if (cfun->preferred_stack_boundary < preferred_stack_boundary && fndecl != current_function_decl) cfun->preferred_stack_boundary = preferred_stack_boundary; + if (fndecl == current_function_decl) + cfun->recursive_call_emit = true; preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT; @@ -2748,6 +2714,7 @@ expand_call (exp, target, ignore) if (old_stack_level == 0) { emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); + old_stack_pointer_delta = stack_pointer_delta; old_pending_adj = pending_stack_adjust; pending_stack_adjust = 0; /* stack_arg_under_construction says whether a stack arg is @@ -2806,8 +2773,7 @@ expand_call (exp, target, ignore) highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed); #endif - stack_usage_map - = (char *) alloca (highest_outgoing_arg_in_use); + stack_usage_map = alloca (highest_outgoing_arg_in_use); if (initial_highest_arg_in_use) memcpy (stack_usage_map, initial_stack_usage_map, @@ -2864,7 +2830,12 @@ expand_call (exp, target, ignore) if (needed == 0) argblock = virtual_outgoing_args_rtx; else - argblock = push_block (GEN_INT (needed), 0, 0); + { + argblock = push_block (GEN_INT (needed), 0, 0); +#ifdef ARGS_GROW_DOWNWARD + argblock = plus_constant (argblock, needed); +#endif + } /* We only really need to call `copy_to_reg' in the case where push insns are going to be used to pass ARGBLOCK @@ -2874,51 +2845,55 @@ expand_call (exp, target, ignore) VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might as well always do it. */ argblock = copy_to_reg (argblock); + } + } + } - /* The save/restore code in store_one_arg handles all - cases except one: a constructor call (including a C - function returning a BLKmode struct) to initialize - an argument. */ - if (stack_arg_under_construction) - { + if (ACCUMULATE_OUTGOING_ARGS) + { + /* The save/restore code in store_one_arg handles all + cases except one: a constructor call (including a C + function returning a BLKmode struct) to initialize + an argument. */ + if (stack_arg_under_construction) + { #ifndef OUTGOING_REG_PARM_STACK_SPACE - rtx push_size = GEN_INT (reg_parm_stack_space - + adjusted_args_size.constant); + rtx push_size = GEN_INT (reg_parm_stack_space + + adjusted_args_size.constant); #else - rtx push_size = GEN_INT (adjusted_args_size.constant); + rtx push_size = GEN_INT (adjusted_args_size.constant); #endif - if (old_stack_level == 0) - { - emit_stack_save (SAVE_BLOCK, &old_stack_level, - NULL_RTX); - old_pending_adj = pending_stack_adjust; - pending_stack_adjust = 0; - /* stack_arg_under_construction says whether a stack - arg is being constructed at the old stack level. - Pushing the stack gets a clean outgoing argument - block. */ - old_stack_arg_under_construction - = stack_arg_under_construction; - stack_arg_under_construction = 0; - /* Make a new map for the new argument list. */ - stack_usage_map = (char *) - alloca (highest_outgoing_arg_in_use); - memset (stack_usage_map, 0, highest_outgoing_arg_in_use); - highest_outgoing_arg_in_use = 0; - } - allocate_dynamic_stack_space (push_size, NULL_RTX, - BITS_PER_UNIT); - } - /* If argument evaluation might modify the stack pointer, - copy the address of the argument list to a register. 
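
   As an illustration (an editor's addition, not part of the original
   sources; the names are hypothetical), evaluating an argument can
   itself move the stack pointer:

     extern void consume (void *buf, int flag);
     void wrap (unsigned int len) { consume (__builtin_alloca (len), 0); }

   Here the alloca happens while the outgoing arguments are being
   computed, so the address of the argument block has to be kept in a
   register instead of being recomputed relative to the stack pointer.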
*/ - for (i = 0; i < num_actuals; i++) - if (args[i].pass_on_stack) - { - argblock = copy_addr_to_reg (argblock); - break; - } + if (old_stack_level == 0) + { + emit_stack_save (SAVE_BLOCK, &old_stack_level, + NULL_RTX); + old_stack_pointer_delta = stack_pointer_delta; + old_pending_adj = pending_stack_adjust; + pending_stack_adjust = 0; + /* stack_arg_under_construction says whether a stack + arg is being constructed at the old stack level. + Pushing the stack gets a clean outgoing argument + block. */ + old_stack_arg_under_construction + = stack_arg_under_construction; + stack_arg_under_construction = 0; + /* Make a new map for the new argument list. */ + stack_usage_map = alloca (highest_outgoing_arg_in_use); + memset (stack_usage_map, 0, highest_outgoing_arg_in_use); + highest_outgoing_arg_in_use = 0; } + allocate_dynamic_stack_space (push_size, NULL_RTX, + BITS_PER_UNIT); } + + /* If argument evaluation might modify the stack pointer, + copy the address of the argument list to a register. */ + for (i = 0; i < num_actuals; i++) + if (args[i].pass_on_stack) + { + argblock = copy_addr_to_reg (argblock); + break; + } } compute_argument_addresses (args, argblock, num_actuals); @@ -2949,7 +2924,7 @@ expand_call (exp, target, ignore) be deferred during the evaluation of the arguments. */ NO_DEFER_POP; - funexp = rtx_for_function_call (fndecl, exp); + funexp = rtx_for_function_call (fndecl, addr); /* Figure out the register where the value, if any, will come back. */ valreg = 0; @@ -3038,13 +3013,18 @@ expand_call (exp, target, ignore) structure value. */ if (pass != 0 && structure_value_addr && ! structure_value_addr_parm) { - emit_move_insn (struct_value_rtx, +#ifdef POINTERS_EXTEND_UNSIGNED + if (GET_MODE (structure_value_addr) != Pmode) + structure_value_addr = convert_memory_address + (Pmode, structure_value_addr); +#endif + emit_move_insn (struct_value, force_reg (Pmode, force_operand (structure_value_addr, NULL_RTX))); - if (GET_CODE (struct_value_rtx) == REG) - use_reg (&call_fusage, struct_value_rtx); + if (GET_CODE (struct_value) == REG) + use_reg (&call_fusage, struct_value); } funexp = prepare_call_address (funexp, fndecl, &call_fusage, @@ -3084,11 +3064,6 @@ expand_call (exp, target, ignore) next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, flags, & args_so_far); - /* Verify that we've deallocated all the stack we used. */ - if (pass - && old_stack_allocated != stack_pointer_delta - pending_stack_adjust) - abort (); - /* If call is cse'able, make appropriate pair of reg-notes around it. Test valreg so we don't crash; may safely ignore `const' if return type is void. Disable for PARALLEL return values, because @@ -3096,10 +3071,19 @@ expand_call (exp, target, ignore) if (pass && (flags & ECF_LIBCALL_BLOCK)) { rtx insns; + rtx insn; + bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL; + + insns = get_insns (); + + /* Expansion of block moves possibly introduced a loop that may + not appear inside libcall block. 
*/ + for (insn = insns; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == JUMP_INSN) + failed = true; - if (valreg == 0 || GET_CODE (valreg) == PARALLEL) + if (failed) { - insns = get_insns (); end_sequence (); emit_insn (insns); } @@ -3120,7 +3104,6 @@ expand_call (exp, target, ignore) args[i].initial_value, note); note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note); - insns = get_insns (); end_sequence (); if (flags & ECF_PURE) @@ -3179,24 +3162,20 @@ expand_call (exp, target, ignore) } emit_barrier_after (last); + + /* Stack adjustments after a noreturn call are dead code. */ + stack_pointer_delta = old_stack_allocated; + pending_stack_adjust = 0; } if (flags & ECF_LONGJMP) current_function_calls_longjmp = 1; - /* If this function is returning into a memory location marked as - readonly, it means it is initializing that location. But we normally - treat functions as not clobbering such locations, so we need to - specify that this one does. */ - if (target != 0 && GET_CODE (target) == MEM - && structure_value_addr != 0 && RTX_UNCHANGING_P (target)) - emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); - /* If value type not void, return an rtx for the value. */ /* If there are cleanups to be called, don't use a hard reg as target. We need to double check this and see if it matters anymore. */ - if (any_pending_cleanups (1)) + if (any_pending_cleanups ()) { if (target && REG_P (target) && REGNO (target) < FIRST_PSEUDO_REGISTER) @@ -3243,7 +3222,7 @@ expand_call (exp, target, ignore) } if (! rtx_equal_p (target, valreg)) - emit_group_store (target, valreg, + emit_group_store (target, valreg, TREE_TYPE (exp), int_size_in_bytes (TREE_TYPE (exp))); /* We can not support sibling calls for this case. */ @@ -3260,6 +3239,12 @@ expand_call (exp, target, ignore) If they refer to the same register, this move will be a no-op, except when function inlining is being done. */ emit_move_insn (target, valreg); + + /* If we are setting a MEM, this code must be executed. Since it is + emitted after the call insn, sibcall optimization cannot be + performed in that case. */ + if (GET_CODE (target) == MEM) + sibcall_failure = 1; } else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode) { @@ -3271,7 +3256,8 @@ expand_call (exp, target, ignore) else target = copy_to_reg (valreg); -#ifdef PROMOTE_FUNCTION_RETURN + if (targetm.calls.promote_function_return(funtype)) + { /* If we promoted this return value, make the proper SUBREG. TARGET might be const0_rtx here, so be careful. */ if (GET_CODE (target) == REG @@ -3302,7 +3288,7 @@ expand_call (exp, target, ignore) SUBREG_PROMOTED_VAR_P (target) = 1; SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp); } -#endif + } /* If size of args is variable or this was a constructor call for a stack argument, restore saved stack-pointer value. */ @@ -3310,6 +3296,7 @@ expand_call (exp, target, ignore) if (old_stack_level && ! 
(flags & ECF_SP_DEPRESSED)) { emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); + stack_pointer_delta = old_stack_pointer_delta; pending_stack_adjust = old_pending_adj; stack_arg_under_construction = old_stack_arg_under_construction; highest_outgoing_arg_in_use = initial_highest_arg_in_use; @@ -3338,7 +3325,7 @@ expand_call (exp, target, ignore) emit_move_insn (stack_area, args[i].save_area); else emit_block_move (stack_area, args[i].save_area, - GEN_INT (args[i].size.constant), + GEN_INT (args[i].locate.size.constant), BLOCK_OP_CALL_PARM); } @@ -3366,6 +3353,22 @@ expand_call (exp, target, ignore) expand_end_target_temps (); } + /* If this function is returning into a memory location marked as + readonly, it means it is initializing that location. We normally treat + functions as not clobbering such locations, so we need to specify that + this one does. We do this by adding the appropriate CLOBBER to the + CALL_INSN function usage list. This cannot be done by emitting a + standalone CLOBBER after the call because the latter would be ignored + by at least the delay slot scheduling pass. We do this now instead of + adding to call_fusage before the call to emit_call_1 because TARGET + may be modified in the meantime. */ + if (structure_value_addr != 0 && target != 0 + && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target)) + add_function_usage_to + (last_call_insn (), + gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target), + NULL_RTX)); + insns = get_insns (); end_sequence (); @@ -3390,7 +3393,15 @@ expand_call (exp, target, ignore) sbitmap_free (stored_args_map); } else - normal_call_insns = insns; + { + normal_call_insns = insns; + + /* Verify that we've deallocated all the stack we used. */ + if (! (flags & (ECF_NORETURN | ECF_LONGJMP)) + && old_stack_allocated != stack_pointer_delta + - pending_stack_adjust) + abort (); + } /* If something prevents making this a sibling call, zero out the sequence. */ @@ -3457,20 +3468,91 @@ expand_call (exp, target, ignore) return target; } + +/* Traverse an argument list in VALUES and expand all complex + arguments into their components. */ +tree +split_complex_values (tree values) +{ + tree p; + + values = copy_list (values); + + for (p = values; p; p = TREE_CHAIN (p)) + { + tree complex_value = TREE_VALUE (p); + tree complex_type; + + complex_type = TREE_TYPE (complex_value); + if (!complex_type) + continue; + + if (TREE_CODE (complex_type) == COMPLEX_TYPE) + { + tree subtype; + tree real, imag, next; + + subtype = TREE_TYPE (complex_type); + complex_value = save_expr (complex_value); + real = build1 (REALPART_EXPR, subtype, complex_value); + imag = build1 (IMAGPART_EXPR, subtype, complex_value); + + TREE_VALUE (p) = real; + next = TREE_CHAIN (p); + imag = build_tree_list (NULL_TREE, imag); + TREE_CHAIN (p) = imag; + TREE_CHAIN (imag) = next; + + /* Skip the newly created node. */ + p = TREE_CHAIN (p); + } + } + + return values; +} + +/* Traverse a list of TYPES and expand all complex types into their + components. */ +tree +split_complex_types (tree types) +{ + tree p; + + types = copy_list (types); + + for (p = types; p; p = TREE_CHAIN (p)) + { + tree complex_type = TREE_VALUE (p); + + if (TREE_CODE (complex_type) == COMPLEX_TYPE) + { + tree next, imag; + + /* Rewrite complex type with component type. */ + TREE_VALUE (p) = TREE_TYPE (complex_type); + next = TREE_CHAIN (p); + + /* Add another component type for the imaginary part. 
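   As an illustration (an editor's addition, not part of the original
   sources), with SPLIT_COMPLEX_ARGS a prototype such as

     void f (_Complex double z);

   is handled for argument passing as if it had been written

     void f (double z_real, double z_imag);

   so the COMPLEX_TYPE node is rewritten to its component type above
   and a second copy of that component type is chained in here.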
*/ + imag = build_tree_list (NULL_TREE, TREE_VALUE (p)); + TREE_CHAIN (p) = imag; + TREE_CHAIN (imag) = next; + + /* Skip the newly created node. */ + p = TREE_CHAIN (p); + } + } + + return types; +} /* Output a library call to function FUN (a SYMBOL_REF rtx). The RETVAL parameter specifies whether return value needs to be saved, other parameters are documented in the emit_library_call function below. */ static rtx -emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) - int retval; - rtx orgfun; - rtx value; - enum libcall_type fn_type; - enum machine_mode outmode; - int nargs; - va_list p; +emit_library_call_value_1 (int retval, rtx orgfun, rtx value, + enum libcall_type fn_type, + enum machine_mode outmode, int nargs, va_list p) { /* Total size in bytes of all the stack-parms scanned so far. */ struct args_size args_size; @@ -3480,7 +3562,6 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) rtx fun; int inc; int count; - struct args_size alignment_pad; rtx argblock = 0; CUMULATIVE_ARGS args_so_far; struct arg @@ -3489,8 +3570,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) enum machine_mode mode; rtx reg; int partial; - struct args_size offset; - struct args_size size; + struct locate_and_pad_arg_data locate; rtx save_area; }; struct arg *argvec; @@ -3517,6 +3597,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) int initial_highest_arg_in_use = highest_outgoing_arg_in_use; char *initial_stack_usage_map = stack_usage_map; + rtx struct_value = targetm.calls.struct_value_rtx (0, 0); + #ifdef REG_PARM_STACK_SPACE #ifdef MAYBE_REG_PARM_STACK_SPACE reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; @@ -3569,7 +3651,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) if (outmode != VOIDmode) { tfom = (*lang_hooks.types.type_for_mode) (outmode, 0); - if (aggregate_value_p (tfom)) + if (aggregate_value_p (tfom, 0)) { #ifdef PCC_STATIC_STRUCT_RETURN rtx pointer_reg @@ -3601,8 +3683,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) of the full argument passing conventions to limit complexity here since library functions shouldn't have many args. */ - argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg)); - memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg)); + argvec = alloca ((nargs + 1) * sizeof (struct arg)); + memset (argvec, 0, (nargs + 1) * sizeof (struct arg)); #ifdef INIT_CUMULATIVE_LIBCALL_ARGS INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun); @@ -3624,7 +3706,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p) /* If there's a structure value address to be passed, either pass it in the special place, or pass it as an extra argument. */ - if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value) + if (mem_value && struct_value == 0 && ! 
 
 /* Output a library call to function FUN (a SYMBOL_REF rtx).
    The RETVAL parameter specifies whether return value needs to be saved, other
    parameters are documented in the emit_library_call function below.  */
 
 static rtx
-emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
-     int retval;
-     rtx orgfun;
-     rtx value;
-     enum libcall_type fn_type;
-     enum machine_mode outmode;
-     int nargs;
-     va_list p;
+emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
+			   enum libcall_type fn_type,
+			   enum machine_mode outmode, int nargs, va_list p)
 {
   /* Total size in bytes of all the stack-parms scanned so far.  */
   struct args_size args_size;
@@ -3480,7 +3562,6 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
   rtx fun;
   int inc;
   int count;
-  struct args_size alignment_pad;
   rtx argblock = 0;
   CUMULATIVE_ARGS args_so_far;
   struct arg
@@ -3489,8 +3570,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
     enum machine_mode mode;
     rtx reg;
     int partial;
-    struct args_size offset;
-    struct args_size size;
+    struct locate_and_pad_arg_data locate;
     rtx save_area;
   };
   struct arg *argvec;
@@ -3517,6 +3597,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
   int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
   char *initial_stack_usage_map = stack_usage_map;
 
+  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
+
 #ifdef REG_PARM_STACK_SPACE
 #ifdef MAYBE_REG_PARM_STACK_SPACE
   reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
@@ -3569,7 +3651,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
   if (outmode != VOIDmode)
     {
       tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
-      if (aggregate_value_p (tfom))
+      if (aggregate_value_p (tfom, 0))
 	{
 #ifdef PCC_STATIC_STRUCT_RETURN
 	  rtx pointer_reg
@@ -3601,8 +3683,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
      of the full argument passing conventions to limit complexity here since
      library functions shouldn't have many args.  */
 
-  argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
-  memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
+  argvec = alloca ((nargs + 1) * sizeof (struct arg));
+  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
 
 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
   INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
@@ -3624,7 +3706,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 
   /* If there's a structure value address to be passed,
      either pass it in the special place, or pass it as an extra argument.  */
-  if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
+  if (mem_value && struct_value == 0 && ! pcc_struct_value)
     {
       rtx addr = XEXP (mem_value, 0);
       nargs++;
@@ -3650,12 +3732,11 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 #else
 			   argvec[count].reg != 0,
 #endif
-			   NULL_TREE, &args_size, &argvec[count].offset,
-			   &argvec[count].size, &alignment_pad);
+			   0, NULL_TREE, &args_size, &argvec[count].locate);
 
       if (argvec[count].reg == 0 || argvec[count].partial != 0
 	  || reg_parm_stack_space > 0)
-	args_size.constant += argvec[count].size.constant;
+	args_size.constant += argvec[count].locate.size.constant;
 
       FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
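The recurring theme of this hunk is replacing locate_and_pad_parm's three parallel out-parameters (offset, size, alignment_pad) with one struct locate_and_pad_arg_data. A standalone, deliberately simplified model of that refactoring (byte-based, ignoring padding direction and bit-level boundaries; all names below are invented for the sketch):

#include <stdio.h>

/* Modeled on struct locate_and_pad_arg_data: one struct bundles what
   used to be three separate output arguments.  */
struct arg_locate
{
  int offset;         /* Offset of the argument from the arg block.  */
  int size;           /* Rounded-up size on the stack.  */
  int alignment_pad;  /* Padding inserted before this argument.  */
};

/* Fill in *LOC for an argument of SIZE bytes starting at *STACK_OFF,
   keeping arguments aligned to BOUNDARY bytes.  */
static void
locate_arg (int size, int boundary, int *stack_off, struct arg_locate *loc)
{
  int misalign = *stack_off % boundary;

  loc->alignment_pad = misalign ? boundary - misalign : 0;
  loc->offset = *stack_off + loc->alignment_pad;
  loc->size = (size + boundary - 1) / boundary * boundary;
  *stack_off = loc->offset + loc->size;
}

int
main (void)
{
  struct arg_locate loc;
  int off = 0;

  locate_arg (5, 4, &off, &loc);   /* a 5-byte arg in 4-byte slots */
  printf ("offset=%d size=%d pad=%d\n",
	  loc.offset, loc.size, loc.alignment_pad);   /* 0, 8, 0 */
  return 0;
}

One struct argument replaces three pointer arguments at every call site, which is exactly why the patch can delete the local alignment_pad and the offset/size members above.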
@@ -3673,13 +3754,6 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
 	abort ();
 
-      /* On some machines, there's no way to pass a float to a library fcn.
-	 Pass it as a double instead.  */
-#ifdef LIBGCC_NEEDS_DOUBLE
-      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
-	val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
-#endif
-
       /* There's no need to call protect_from_queue, because
 	 either emit_move_insn or emit_push_insn will do that.  */
@@ -3693,7 +3767,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 	{
 	  rtx slot;
 	  int must_copy = 1
-#ifdef FUNCTION_ARG_CALLEE_COPIES
+#ifdef FUNCTION_ARG_CALLEE_COPIES
 	    && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode, NULL_TREE, 1)
 #endif
@@ -3769,18 +3843,15 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 #else
 			   argvec[count].reg != 0,
 #endif
-			   NULL_TREE, &args_size, &argvec[count].offset,
-			   &argvec[count].size, &alignment_pad);
+			   argvec[count].partial,
+			   NULL_TREE, &args_size, &argvec[count].locate);
 
-      if (argvec[count].size.var)
+      if (argvec[count].locate.size.var)
 	abort ();
 
-      if (reg_parm_stack_space == 0 && argvec[count].partial)
-	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
-
       if (argvec[count].reg == 0 || argvec[count].partial != 0
 	  || reg_parm_stack_space > 0)
-	args_size.constant += argvec[count].size.constant;
+	args_size.constant += argvec[count].locate.size.constant;
 
       FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
     }
@@ -3841,7 +3912,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
       highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
 #endif
 
-      stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
+      stack_usage_map = alloca (highest_outgoing_arg_in_use);
 
       if (initial_highest_arg_in_use)
 	memcpy (stack_usage_map, initial_stack_usage_map,
@@ -3918,11 +3989,11 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 #ifdef ARGS_GROW_DOWNWARD
 	  /* stack_slot is negative, but we want to index stack_usage_map
 	     with positive values.  */
-	  upper_bound = -argvec[argnum].offset.constant + 1;
-	  lower_bound = upper_bound - argvec[argnum].size.constant;
+	  upper_bound = -argvec[argnum].locate.offset.constant + 1;
+	  lower_bound = upper_bound - argvec[argnum].locate.size.constant;
 #else
-	  lower_bound = argvec[argnum].offset.constant;
-	  upper_bound = lower_bound + argvec[argnum].size.constant;
+	  lower_bound = argvec[argnum].locate.offset.constant;
+	  upper_bound = lower_bound + argvec[argnum].locate.size.constant;
 #endif
 
 	  i = lower_bound;
@@ -3935,29 +4006,43 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 
 	  if (i < upper_bound)
 	    {
-	      /* We need to make a save area.  See what mode we can make
-		 it.  */
+	      /* We need to make a save area.  */
+	      unsigned int size
+		= argvec[argnum].locate.size.constant * BITS_PER_UNIT;
 	      enum machine_mode save_mode
-		= mode_for_size (argvec[argnum].size.constant
-				 * BITS_PER_UNIT,
-				 MODE_INT, 1);
+		= mode_for_size (size, MODE_INT, 1);
+	      rtx adr
+		= plus_constant (argblock,
+				 argvec[argnum].locate.offset.constant);
 	      rtx stack_area
-		= gen_rtx_MEM
-		  (save_mode,
-		   memory_address
-		   (save_mode,
-		    plus_constant (argblock,
-				   argvec[argnum].offset.constant)));
-	      argvec[argnum].save_area = gen_reg_rtx (save_mode);
-
-	      emit_move_insn (argvec[argnum].save_area, stack_area);
+		= gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
+
+	      if (save_mode == BLKmode)
+		{
+		  argvec[argnum].save_area
+		    = assign_stack_temp (BLKmode,
+					 argvec[argnum].locate.size.constant,
+					 0);
+
+		  emit_block_move (validize_mem (argvec[argnum].save_area),
+				   stack_area,
+				   GEN_INT (argvec[argnum].locate.size.constant),
+				   BLOCK_OP_CALL_PARM);
+		}
+	      else
+		{
+		  argvec[argnum].save_area = gen_reg_rtx (save_mode);
+
+		  emit_move_insn (argvec[argnum].save_area, stack_area);
+		}
 	    }
 	}
 
       emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
 		      partial, reg, 0, argblock,
-		      GEN_INT (argvec[argnum].offset.constant),
-		      reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
+		      GEN_INT (argvec[argnum].locate.offset.constant),
+		      reg_parm_stack_space,
+		      ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
 
       /* Now mark the segment we just used.  */
       if (ACCUMULATE_OUTGOING_ARGS)
@@ -3994,7 +4079,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
       /* Handle calls that pass values in multiple non-contiguous
 	 locations.  The PA64 has examples of this for library calls.  */
       if (reg != 0 && GET_CODE (reg) == PARALLEL)
-	emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
+	emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
       else if (reg != 0 && partial == 0)
 	emit_move_insn (reg, val);
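The new BLKmode branch above distinguishes save areas that fit in a machine register (saved with gen_reg_rtx + emit_move_insn) from larger ones (saved with assign_stack_temp + emit_block_move, and restored the same way further down). A standalone analogue of that shape, with all names invented for the sketch:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct save_area
{
  int is_block;
  uint64_t reg;    /* word-sized save, like gen_reg_rtx + emit_move_insn */
  void *block;     /* block save, like assign_stack_temp + emit_block_move */
};

static void
save_stack_region (struct save_area *sa, const void *area, size_t size)
{
  sa->is_block = size > sizeof sa->reg;
  if (sa->is_block)
    {
      sa->block = malloc (size);        /* unchecked; sketch only */
      memcpy (sa->block, area, size);   /* emit_block_move analogue */
    }
  else
    memcpy (&sa->reg, area, size);      /* emit_move_insn analogue */
}

static void
restore_stack_region (struct save_area *sa, void *area, size_t size)
{
  if (sa->is_block)
    {
      memcpy (area, sa->block, size);
      free (sa->block);
    }
  else
    memcpy (area, &sa->reg, size);
}

int
main (void)
{
  char area[16] = "stack arg bytes";
  struct save_area sa;

  save_stack_region (&sa, area, sizeof area);     /* before the call */
  area[0] = 'X';                                  /* callee clobbers it */
  restore_stack_region (&sa, area, sizeof area);  /* after the call */
  return area[0] == 's' ? 0 : 1;
}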
@@ -4012,14 +4097,14 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
     }
 
   /* Pass the function the address in which to return a structure value.  */
-  if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
+  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
     {
-      emit_move_insn (struct_value_rtx,
+      emit_move_insn (struct_value,
 		      force_reg (Pmode,
 				 force_operand (XEXP (mem_value, 0),
 						NULL_RTX)));
-      if (GET_CODE (struct_value_rtx) == REG)
-	use_reg (&call_fusage, struct_value_rtx);
+      if (GET_CODE (struct_value) == REG)
+	use_reg (&call_fusage, struct_value);
     }
 
   /* Don't allow popping to be deferred, since then
@@ -4098,7 +4183,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
       if (GET_CODE (valreg) == PARALLEL)
 	{
 	  temp = gen_reg_rtx (outmode);
-	  emit_group_store (temp, valreg, outmode);
+	  emit_group_store (temp, valreg, NULL_TREE,
+			    GET_MODE_SIZE (outmode));
 	  valreg = temp;
 	}
@@ -4141,7 +4227,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 	{
 	  if (value == 0)
 	    value = gen_reg_rtx (outmode);
-	  emit_group_store (value, valreg, outmode);
+	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
 	}
       else if (value != 0)
 	emit_move_insn (value, valreg);
@@ -4162,14 +4248,18 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
 	if (argvec[count].save_area)
 	  {
 	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
-	    rtx stack_area
-	      = gen_rtx_MEM (save_mode,
-			     memory_address
-			     (save_mode,
-			      plus_constant (argblock,
-					     argvec[count].offset.constant)));
-
-	    emit_move_insn (stack_area, argvec[count].save_area);
+	    rtx adr = plus_constant (argblock,
+				     argvec[count].locate.offset.constant);
+	    rtx stack_area = gen_rtx_MEM (save_mode,
+					  memory_address (save_mode, adr));
+
+	    if (save_mode == BLKmode)
+	      emit_block_move (stack_area,
+			       validize_mem (argvec[count].save_area),
+			       GEN_INT (argvec[count].locate.size.constant),
+			       BLOCK_OP_CALL_PARM);
+	    else
+	      emit_move_insn (stack_area, argvec[count].save_area);
 	  }
 
   highest_outgoing_arg_in_use = initial_highest_arg_in_use;
@@ -4195,18 +4285,14 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
    or other LCT_ value for other types of library calls.  */
 
 void
-emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
-			   enum machine_mode outmode, int nargs, ...))
+emit_library_call (rtx orgfun, enum libcall_type fn_type,
+		   enum machine_mode outmode, int nargs, ...)
 {
-  VA_OPEN (p, nargs);
-  VA_FIXEDARG (p, rtx, orgfun);
-  VA_FIXEDARG (p, int, fn_type);
-  VA_FIXEDARG (p, enum machine_mode, outmode);
-  VA_FIXEDARG (p, int, nargs);
+  va_list p;
+  va_start (p, nargs);
 
   emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
-
-  VA_CLOSE (p);
+  va_end (p);
 }
 
 /* Like emit_library_call except that an extra argument, VALUE,
@@ -4218,23 +4304,17 @@ emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
    If VALUE is nonzero, VALUE is returned.  */
 
 rtx
-emit_library_call_value VPARAMS((rtx orgfun, rtx value,
-				 enum libcall_type fn_type,
-				 enum machine_mode outmode, int nargs, ...))
+emit_library_call_value (rtx orgfun, rtx value,
			 enum libcall_type fn_type,
			 enum machine_mode outmode, int nargs, ...)
 {
   rtx result;
-
-  VA_OPEN (p, nargs);
-  VA_FIXEDARG (p, rtx, orgfun);
-  VA_FIXEDARG (p, rtx, value);
-  VA_FIXEDARG (p, int, fn_type);
-  VA_FIXEDARG (p, enum machine_mode, outmode);
-  VA_FIXEDARG (p, int, nargs);
+  va_list p;
+  va_start (p, nargs);
 
   result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
 				      nargs, p);
-
-  VA_CLOSE (p);
+  va_end (p);
 
   return result;
 }
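The VPARAMS/VA_OPEN/VA_FIXEDARG macros existed so this code could still build on pre-ISO (K&R) hosts; once that requirement was dropped, the pattern collapses to a plain variadic entry point that forwards its va_list to a worker, as the two conversions above show. A self-contained example of the same pattern (sum/sum_1 are invented names):

#include <stdarg.h>
#include <stdio.h>

/* Worker takes the va_list, like emit_library_call_value_1.  */
static int
sum_1 (int nargs, va_list ap)
{
  int total = 0;
  while (nargs-- > 0)
    total += va_arg (ap, int);
  return total;
}

/* Public variadic entry point, like emit_library_call.  */
static int
sum (int nargs, ...)
{
  va_list ap;
  int total;

  va_start (ap, nargs);
  total = sum_1 (nargs, ap);
  va_end (ap);
  return total;
}

int
main (void)
{
  printf ("%d\n", sum (3, 1, 2, 3));   /* prints 6 */
  return 0;
}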
@@ -4259,12 +4339,8 @@ emit_library_call_value VPARAMS((rtx orgfun, rtx value,
    zero otherwise.  */
 
 static int
-store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
-     struct arg_data *arg;
-     rtx argblock;
-     int flags;
-     int variable_size ATTRIBUTE_UNUSED;
-     int reg_parm_stack_space;
+store_one_arg (struct arg_data *arg, rtx argblock, int flags,
+	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
 {
   tree pval = arg->tree_value;
   rtx reg = 0;
@@ -4294,14 +4370,14 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
 	  else
 	    upper_bound = 0;
 
-	  lower_bound = upper_bound - arg->size.constant;
+	  lower_bound = upper_bound - arg->locate.size.constant;
 #else
 	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
 	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
 	  else
 	    lower_bound = 0;
 
-	  upper_bound = lower_bound + arg->size.constant;
+	  upper_bound = lower_bound + arg->locate.size.constant;
 #endif
 
 	  i = lower_bound;
@@ -4314,13 +4390,11 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
 
 	  if (i < upper_bound)
 	    {
-	      /* We need to make a save area.  See what mode we can make it.  */
-	      enum machine_mode save_mode
-		= mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
-	      rtx stack_area
-		= gen_rtx_MEM (save_mode,
-			       memory_address (save_mode,
					       XEXP (arg->stack_slot, 0)));
+	      /* We need to make a save area.  */
+	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
+	      enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
+	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
+	      rtx stack_area = gen_rtx_MEM (save_mode, adr);
 
	      if (save_mode == BLKmode)
		{
@@ -4446,10 +4520,10 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
 
       /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
-      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 
+      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      PARM_BOUNDARY, partial, reg, used - size, argblock,
-		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
-		      ARGS_SIZE_RTX (arg->alignment_pad));
+		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
+		      ARGS_SIZE_RTX (arg->locate.alignment_pad));
 
       /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
@@ -4471,16 +4545,17 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
       /* Round its size up to a multiple
	 of the allocation unit for arguments.  */
 
-      if (arg->size.var != 0)
+      if (arg->locate.size.var != 0)
	{
	  excess = 0;
-	  size_rtx = ARGS_SIZE_RTX (arg->size);
+	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
       else
	{
	  /* PUSH_ROUNDING has no effect on us, because
	     emit_push_insn for BLKmode is careful to avoid it.  */
-	  excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
+	  excess = (arg->locate.size.constant
+		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial * UNITS_PER_WORD);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}
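A worked instance of the "excess" computation in the hunk above, with invented numbers (a 5-byte BLKmode argument whose stack slot was rounded up to two 4-byte words, nothing passed in registers):

#include <stdio.h>

int
main (void)
{
  int locate_size_constant = 8;  /* rounded slot size from locate_and_pad_parm */
  int actual_size = 5;           /* int_size_in_bytes (TREE_TYPE (pval)) */
  int partial = 0;               /* words already passed in registers */
  int units_per_word = 4;

  int excess = locate_size_constant - actual_size + partial * units_per_word;

  /* excess = 3: the padding bytes the pushed block must account for.  */
  printf ("excess = %d\n", excess);
  return 0;
}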
@@ -4494,7 +4569,7 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
	 PARM_BOUNDARY, but the actual argument isn't.  */
       if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
-	  if (arg->size.var)
+	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
@@ -4506,7 +4581,7 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
       if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
	{
	  /* emit_push_insn might not work properly if arg->value and
-	     argblock + arg->offset areas overlap.  */
+	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;
 
@@ -4520,17 +4595,17 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
	      i = INTVAL (XEXP (XEXP (x, 0), 1));
 
	      /* expand_call should ensure this */
-	      if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
+	      if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
		abort ();
 
-	      if (arg->offset.constant > i)
+	      if (arg->locate.offset.constant > i)
		{
-		  if (arg->offset.constant < i + INTVAL (size_rtx))
+		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
-	      else if (arg->offset.constant < i)
+	      else if (arg->locate.offset.constant < i)
		{
-		  if (i < arg->offset.constant + INTVAL (size_rtx))
+		  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
 
@@ -4538,8 +4613,8 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
-		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
-		      ARGS_SIZE_RTX (arg->alignment_pad));
+		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
+		      ARGS_SIZE_RTX (arg->locate.alignment_pad));
 
       /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.
@@ -4577,7 +4652,6 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
 
   return sibcall_failure;
 }
-
 
 /* Nonzero if we do not know how to pass TYPE solely in registers.
    We cannot do so in the following cases:
 
@@ -4595,9 +4669,7 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
    For a little-endian machine, the reverse is true.  */
 
 bool
-default_must_pass_in_stack (mode, type)
-     enum machine_mode mode;
-     tree type;
+default_must_pass_in_stack (enum machine_mode mode, tree type)
 {
   if (!type)
     return false;