/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
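
/* As an illustration (the actual values are target-specific): with
   PREFERRED_STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8, STACK_BYTES
   comes out to 8 bytes.  */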

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* Likewise for a call that is const or pure but may loop forever.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
    }

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
          & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;
  const_tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
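        /* As an illustration (numbers are hypothetical): a 3-byte struct
           with BITS_PER_WORD == 32 gets endian_correction
           = 32 - 3 * 8 = 8, i.e. 8 bits of empty high-order space to
           skip when computing the bit offset below.  */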

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   argument that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            arg = save_expr (arg);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j += inc;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j += inc;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
              *ecf_flags &= ~ECF_LIBCALL_BLOCK;
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

              *ecf_flags &= ~(ECF_LIBCALL_BLOCK);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwinded before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is
         simpler to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (!OUTGOING_REG_PARM_STACK_SPACE)
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
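
      /* As an illustration (numbers are hypothetical): with
         args_size->constant == 20, stack_pointer_delta == 4 and a
         16-byte boundary, this computes ((20 + 4 + 15) / 16) * 16 - 4
         == 28, so after pushing 28 bytes the stack pointer has moved
         by 4 + 28 == 32, a multiple of the boundary.  */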

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (!OUTGOING_REG_PARM_STACK_SPACE)
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      if ((flags & ECF_LIBCALL_BLOCK) == 0
          && TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (GET_CODE (offset) == CONST_INT)
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
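              /* As an illustration (numbers are hypothetical): a
                 downward-padded arg at byte offset 4 in a slot with a
                 64-bit boundary gives align = 32 | 64 = 96, and
                 96 & -96 = 32, the largest power of two dividing both
                 the bit offset and the boundary.  */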
            }
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == crtl->args.internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
           && XEXP (addr, 0) == crtl->args.internal_arg_pointer
           && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
               || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
    return true;
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return true;
    }

  return false;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g, partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  eg. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
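
                  /* As an illustration (numbers are hypothetical): a
                     QImode value (size == 1) with UNITS_PER_WORD == 4
                     needs shift == (4 - 1) * 8 == 24 bits to move the
                     byte from the lsb up to the msb end of the word.  */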

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
              if (is_sibcall
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                                                           size))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);
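
  /* As an illustration (numbers are hypothetical): with
     stack_pointer_delta == 0, UNADJUSTED_ARGS_SIZE == 20 and a 16-byte
     boundary, UNADJUSTED_ALIGNMENT starts out as (0 + 20) % 16 == 4;
     the code below then decides how much of PENDING_STACK_ADJUST to
     pop so that the stack stays aligned once the 20 bytes of arguments
     are pushed.  */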

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}

/* Scan expression X to see whether it dereferences any argument slot
   we have already clobbered with tail call arguments (as noted in the
   stored_args_map bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}

/* Scan the sequence after INSN to see whether it dereferences any argument
   slot we have already clobbered with tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, add the
   stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
   is a register, MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
   sequence after INSN dereferences such an argument slot, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

static bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
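
  /* As an illustration: an SImode (32-bit) value returned at the top
     of a 64-bit register gives shift == 64 - 32 == 32.  */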
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new, x);
      return new;
    }
  return x;
}
1886 /* Generate all the code for a CALL_EXPR exp
1887 and return an rtx for its value.
1888 Store the value in TARGET (specified as an rtx) if convenient.
1889 If the value is stored in TARGET then TARGET is returned.
1890 If IGNORE is nonzero, then we ignore the value of the function call. */
1893 expand_call (tree exp, rtx target, int ignore)
1895 /* Nonzero if we are currently expanding a call. */
1896 static int currently_expanding_call = 0;
1898 /* RTX for the function to be called. */
1900 /* Sequence of insns to perform a normal "call". */
1901 rtx normal_call_insns = NULL_RTX;
1902 /* Sequence of insns to perform a tail "call". */
1903 rtx tail_call_insns = NULL_RTX;
1904 /* Data type of the function. */
1906 tree type_arg_types;
1907 /* Declaration of the function being called,
1908 or 0 if the function is computed (not known by name). */
1910 /* The type of the function being called. */
1912 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1915 /* Register in which non-BLKmode value will be returned,
1916 or 0 if no value or if value is BLKmode. */
1918 /* Address where we should return a BLKmode value;
1919 0 if value not BLKmode. */
1920 rtx structure_value_addr = 0;
1921 /* Nonzero if that address is being passed by treating it as
1922 an extra, implicit first parameter. Otherwise,
1923 it is passed by being copied directly into struct_value_rtx. */
1924 int structure_value_addr_parm = 0;
1925 /* Holds the value of implicit argument for the struct value. */
1926 tree structure_value_addr_value = NULL_TREE;
1927 /* Size of aggregate value wanted, or zero if none wanted
1928 or if we are using the non-reentrant PCC calling convention
1929 or expecting the value in registers. */
1930 HOST_WIDE_INT struct_value_size = 0;
1931 /* Nonzero if called function returns an aggregate in memory PCC style,
1932 by returning the address of where to find it. */
1933 int pcc_struct_value = 0;
1934 rtx struct_value = 0;
1936 /* Number of actual parameters in this call, including struct value addr. */
1938 /* Number of named args. Args after this are anonymous ones
1939 and they must all go on the stack. */
1941 /* Number of complex actual arguments that need to be split. */
1942 int num_complex_actuals = 0;
1944 /* Vector of information about each argument.
1945 Arguments are numbered in the order they will be pushed,
1946 not the order they are written. */
1947 struct arg_data *args;
1949 /* Total size in bytes of all the stack-parms scanned so far. */
1950 struct args_size args_size;
1951 struct args_size adjusted_args_size;
1952 /* Size of arguments before any adjustments (such as rounding). */
1953 int unadjusted_args_size;
1954 /* Data on reg parms scanned so far. */
1955 CUMULATIVE_ARGS args_so_far;
1956 /* Nonzero if a reg parm has been scanned. */
1958 /* Nonzero if this is an indirect function call. */
1960 /* Nonzero if we must avoid push-insns in the args for this call.
1961 If stack space is allocated for register parameters, but not by the
1962 caller, then it is preallocated in the fixed part of the stack frame.
1963 So the entire argument block must then be preallocated (i.e., we
1964 ignore PUSH_ROUNDING in that case). */
1966 int must_preallocate = !PUSH_ARGS;
1968 /* Size of the stack reserved for parameter registers. */
1969 int reg_parm_stack_space = 0;
1971 /* Address of space preallocated for stack parms
1972 (on machines that lack push insns), or 0 if space not preallocated. */
1975 /* Mask of ECF_ flags. */
1977 #ifdef REG_PARM_STACK_SPACE
1978 /* Define the boundary of the register parm stack space that needs to be
1979 saved, if any. */
1980 int low_to_save, high_to_save;
1981 rtx save_area = 0; /* Place that it is saved */
1982 #endif
1984 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1985 char *initial_stack_usage_map = stack_usage_map;
1986 char *stack_usage_map_buf = NULL;
1988 int old_stack_allocated;
1990 /* State variables to track stack modifications. */
1991 rtx old_stack_level = 0;
1992 int old_stack_arg_under_construction = 0;
1993 int old_pending_adj = 0;
1994 int old_inhibit_defer_pop = inhibit_defer_pop;
1996 /* Some stack pointer alterations we make are performed via
1997 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1998 which we then also need to save/restore along the way. */
1999 int old_stack_pointer_delta = 0;
2002 tree p = CALL_EXPR_FN (exp);
2003 tree addr = CALL_EXPR_FN (exp);
2005 /* The alignment of the stack, in bits. */
2006 unsigned HOST_WIDE_INT preferred_stack_boundary;
2007 /* The alignment of the stack, in bytes. */
2008 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2009 /* The static chain value to use for this call. */
2010 rtx static_chain_value;
2011 /* See if this is a "nothrow" function call. */
2012 if (TREE_NOTHROW (exp))
2013 flags |= ECF_NOTHROW;
2015 /* See if we can find a DECL-node for the actual function, and get the
2016 function attributes (flags) from the function decl or type node. */
2017 fndecl = get_callee_fndecl (exp);
2020 fntype = TREE_TYPE (fndecl);
2021 flags |= flags_from_decl_or_type (fndecl);
2025 fntype = TREE_TYPE (TREE_TYPE (p));
2026 flags |= flags_from_decl_or_type (fntype);
2029 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2031 /* Warn if this value is an aggregate type,
2032 regardless of which calling convention we are using for it. */
2033 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2034 warning (OPT_Waggregate_return, "function call has aggregate value");
2036 /* If the result of a non-looping pure or const function call is
2037 ignored (or void), and none of its arguments are volatile, we can
2038 avoid expanding the call and just evaluate the arguments for
2039 side effects. */
2040 if ((flags & (ECF_CONST | ECF_PURE))
2041 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2042 && (ignore || target == const0_rtx
2043 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2045 bool volatilep = false;
2047 call_expr_arg_iterator iter;
2049 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2050 if (TREE_THIS_VOLATILE (arg))
2058 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2059 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
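/* Example (added for illustration): for a const function "int sq (int);",
   a statement like "(void) sq (x + 1);" reaches this point with its result
   ignored, so the call itself is never emitted and only the argument
   expression is expanded. */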
2064 #ifdef REG_PARM_STACK_SPACE
2065 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2066 #endif
2068 if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
2069 must_preallocate = 1;
2071 /* Set up a place to return a structure. */
2073 /* Cater to broken compilers. */
2074 if (aggregate_value_p (exp, fndecl))
2076 /* This call returns a big structure. */
2077 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE
2078 | ECF_LIBCALL_BLOCK);
2080 #ifdef PCC_STATIC_STRUCT_RETURN
2082 pcc_struct_value = 1;
2084 #else /* not PCC_STATIC_STRUCT_RETURN */
2086 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2088 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2089 structure_value_addr = XEXP (target, 0);
2092 /* For variable-sized objects, we must be called with a target
2093 specified. If we were to allocate space on the stack here,
2094 we would have no way of knowing when to free it. */
2095 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2097 mark_temp_addr_taken (d);
2098 structure_value_addr = XEXP (d, 0);
2102 #endif /* not PCC_STATIC_STRUCT_RETURN */
2105 /* Figure out the amount to which the stack should be aligned. */
2106 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2109 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2110 if (i && i->preferred_incoming_stack_boundary)
2111 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2114 /* Operand 0 is a pointer-to-function; get the type of the function. */
2115 funtype = TREE_TYPE (addr);
2116 gcc_assert (POINTER_TYPE_P (funtype));
2117 funtype = TREE_TYPE (funtype);
2119 /* Count whether there are actual complex arguments that need to be split
2120 into their real and imaginary parts. Munge the type_arg_types
2121 appropriately here as well. */
2122 if (targetm.calls.split_complex_arg)
2124 call_expr_arg_iterator iter;
2126 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2128 tree type = TREE_TYPE (arg);
2129 if (type && TREE_CODE (type) == COMPLEX_TYPE
2130 && targetm.calls.split_complex_arg (type))
2131 num_complex_actuals++;
2133 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2136 type_arg_types = TYPE_ARG_TYPES (funtype);
2138 if (flags & ECF_MAY_BE_ALLOCA)
2139 cfun->calls_alloca = 1;
2141 /* If struct_value_rtx is 0, it means pass the address
2142 as if it were an extra parameter. Put the argument expression
2143 in structure_value_addr_value. */
2144 if (structure_value_addr && struct_value == 0)
2146 /* If structure_value_addr is a REG other than
2147 virtual_outgoing_args_rtx, we can always use it. If it
2148 is not a REG, we must always copy it into a register.
2149 If it is virtual_outgoing_args_rtx, we must copy it to another
2150 register in some cases. */
2151 rtx temp = (!REG_P (structure_value_addr)
2152 || (ACCUMULATE_OUTGOING_ARGS
2153 && stack_arg_under_construction
2154 && structure_value_addr == virtual_outgoing_args_rtx)
2155 ? copy_addr_to_reg (convert_memory_address
2156 (Pmode, structure_value_addr))
2157 : structure_value_addr);
2159 structure_value_addr_value =
2160 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2161 structure_value_addr_parm = 1;
2164 /* Count the arguments and set NUM_ACTUALS. */
2165 num_actuals =
2166 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2168 /* Compute number of named args.
2169 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2171 if (type_arg_types != 0)
2172 n_named_args
2173 = (list_length (type_arg_types)
2174 /* Count the struct value address, if it is passed as a parm. */
2175 + structure_value_addr_parm);
2177 /* If we know nothing, treat all args as named. */
2178 n_named_args = num_actuals;
2180 /* Start updating where the next arg would go.
2182 On some machines (such as the PA) indirect calls have a different
2183 calling convention than normal calls. The fourth argument in
2184 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2185 or not. */
2186 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2188 /* Now possibly adjust the number of named args.
2189 Normally, don't include the last named arg if anonymous args follow.
2190 We do include the last named arg if
2191 targetm.calls.strict_argument_naming() returns nonzero.
2192 (If no anonymous args follow, the result of list_length is actually
2193 one too large. This is harmless.)
2195 If targetm.calls.pretend_outgoing_varargs_named() returns
2196 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2197 this machine will be able to place unnamed args that were passed
2198 in registers into the stack. So treat all args as named. This
2199 allows the insns emitted for a specific argument list to be
2200 independent of the function declaration.
2202 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2203 we do not have any reliable way to pass unnamed args in
2204 registers, so we must force them into memory. */
2206 if (type_arg_types != 0
2207 && targetm.calls.strict_argument_naming (&args_so_far))
2208 ;
2209 else if (type_arg_types != 0
2210 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2211 /* Don't include the last named arg. */
2212 --n_named_args;
2213 else
2214 /* Treat all args as named. */
2215 n_named_args = num_actuals;
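/* Example (added for illustration): for a callee declared "int f (int, ...)",
   list_length (type_arg_types) is 1. With strict argument naming the single
   int stays named; otherwise the decrement above drops it, since anonymous
   arguments follow it. For a non-variadic callee the type list ends with
   void_type_node, so the raw count is one too large and the decrement merely
   cancels the overcount, as the comment above notes. */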
2217 /* Make a vector to hold all the information about each arg. */
2218 args = alloca (num_actuals * sizeof (struct arg_data));
2219 memset (args, 0, num_actuals * sizeof (struct arg_data));
2221 /* Build up entries in the ARGS array, compute the size of the
2222 arguments into ARGS_SIZE, etc. */
2223 initialize_argument_information (num_actuals, args, &args_size,
2225 structure_value_addr_value, fndecl,
2226 &args_so_far, reg_parm_stack_space,
2227 &old_stack_level, &old_pending_adj,
2228 &must_preallocate, &flags,
2229 &try_tail_call, CALL_FROM_THUNK_P (exp));
2233 /* If this function requires a variable-sized argument list, don't
2234 try to make a cse'able block for this call. We may be able to
2235 do this eventually, but it is too complicated to keep track of
2236 what insns go in the cse'able block and which don't. */
2238 flags &= ~ECF_LIBCALL_BLOCK;
2239 must_preallocate = 1;
2242 /* Now make final decision about preallocating stack space. */
2243 must_preallocate = finalize_must_preallocate (must_preallocate,
2247 /* If the structure value address will reference the stack pointer, we
2248 must stabilize it. We don't need to do this if we know that we are
2249 not going to adjust the stack pointer in processing this call. */
2251 if (structure_value_addr
2252 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2253 || reg_mentioned_p (virtual_outgoing_args_rtx,
2254 structure_value_addr))
2256 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2257 structure_value_addr = copy_to_reg (structure_value_addr);
2259 /* Tail calls can make things harder to debug, and we've traditionally
2260 pushed these optimizations into -O2. Don't try if we're already
2261 expanding a call, as that means we're an argument. Don't try if
2262 there are cleanups, as we know there is code to follow the call. */
2264 if (currently_expanding_call++ != 0
2265 || !flag_optimize_sibling_calls
2267 || lookup_stmt_eh_region (exp) >= 0
2268 || dbg_cnt (tail_call) == false)
2269 try_tail_call = 0;
2271 /* The remaining reasons for tail call optimization to fail. */
2272 if (
2273 #ifdef HAVE_sibcall_epilogue
2274 !HAVE_sibcall_epilogue
2275 #else
2276 1
2277 #endif
2278 || !try_tail_call
2279 /* Doing sibling call optimization needs some work, since
2280 structure_value_addr can be allocated on the stack.
2281 It does not seem worth the effort since few optimizable
2282 sibling calls will return a structure. */
2283 || structure_value_addr != NULL_RTX
2284 /* Check whether the target is able to optimize the call
2285 into a sibcall. */
2286 || !targetm.function_ok_for_sibcall (fndecl, exp)
2287 /* Functions that do not return exactly once may not be sibcall
2288 optimized. */
2289 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2290 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2291 /* If the called function is nested in the current one, it might access
2292 some of the caller's arguments, but could clobber them beforehand if
2293 the argument areas are shared. */
2294 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2295 /* If this function requires more stack slots than the current
2296 function, we cannot change it into a sibling call.
2297 crtl->args.pretend_args_size is not part of the
2298 stack allocated by our caller. */
2299 || args_size.constant > (crtl->args.size
2300 - crtl->args.pretend_args_size)
2301 /* If the callee pops its own arguments, then it must pop exactly
2302 the same number of arguments as the current function. */
2303 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2304 != RETURN_POPS_ARGS (current_function_decl,
2305 TREE_TYPE (current_function_decl),
2307 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2310 /* Ensure current function's preferred stack boundary is at least
2311 what we need. We don't have to increase alignment for recursive
2312 functions. */
2313 if (crtl->preferred_stack_boundary < preferred_stack_boundary
2314 && fndecl != current_function_decl)
2315 crtl->preferred_stack_boundary = preferred_stack_boundary;
2317 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2319 /* We want to make two insn chains; one for a sibling call, the other
2320 for a normal call. We will select one of the two chains after
2321 initial RTL generation is complete. */
2322 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2324 int sibcall_failure = 0;
2325 /* We want to emit any pending stack adjustments before the tail
2326 recursion "call". That way we know any adjustment after the tail
2327 recursion call can be ignored if we indeed use the tail
2328 call. */
2329 int save_pending_stack_adjust = 0;
2330 int save_stack_pointer_delta = 0;
2332 rtx before_call, next_arg_reg, after_args;
2336 /* State variables we need to save and restore between
2337 iterations. */
2338 save_pending_stack_adjust = pending_stack_adjust;
2339 save_stack_pointer_delta = stack_pointer_delta;
2342 flags &= ~ECF_SIBCALL;
2344 flags |= ECF_SIBCALL;
2346 /* Other state variables that we must reinitialize each time
2347 through the loop (that are not initialized by the loop itself). */
2351 /* Start a new sequence for the normal call case.
2353 From this point on, if the sibling call fails, we want to set
2354 sibcall_failure instead of continuing the loop. */
2357 /* Don't let pending stack adjusts add up to too much.
2358 Also, do all pending adjustments now if there is any chance
2359 this might be a call to alloca or if we are expanding a sibling
2360 call sequence.
2361 Also do the adjustments before a throwing call, otherwise
2362 exception handling can fail; PR 19225. */
2363 if (pending_stack_adjust >= 32
2364 || (pending_stack_adjust > 0
2365 && (flags & ECF_MAY_BE_ALLOCA))
2366 || (pending_stack_adjust > 0
2367 && flag_exceptions && !(flags & ECF_NOTHROW))
2369 do_pending_stack_adjust ();
2371 /* When calling a const function, we must pop the stack args right away,
2372 so that the pop is deleted or moved with the call. */
2373 if (pass && (flags & ECF_LIBCALL_BLOCK))
2376 /* Precompute any arguments as needed. */
2378 precompute_arguments (flags, num_actuals, args);
2380 /* Now we are about to start emitting insns that can be deleted
2381 if a libcall is deleted. */
2382 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2385 if (pass == 0 && crtl->stack_protect_guard)
2386 stack_protect_epilogue ();
2388 adjusted_args_size = args_size;
2389 /* Compute the actual size of the argument block required. The variable
2390 and constant sizes must be combined, the size may have to be rounded,
2391 and there may be a minimum required size. When generating a sibcall
2392 pattern, do not round up, since we'll be re-using whatever space our
2393 caller provided. */
2394 unadjusted_args_size
2395 = compute_argument_block_size (reg_parm_stack_space,
2396 &adjusted_args_size,
2397 (pass == 0 ? 0
2398 : preferred_stack_boundary));
2400 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2402 /* The argument block when performing a sibling call is the
2403 incoming argument block. */
2406 argblock = virtual_incoming_args_rtx;
2407 argblock
2408 #ifdef STACK_GROWS_DOWNWARD
2409 = plus_constant (argblock, crtl->args.pretend_args_size);
2410 #else
2411 = plus_constant (argblock, -crtl->args.pretend_args_size);
2412 #endif
2413 stored_args_map = sbitmap_alloc (args_size.constant);
2414 sbitmap_zero (stored_args_map);
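/* Added note: the bitmap has one bit per byte of the incoming argument area
   (args_size.constant bytes); check_sibcall_argument_overlap consults it so
   that storing one tail-call argument is never allowed to clobber a slot a
   later-evaluated argument still reads from. */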
2417 /* If we have no actual push instructions, or shouldn't use them,
2418 make space for all args right now. */
2419 else if (adjusted_args_size.var != 0)
2421 if (old_stack_level == 0)
2423 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2424 old_stack_pointer_delta = stack_pointer_delta;
2425 old_pending_adj = pending_stack_adjust;
2426 pending_stack_adjust = 0;
2427 /* stack_arg_under_construction says whether a stack arg is
2428 being constructed at the old stack level. Pushing the stack
2429 gets a clean outgoing argument block. */
2430 old_stack_arg_under_construction = stack_arg_under_construction;
2431 stack_arg_under_construction = 0;
2433 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2437 /* Note that we must go through the motions of allocating an argument
2438 block even if the size is zero because we may be storing args
2439 in the area reserved for register arguments, which may be part of
2440 the stack frame. */
2442 int needed = adjusted_args_size.constant;
2444 /* Store the maximum argument space used. It will be pushed by
2445 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2446 checking). */
2448 if (needed > crtl->outgoing_args_size)
2449 crtl->outgoing_args_size = needed;
2451 if (must_preallocate)
2453 if (ACCUMULATE_OUTGOING_ARGS)
2455 /* Since the stack pointer will never be pushed, it is
2456 possible for the evaluation of a parm to clobber
2457 something we have already written to the stack.
2458 Since most function calls on RISC machines do not use
2459 the stack, this is uncommon, but must work correctly.
2461 Therefore, we save any area of the stack that was already
2462 written and that we are using. Here we set up to do this
2463 by making a new stack usage map from the old one. The
2464 actual save will be done by store_one_arg.
2466 Another approach might be to try to reorder the argument
2467 evaluations to avoid this conflicting stack usage. */
2469 /* Since we will be writing into the entire argument area,
2470 the map must be allocated for its entire size, not just
2471 the part that is the responsibility of the caller. */
2472 if (!OUTGOING_REG_PARM_STACK_SPACE)
2473 needed += reg_parm_stack_space;
2475 #ifdef ARGS_GROW_DOWNWARD
2476 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2477 needed + 1);
2478 #else
2479 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2480 needed);
2481 #endif
2482 if (stack_usage_map_buf)
2483 free (stack_usage_map_buf);
2484 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2485 stack_usage_map = stack_usage_map_buf;
2487 if (initial_highest_arg_in_use)
2488 memcpy (stack_usage_map, initial_stack_usage_map,
2489 initial_highest_arg_in_use);
2491 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2492 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2493 (highest_outgoing_arg_in_use
2494 - initial_highest_arg_in_use));
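/* Illustration (not in the original source): if the caller's map covered
   bytes 0..15 and this call needs 24, the first 16 bytes of usage data are
   copied from initial_stack_usage_map and bytes 16..23 start out zero. */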
2497 /* The address of the outgoing argument list must not be
2498 copied to a register here, because argblock would be left
2499 pointing to the wrong place after the call to
2500 allocate_dynamic_stack_space below. */
2502 argblock = virtual_outgoing_args_rtx;
2506 if (inhibit_defer_pop == 0)
2508 /* Try to reuse some or all of the pending_stack_adjust
2509 to get this space. */
2511 = (combine_pending_stack_adjustment_and_call
2512 (unadjusted_args_size,
2513 &adjusted_args_size,
2514 preferred_unit_stack_boundary));
2516 /* combine_pending_stack_adjustment_and_call computes
2517 an adjustment before the arguments are allocated.
2518 Account for them and see whether or not the stack
2519 needs to go up or down. */
2520 needed = unadjusted_args_size - needed;
2524 /* We're releasing stack space. */
2525 /* ??? We can avoid any adjustment at all if we're
2526 already aligned. FIXME. */
2527 pending_stack_adjust = -needed;
2528 do_pending_stack_adjust ();
2532 /* We need to allocate space. We'll do that in
2533 push_block below. */
2534 pending_stack_adjust = 0;
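/* Reading of the logic above (added comment): the combined computation
   yields the stack adjustment to make before allocating arguments; after
   "needed = unadjusted_args_size - needed", a negative NEEDED means the
   pending adjustment over-provides the space, so stack is released at once,
   while a positive NEEDED is left for push_block below to allocate. */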
2537 /* Special case this because overhead of `push_block' in
2538 this case is non-trivial. */
2540 argblock = virtual_outgoing_args_rtx;
2543 argblock = push_block (GEN_INT (needed), 0, 0);
2544 #ifdef ARGS_GROW_DOWNWARD
2545 argblock = plus_constant (argblock, needed);
2546 #endif
2549 /* We only really need to call `copy_to_reg' in the case
2550 where push insns are going to be used to pass ARGBLOCK
2551 to a function call in ARGS. In that case, the stack
2552 pointer changes value from the allocation point to the
2553 call point, and hence the value of
2554 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2555 as well always do it. */
2556 argblock = copy_to_reg (argblock);
2561 if (ACCUMULATE_OUTGOING_ARGS)
2563 /* The save/restore code in store_one_arg handles all
2564 cases except one: a constructor call (including a C
2565 function returning a BLKmode struct) to initialize
2566 an argument. */
2567 if (stack_arg_under_construction)
2569 rtx push_size
2570 = GEN_INT (adjusted_args_size.constant
2571 + (OUTGOING_REG_PARM_STACK_SPACE ? 0
2572 : reg_parm_stack_space));
2573 if (old_stack_level == 0)
2575 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2576 NULL_RTX);
2577 old_stack_pointer_delta = stack_pointer_delta;
2578 old_pending_adj = pending_stack_adjust;
2579 pending_stack_adjust = 0;
2580 /* stack_arg_under_construction says whether a stack
2581 arg is being constructed at the old stack level.
2582 Pushing the stack gets a clean outgoing argument
2584 old_stack_arg_under_construction
2585 = stack_arg_under_construction;
2586 stack_arg_under_construction = 0;
2587 /* Make a new map for the new argument list. */
2588 if (stack_usage_map_buf)
2589 free (stack_usage_map_buf);
2590 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2591 stack_usage_map = stack_usage_map_buf;
2592 highest_outgoing_arg_in_use = 0;
2594 allocate_dynamic_stack_space (push_size, NULL_RTX,
2595 BITS_PER_UNIT);
2598 /* If argument evaluation might modify the stack pointer,
2599 copy the address of the argument list to a register. */
2600 for (i = 0; i < num_actuals; i++)
2601 if (args[i].pass_on_stack)
2603 argblock = copy_addr_to_reg (argblock);
2608 compute_argument_addresses (args, argblock, num_actuals);
2610 /* If we push args individually in reverse order, perform stack alignment
2611 before the first push (the last arg). */
2612 if (PUSH_ARGS_REVERSED && argblock == 0
2613 && adjusted_args_size.constant != unadjusted_args_size)
2615 /* When the stack adjustment is pending, we get better code
2616 by combining the adjustments. */
2617 if (pending_stack_adjust
2618 && ! (flags & ECF_LIBCALL_BLOCK)
2619 && ! inhibit_defer_pop)
2621 pending_stack_adjust
2622 = (combine_pending_stack_adjustment_and_call
2623 (unadjusted_args_size,
2624 &adjusted_args_size,
2625 preferred_unit_stack_boundary));
2626 do_pending_stack_adjust ();
2628 else if (argblock == 0)
2629 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2630 - unadjusted_args_size));
2632 /* Now that the stack is properly aligned, pops can't safely
2633 be deferred during the evaluation of the arguments. */
2636 funexp = rtx_for_function_call (fndecl, addr);
2638 /* Figure out the register where the value, if any, will come back. */
2640 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2641 && ! structure_value_addr)
2643 if (pcc_struct_value)
2644 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2645 fndecl, NULL, (pass == 0));
2646 else
2647 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2648 (pass == 0));
2650 /* If VALREG is a PARALLEL whose first member has a zero
2651 offset, use that. This is for targets such as m68k that
2652 return the same value in multiple places. */
2653 if (GET_CODE (valreg) == PARALLEL)
2655 rtx elem = XVECEXP (valreg, 0, 0);
2656 rtx where = XEXP (elem, 0);
2657 rtx offset = XEXP (elem, 1);
2658 if (offset == const0_rtx
2659 && GET_MODE (where) == GET_MODE (valreg))
2664 /* Precompute all register parameters. It isn't safe to compute anything
2665 once we have started filling any specific hard regs. */
2666 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2668 if (CALL_EXPR_STATIC_CHAIN (exp))
2669 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2671 static_chain_value = 0;
2673 #ifdef REG_PARM_STACK_SPACE
2674 /* Save the fixed argument area if it's part of the caller's frame and
2675 is clobbered by argument setup for this call. */
2676 if (ACCUMULATE_OUTGOING_ARGS && pass)
2677 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2678 &low_to_save, &high_to_save);
2679 #endif
2681 /* Now store (and compute if necessary) all non-register parms.
2682 These come before register parms, since they can require block-moves,
2683 which could clobber the registers used for register parms.
2684 Parms which have partial registers are not stored here,
2685 but we do preallocate space here if they want that. */
2687 for (i = 0; i < num_actuals; i++)
2688 if (args[i].reg == 0 || args[i].pass_on_stack)
2690 rtx before_arg = get_last_insn ();
2692 if (store_one_arg (&args[i], argblock, flags,
2693 adjusted_args_size.var != 0,
2694 reg_parm_stack_space)
2695 || (pass == 0
2696 && check_sibcall_argument_overlap (before_arg,
2697 &args[i], 1)))
2698 sibcall_failure = 1;
2700 if (flags & ECF_CONST
2702 && args[i].value == args[i].stack)
2703 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2704 gen_rtx_USE (VOIDmode,
2709 /* If we have a parm that is passed in registers but not in memory
2710 and whose alignment does not permit a direct copy into registers,
2711 make a group of pseudos that correspond to each register that we
2712 will later fill. */
2713 if (STRICT_ALIGNMENT)
2714 store_unaligned_arguments_into_pseudos (args, num_actuals);
2716 /* Now store any partially-in-registers parm.
2717 This is the last place a block-move can happen. */
2719 for (i = 0; i < num_actuals; i++)
2720 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2722 rtx before_arg = get_last_insn ();
2724 if (store_one_arg (&args[i], argblock, flags,
2725 adjusted_args_size.var != 0,
2726 reg_parm_stack_space)
2727 || (pass == 0
2728 && check_sibcall_argument_overlap (before_arg,
2729 &args[i], 1)))
2730 sibcall_failure = 1;
2733 /* If we pushed args in forward order, perform stack alignment
2734 after pushing the last arg. */
2735 if (!PUSH_ARGS_REVERSED && argblock == 0)
2736 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2737 - unadjusted_args_size));
2739 /* If register arguments require space on the stack and stack space
2740 was not preallocated, allocate stack space here for arguments
2741 passed in registers. */
2742 if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
2743 && must_preallocate == 0 && reg_parm_stack_space > 0)
2744 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2746 /* Pass the function the address in which to return a
2747 structure value. */
2748 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2750 structure_value_addr
2751 = convert_memory_address (Pmode, structure_value_addr);
2752 emit_move_insn (struct_value,
2753 force_reg (Pmode,
2754 force_operand (structure_value_addr,
2755 NULL_RTX)));
2757 if (REG_P (struct_value))
2758 use_reg (&call_fusage, struct_value);
2761 after_args = get_last_insn ();
2762 funexp = prepare_call_address (funexp, static_chain_value,
2763 &call_fusage, reg_parm_seen, pass == 0);
2765 load_register_parameters (args, num_actuals, &call_fusage, flags,
2766 pass == 0, &sibcall_failure);
2768 /* Save a pointer to the last insn before the call, so that we can
2769 later safely search backwards to find the CALL_INSN. */
2770 before_call = get_last_insn ();
2772 /* Set up next argument register. For sibling calls on machines
2773 with register windows this should be the incoming register. */
2774 #ifdef FUNCTION_INCOMING_ARG
2775 if (pass == 0)
2776 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2777 void_type_node, 1);
2778 else
2779 #endif
2780 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2781 void_type_node, 1);
2783 /* All arguments and registers used for the call must be set up by
2784 now! */
2786 /* Stack must be properly aligned now. */
2787 gcc_assert (!pass
2788 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2790 /* Generate the actual call instruction. */
2791 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2792 adjusted_args_size.constant, struct_value_size,
2793 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2794 flags, & args_so_far);
2796 /* If the call setup or the call itself overlaps with anything
2797 of the argument setup we probably clobbered our call address.
2798 In that case we can't do sibcalls. */
2799 if (pass == 0
2800 && check_sibcall_argument_overlap (after_args, 0, 0))
2801 sibcall_failure = 1;
2803 /* If a non-BLKmode value is returned at the most significant end
2804 of a register, shift the register right by the appropriate amount
2805 and update VALREG accordingly. BLKmode values are handled by the
2806 group load/store machinery below. */
2807 if (!structure_value_addr
2808 && !pcc_struct_value
2809 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2810 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2812 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2813 sibcall_failure = 1;
2814 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2817 /* If call is cse'able, make appropriate pair of reg-notes around it.
2818 Test valreg so we don't crash; may safely ignore `const'
2819 if return type is void. Disable for PARALLEL return values, because
2820 we have no way to move such values into a pseudo register. */
2821 if (pass && (flags & ECF_LIBCALL_BLOCK))
2825 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2827 insns = get_insns ();
2829 /* Expansion of block moves possibly introduced a loop that must
2830 not appear inside a libcall block. */
2831 for (insn = insns; insn; insn = NEXT_INSN (insn))
2843 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2845 /* Mark the return value as a pointer if needed. */
2846 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2847 mark_reg_pointer (temp,
2848 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2851 if (flag_unsafe_math_optimizations
2853 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2854 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2855 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2856 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2857 note = gen_rtx_fmt_e (SQRT,
2859 args[0].initial_value);
2862 /* Construct an "equal form" for the value which
2863 mentions all the arguments in order as well as
2864 the function name. */
2865 for (i = 0; i < num_actuals; i++)
2866 note = gen_rtx_EXPR_LIST (VOIDmode,
2867 args[i].initial_value, note);
2868 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2870 emit_libcall_block (insns, temp, valreg, note);
2875 else if (pass && (flags & ECF_MALLOC))
2877 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2880 /* The return value from a malloc-like function is a pointer. */
2881 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2882 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2884 emit_move_insn (temp, valreg);
2886 /* The return value from a malloc-like function cannot alias
2887 anything else. */
2888 last = get_last_insn ();
2889 REG_NOTES (last) =
2890 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2892 /* Write out the sequence. */
2893 insns = get_insns ();
2899 /* For calls to `setjmp', etc., inform
2900 function.c:setjmp_warnings that it should complain if
2901 nonvolatile values are live. For functions that cannot
2902 return, inform flow that control does not fall through. */
2904 if ((flags & ECF_NORETURN) || pass == 0)
2906 /* The barrier must be emitted
2907 immediately after the CALL_INSN. Some ports emit more
2908 than just a CALL_INSN above, so we must search for it here. */
2910 rtx last = get_last_insn ();
2911 while (!CALL_P (last))
2913 last = PREV_INSN (last);
2914 /* There was no CALL_INSN? */
2915 gcc_assert (last != before_call);
2918 emit_barrier_after (last);
2920 /* Stack adjustments after a noreturn call are dead code.
2921 However when NO_DEFER_POP is in effect, we must preserve
2922 stack_pointer_delta. */
2923 if (inhibit_defer_pop == 0)
2925 stack_pointer_delta = old_stack_allocated;
2926 pending_stack_adjust = 0;
2930 /* If value type not void, return an rtx for the value. */
2932 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2934 target = const0_rtx;
2935 else if (structure_value_addr)
2937 if (target == 0 || !MEM_P (target))
2939 target
2940 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2941 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2942 structure_value_addr));
2943 set_mem_attributes (target, exp, 1);
2946 else if (pcc_struct_value)
2948 /* This is the special C++ case where we need to
2949 know what the true target was. We take care to
2950 never use this value more than once in one expression. */
2951 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2952 copy_to_reg (valreg));
2953 set_mem_attributes (target, exp, 1);
2955 /* Handle calls that return values in multiple non-contiguous locations.
2956 The Irix 6 ABI has examples of this. */
2957 else if (GET_CODE (valreg) == PARALLEL)
2961 /* This will only be assigned once, so it can be readonly. */
2962 tree nt = build_qualified_type (TREE_TYPE (exp),
2963 (TYPE_QUALS (TREE_TYPE (exp))
2964 | TYPE_QUAL_CONST));
2966 target = assign_temp (nt, 0, 1, 1);
2969 if (! rtx_equal_p (target, valreg))
2970 emit_group_store (target, valreg, TREE_TYPE (exp),
2971 int_size_in_bytes (TREE_TYPE (exp)));
2973 /* We cannot support sibling calls for this case. */
2974 sibcall_failure = 1;
2976 else if (target
2977 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2978 && GET_MODE (target) == GET_MODE (valreg))
2980 bool may_overlap = false;
2982 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2983 reg to a plain register. */
2984 if (!REG_P (target) || HARD_REGISTER_P (target))
2985 valreg = avoid_likely_spilled_reg (valreg);
2987 /* If TARGET is a MEM in the argument area, and we have
2988 saved part of the argument area, then we can't store
2989 directly into TARGET as it may get overwritten when we
2990 restore the argument save area below. Don't work too
2991 hard though and simply force TARGET to a register if it
2992 is a MEM; the optimizer is quite likely to sort it out. */
2993 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2994 for (i = 0; i < num_actuals; i++)
2995 if (args[i].save_area)
2996 {
2997 may_overlap = true;
2998 break;
2999 }
3001 if (may_overlap)
3002 target = copy_to_reg (valreg);
3005 /* TARGET and VALREG cannot be equal at this point
3006 because the latter would not have
3007 REG_FUNCTION_VALUE_P true, while the former would if
3008 it were referring to the same register.
3010 If they refer to the same register, this move will be
3011 a no-op, except when function inlining is being
3012 done. */
3013 emit_move_insn (target, valreg);
3015 /* If we are setting a MEM, this code must be executed.
3016 Since it is emitted after the call insn, sibcall
3017 optimization cannot be performed in that case. */
3018 if (MEM_P (target))
3019 sibcall_failure = 1;
3022 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3024 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3026 /* We cannot support sibling calls for this case. */
3027 sibcall_failure = 1;
3030 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3032 if (targetm.calls.promote_function_return (funtype))
3034 /* If we promoted this return value, make the proper SUBREG.
3035 TARGET might be const0_rtx here, so be careful. */
3036 if (REG_P (target)
3037 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3038 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3040 tree type = TREE_TYPE (exp);
3041 int unsignedp = TYPE_UNSIGNED (type);
3042 int offset = 0;
3043 enum machine_mode pmode;
3045 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3046 /* If we don't promote as expected, something is wrong. */
3047 gcc_assert (GET_MODE (target) == pmode);
3049 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3050 && (GET_MODE_SIZE (GET_MODE (target))
3051 > GET_MODE_SIZE (TYPE_MODE (type))))
3053 offset = GET_MODE_SIZE (GET_MODE (target))
3054 - GET_MODE_SIZE (TYPE_MODE (type));
3055 if (! BYTES_BIG_ENDIAN)
3056 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3057 else if (! WORDS_BIG_ENDIAN)
3058 offset %= UNITS_PER_WORD;
3060 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3061 SUBREG_PROMOTED_VAR_P (target) = 1;
3062 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
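/* Worked example (added for illustration): an SImode value promoted to
   DImode on a fully big-endian 64-bit target gives offset = 8 - 4 = 4 and
   neither endianness adjustment fires, so TARGET becomes
   (subreg:SI (reg:DI ...) 4) with SUBREG_PROMOTED_VAR_P set. */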
3066 /* If size of args is variable or this was a constructor call for a stack
3067 argument, restore saved stack-pointer value. */
3069 if (old_stack_level)
3071 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3072 stack_pointer_delta = old_stack_pointer_delta;
3073 pending_stack_adjust = old_pending_adj;
3074 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3075 stack_arg_under_construction = old_stack_arg_under_construction;
3076 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3077 stack_usage_map = initial_stack_usage_map;
3078 sibcall_failure = 1;
3080 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3082 #ifdef REG_PARM_STACK_SPACE
3083 if (save_area)
3084 restore_fixed_argument_area (save_area, argblock,
3085 high_to_save, low_to_save);
3086 #endif
3088 /* If we saved any argument areas, restore them. */
3089 for (i = 0; i < num_actuals; i++)
3090 if (args[i].save_area)
3092 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3094 = gen_rtx_MEM (save_mode,
3095 memory_address (save_mode,
3096 XEXP (args[i].stack_slot, 0)));
3098 if (save_mode != BLKmode)
3099 emit_move_insn (stack_area, args[i].save_area);
3101 emit_block_move (stack_area, args[i].save_area,
3102 GEN_INT (args[i].locate.size.constant),
3103 BLOCK_OP_CALL_PARM);
3106 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3107 stack_usage_map = initial_stack_usage_map;
3110 /* If this was alloca, record the new stack level for nonlocal gotos.
3111 Check for the handler slots since we might not have a save area
3112 for non-local gotos. */
3114 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3115 update_nonlocal_goto_save_area ();
3117 /* Free up storage we no longer need. */
3118 for (i = 0; i < num_actuals; ++i)
3119 if (args[i].aligned_regs)
3120 free (args[i].aligned_regs);
3122 insns = get_insns ();
3123 end_sequence ();
3125 if (pass == 0)
3127 tail_call_insns = insns;
3129 /* Restore the pending stack adjustment now that we have
3130 finished generating the sibling call sequence. */
3132 pending_stack_adjust = save_pending_stack_adjust;
3133 stack_pointer_delta = save_stack_pointer_delta;
3135 /* Prepare arg structure for next iteration. */
3136 for (i = 0; i < num_actuals; i++)
3138 args[i].value = 0;
3139 args[i].aligned_regs = 0;
3140 args[i].stack = 0;
3143 sbitmap_free (stored_args_map);
3147 normal_call_insns = insns;
3149 /* Verify that we've deallocated all the stack we used. */
3150 gcc_assert ((flags & ECF_NORETURN)
3151 || (old_stack_allocated
3152 == stack_pointer_delta - pending_stack_adjust));
3155 /* If something prevents making this a sibling call,
3156 zero out the sequence. */
3157 if (sibcall_failure)
3158 tail_call_insns = NULL_RTX;
3163 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3164 arguments too, as the argument area is now clobbered by the call. */
3165 if (tail_call_insns)
3167 emit_insn (tail_call_insns);
3168 crtl->tail_call_emit = true;
3171 emit_insn (normal_call_insns);
3173 currently_expanding_call--;
3175 if (stack_usage_map_buf)
3176 free (stack_usage_map_buf);
3181 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3182 this function's incoming arguments.
3184 At the start of RTL generation we know the only REG_EQUIV notes
3185 in the rtl chain are those for incoming arguments, so we can look
3186 for REG_EQUIV notes between the start of the function and the
3187 NOTE_INSN_FUNCTION_BEG.
3189 This is (slight) overkill. We could keep track of the highest
3190 argument we clobber and be more selective in removing notes, but it
3191 does not seem to be worth the effort. */
3193 void
3194 fixup_tail_calls (void)
3198 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3202 /* There are never REG_EQUIV notes for the incoming arguments
3203 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3204 if (NOTE_P (insn)
3205 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3206 break;
3208 note = find_reg_note (insn, REG_EQUIV, 0);
3209 while (note)
3210 remove_note (insn, note);
3211 note = find_reg_note (insn, REG_EQUIV, 0);
3216 /* Traverse a list of TYPES and expand all complex types into their
3217 components. */
3218 static tree
3219 split_complex_types (tree types)
3223 /* Before allocating memory, check for the common case of no complex types. */
3224 for (p = types; p; p = TREE_CHAIN (p))
3226 tree type = TREE_VALUE (p);
3227 if (TREE_CODE (type) == COMPLEX_TYPE
3228 && targetm.calls.split_complex_arg (type))
3234 types = copy_list (types);
3236 for (p = types; p; p = TREE_CHAIN (p))
3238 tree complex_type = TREE_VALUE (p);
3240 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3241 && targetm.calls.split_complex_arg (complex_type))
3245 /* Rewrite complex type with component type. */
3246 TREE_VALUE (p) = TREE_TYPE (complex_type);
3247 next = TREE_CHAIN (p);
3249 /* Add another component type for the imaginary part. */
3250 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3251 TREE_CHAIN (p) = imag;
3252 TREE_CHAIN (imag) = next;
3254 /* Skip the newly created node. */
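/* Example (added for illustration): a parameter list (complex double, int)
   is rewritten in place to (double, double, int) -- the real component type
   replaces the complex entry and a fresh list node for the imaginary part
   is spliced in directly after it. */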
3262 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3263 The RETVAL parameter specifies whether the return value needs to be saved;
3264 the other parameters are documented in the emit_library_call function below. */
3266 static rtx
3267 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3268 enum libcall_type fn_type,
3269 enum machine_mode outmode, int nargs, va_list p)
3271 /* Total size in bytes of all the stack-parms scanned so far. */
3272 struct args_size args_size;
3273 /* Size of arguments before any adjustments (such as rounding). */
3274 struct args_size original_args_size;
3280 CUMULATIVE_ARGS args_so_far;
3284 enum machine_mode mode;
3287 struct locate_and_pad_arg_data locate;
3291 int old_inhibit_defer_pop = inhibit_defer_pop;
3292 rtx call_fusage = 0;
3295 int pcc_struct_value = 0;
3296 int struct_value_size = 0;
3298 int reg_parm_stack_space = 0;
3301 tree tfom; /* type_for_mode (outmode, 0) */
3303 #ifdef REG_PARM_STACK_SPACE
3304 /* Define the boundary of the register parm stack space that needs to be
3305 saved, if any. */
3306 int low_to_save, high_to_save;
3307 rtx save_area = 0; /* Place that it is saved. */
3308 #endif
3310 /* Size of the stack reserved for parameter registers. */
3311 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3312 char *initial_stack_usage_map = stack_usage_map;
3313 char *stack_usage_map_buf = NULL;
3315 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3317 #ifdef REG_PARM_STACK_SPACE
3318 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3319 #endif
3321 /* By default, library functions cannot throw. */
3322 flags = ECF_NOTHROW;
3334 case LCT_CONST_MAKE_BLOCK:
3335 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3337 case LCT_PURE_MAKE_BLOCK:
3338 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3341 flags |= ECF_NORETURN;
3344 flags = ECF_NORETURN;
3346 case LCT_RETURNS_TWICE:
3347 flags = ECF_RETURNS_TWICE;
3352 /* Ensure current function's preferred stack boundary is at least
3354 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3355 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3357 /* If this kind of value comes back in memory,
3358 decide where in memory it should come back. */
3359 if (outmode != VOIDmode)
3361 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3362 if (aggregate_value_p (tfom, 0))
3364 #ifdef PCC_STATIC_STRUCT_RETURN
3366 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3367 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3368 pcc_struct_value = 1;
3370 value = gen_reg_rtx (outmode);
3371 #else /* not PCC_STATIC_STRUCT_RETURN */
3372 struct_value_size = GET_MODE_SIZE (outmode);
3373 if (value != 0 && MEM_P (value))
3376 mem_value = assign_temp (tfom, 0, 1, 1);
3378 /* This call returns a big structure. */
3379 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE
3380 | ECF_LIBCALL_BLOCK);
3384 tfom = void_type_node;
3386 /* ??? Unfinished: must pass the memory address as an argument. */
3388 /* Copy all the libcall-arguments out of the varargs data
3389 and into a vector ARGVEC.
3391 Compute how to pass each argument. We only support a very small subset
3392 of the full argument passing conventions to limit complexity here since
3393 library functions shouldn't have many args. */
3395 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3396 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3398 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3399 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3400 #else
3401 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3402 #endif
3404 args_size.constant = 0;
3409 /* Now we are about to start emitting insns that can be deleted
3410 if a libcall is deleted. */
3411 if (flags & ECF_LIBCALL_BLOCK)
3416 /* If there's a structure value address to be passed,
3417 either pass it in the special place, or pass it as an extra argument. */
3418 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3420 rtx addr = XEXP (mem_value, 0);
3424 /* Make sure it is a reasonable operand for a move or push insn. */
3425 if (!REG_P (addr) && !MEM_P (addr)
3426 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3427 addr = force_operand (addr, NULL_RTX);
3429 argvec[count].value = addr;
3430 argvec[count].mode = Pmode;
3431 argvec[count].partial = 0;
3433 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3434 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3435 NULL_TREE, 1) == 0);
3437 locate_and_pad_parm (Pmode, NULL_TREE,
3438 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3441 argvec[count].reg != 0,
3443 0, NULL_TREE, &args_size, &argvec[count].locate);
3445 if (argvec[count].reg == 0 || argvec[count].partial != 0
3446 || reg_parm_stack_space > 0)
3447 args_size.constant += argvec[count].locate.size.constant;
3449 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3454 for (; count < nargs; count++)
3456 rtx val = va_arg (p, rtx);
3457 enum machine_mode mode = va_arg (p, enum machine_mode);
3459 /* We cannot convert the arg value to the mode the library wants here;
3460 must do it earlier where we know the signedness of the arg. */
3461 gcc_assert (mode != BLKmode
3462 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3464 /* Make sure it is a reasonable operand for a move or push insn. */
3465 if (!REG_P (val) && !MEM_P (val)
3466 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3467 val = force_operand (val, NULL_RTX);
3469 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3473 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3475 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3476 functions, so we have to pretend this isn't such a function. */
3477 if (flags & ECF_LIBCALL_BLOCK)
3479 rtx insns = get_insns ();
3484 /* If this was a CONST function, it is now PURE since it now
3485 reads memory. */
3486 if (flags & ECF_CONST)
3488 flags &= ~ECF_CONST;
3492 if (MEM_P (val) && !must_copy)
3496 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3498 emit_move_insn (slot, val);
3501 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3502 gen_rtx_USE (VOIDmode, slot),
3505 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3506 gen_rtx_CLOBBER (VOIDmode,
3511 val = force_operand (XEXP (slot, 0), NULL_RTX);
3514 argvec[count].value = val;
3515 argvec[count].mode = mode;
3517 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3519 argvec[count].partial
3520 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3522 locate_and_pad_parm (mode, NULL_TREE,
3523 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3526 argvec[count].reg != 0,
3528 argvec[count].partial,
3529 NULL_TREE, &args_size, &argvec[count].locate);
3531 gcc_assert (!argvec[count].locate.size.var);
3533 if (argvec[count].reg == 0 || argvec[count].partial != 0
3534 || reg_parm_stack_space > 0)
3535 args_size.constant += argvec[count].locate.size.constant;
3537 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3540 /* If this machine requires an external definition for library
3541 functions, write one out. */
3542 assemble_external_libcall (fun);
3544 original_args_size = args_size;
3545 args_size.constant = (((args_size.constant
3546 + stack_pointer_delta
3550 - stack_pointer_delta);
3552 args_size.constant = MAX (args_size.constant,
3553 reg_parm_stack_space);
3555 if (!OUTGOING_REG_PARM_STACK_SPACE)
3556 args_size.constant -= reg_parm_stack_space;
3558 if (args_size.constant > crtl->outgoing_args_size)
3559 crtl->outgoing_args_size = args_size.constant;
3561 if (ACCUMULATE_OUTGOING_ARGS)
3563 /* Since the stack pointer will never be pushed, it is possible for
3564 the evaluation of a parm to clobber something we have already
3565 written to the stack. Since most function calls on RISC machines
3566 do not use the stack, this is uncommon, but must work correctly.
3568 Therefore, we save any area of the stack that was already written
3569 and that we are using. Here we set up to do this by making a new
3570 stack usage map from the old one.
3572 Another approach might be to try to reorder the argument
3573 evaluations to avoid this conflicting stack usage. */
3575 needed = args_size.constant;
3577 /* Since we will be writing into the entire argument area, the
3578 map must be allocated for its entire size, not just the part that
3579 is the responsibility of the caller. */
3580 if (!OUTGOING_REG_PARM_STACK_SPACE)
3581 needed += reg_parm_stack_space;
3583 #ifdef ARGS_GROW_DOWNWARD
3584 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3585 needed + 1);
3586 #else
3587 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3588 needed);
3589 #endif
3590 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3591 stack_usage_map = stack_usage_map_buf;
3593 if (initial_highest_arg_in_use)
3594 memcpy (stack_usage_map, initial_stack_usage_map,
3595 initial_highest_arg_in_use);
3597 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3598 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3599 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3602 /* We must be careful to use virtual regs before they're instantiated,
3603 and real regs afterwards. Loop optimization, for example, can create
3604 new libcalls after we've instantiated the virtual regs, and if we
3605 use virtuals anyway, they won't match the rtl patterns. */
3607 if (virtuals_instantiated)
3608 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3610 argblock = virtual_outgoing_args_rtx;
3615 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3618 /* If we push args individually in reverse order, perform stack alignment
3619 before the first push (the last arg). */
3620 if (argblock == 0 && PUSH_ARGS_REVERSED)
3621 anti_adjust_stack (GEN_INT (args_size.constant
3622 - original_args_size.constant));
3624 if (PUSH_ARGS_REVERSED)
3625 {
3626 inc = -1;
3627 argnum = nargs - 1;
3628 }
3629 else
3630 {
3631 inc = 1;
3632 argnum = 0;
3633 }
3635 #ifdef REG_PARM_STACK_SPACE
3636 if (ACCUMULATE_OUTGOING_ARGS)
3638 /* The argument list is the property of the called routine and it
3639 may clobber it. If the fixed area has been used for previous
3640 parameters, we must save and restore it. */
3641 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3642 &low_to_save, &high_to_save);
3646 /* Push the args that need to be pushed. */
3648 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3649 are to be pushed. */
3650 for (count = 0; count < nargs; count++, argnum += inc)
3652 enum machine_mode mode = argvec[argnum].mode;
3653 rtx val = argvec[argnum].value;
3654 rtx reg = argvec[argnum].reg;
3655 int partial = argvec[argnum].partial;
3656 int lower_bound = 0, upper_bound = 0, i;
3658 if (! (reg != 0 && partial == 0))
3660 if (ACCUMULATE_OUTGOING_ARGS)
3662 /* If this is being stored into a pre-allocated, fixed-size,
3663 stack area, save any previous data at that location. */
3665 #ifdef ARGS_GROW_DOWNWARD
3666 /* The stack slot offset is negative, but we want to index
3667 stack_usage_map with positive values. */
3668 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3669 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3670 #else
3671 lower_bound = argvec[argnum].locate.offset.constant;
3672 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3673 #endif
3675 i = lower_bound;
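/* Illustrative numbers (not in the original source): with ARGS_GROW_DOWNWARD,
   offset.constant == -8 and size.constant == 4 give upper_bound = 9 and
   lower_bound = 5, so bytes 5..8 of stack_usage_map are the ones checked and
   later marked for this argument. */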
3676 /* Don't worry about things in the fixed argument area;
3677 it has already been saved. */
3678 if (i < reg_parm_stack_space)
3679 i = reg_parm_stack_space;
3680 while (i < upper_bound && stack_usage_map[i] == 0)
3683 if (i < upper_bound)
3685 /* We need to make a save area. */
3687 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3688 enum machine_mode save_mode
3689 = mode_for_size (size, MODE_INT, 1);
3691 = plus_constant (argblock,
3692 argvec[argnum].locate.offset.constant);
3694 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3696 if (save_mode == BLKmode)
3698 argvec[argnum].save_area
3699 = assign_stack_temp (BLKmode,
3700 argvec[argnum].locate.size.constant,
3703 emit_block_move (validize_mem (argvec[argnum].save_area),
3705 GEN_INT (argvec[argnum].locate.size.constant),
3706 BLOCK_OP_CALL_PARM);
3710 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3712 emit_move_insn (argvec[argnum].save_area, stack_area);
3717 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3718 partial, reg, 0, argblock,
3719 GEN_INT (argvec[argnum].locate.offset.constant),
3720 reg_parm_stack_space,
3721 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3723 /* Now mark the segment we just used. */
3724 if (ACCUMULATE_OUTGOING_ARGS)
3725 for (i = lower_bound; i < upper_bound; i++)
3726 stack_usage_map[i] = 1;
3730 if (flags & ECF_CONST)
3734 /* Indicate argument access so that alias.c knows that these
3735 values are defined. */
3737 use = plus_constant (argblock,
3738 argvec[argnum].locate.offset.constant);
3740 /* When arguments are pushed, trying to tell alias.c where
3741 exactly this argument is won't work, because the
3742 auto-increment causes confusion. So we merely indicate
3743 that we access something with a known mode somewhere on
3745 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3746 gen_rtx_SCRATCH (Pmode));
3747 use = gen_rtx_MEM (argvec[argnum].mode, use);
3748 use = gen_rtx_USE (VOIDmode, use);
3749 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3754 /* If we pushed args in forward order, perform stack alignment
3755 after pushing the last arg. */
3756 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3757 anti_adjust_stack (GEN_INT (args_size.constant
3758 - original_args_size.constant));
3760 if (PUSH_ARGS_REVERSED)
3761 argnum = nargs - 1;
3762 else
3763 argnum = 0;
3765 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3767 /* Now load any reg parms into their regs. */
3769 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3770 are to be pushed. */
3771 for (count = 0; count < nargs; count++, argnum += inc)
3773 enum machine_mode mode = argvec[argnum].mode;
3774 rtx val = argvec[argnum].value;
3775 rtx reg = argvec[argnum].reg;
3776 int partial = argvec[argnum].partial;
3778 /* Handle calls that pass values in multiple non-contiguous
3779 locations. The PA64 has examples of this for library calls. */
3780 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3781 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3782 else if (reg != 0 && partial == 0)
3783 emit_move_insn (reg, val);
  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	use_group_regs (&call_fusage, reg);
      else if (reg != 0)
	{
	  int partial = argvec[count].partial;
	  if (partial)
	    {
	      int nregs;

	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	      use_regs (&call_fusage, REGNO (reg), nregs);
	    }
	  else
	    use_reg (&call_fusage, reg);
	}
    }
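  /* A worked example for the loop above (not from the original source):
     with partial == 8 and UNITS_PER_WORD == 4, two words of the argument
     are in registers, so the two consecutive hard regs starting at
     REGNO (reg) are marked used; a reg with partial == 0 is marked used
     as a whole via use_reg.  */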
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
		      force_reg (Pmode,
				 force_operand (XEXP (mem_value, 0),
						NULL_RTX)));
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }
  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  valreg = (mem_value == 0 && outmode != VOIDmode
	    ? hard_libcall_value (outmode) : NULL_RTX);
  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
		& (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
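  /* E.g. with PREFERRED_STACK_BOUNDARY == 128 the boundary is 16 bytes,
     so the assertion demands that the low four bits of
     stack_pointer_delta be zero at the point of the call.  */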
  before_call = get_last_insn ();
  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
	 immediately after the CALL_INSN.  Some ports emit more than
	 just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      emit_barrier_after (last);
    }
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
	{
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	}
      else
	{
	  rtx note = 0;
	  rtx temp;
	  int i;

	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      temp = gen_reg_rtx (outmode);
	      emit_group_store (temp, valreg, NULL_TREE,
				GET_MODE_SIZE (outmode));
	      valreg = temp;
	    }

	  temp = gen_reg_rtx (GET_MODE (valreg));

	  /* Construct an "equal form" for the value which mentions all the
	     arguments in order as well as the function name.  */
	  for (i = 0; i < nargs; i++)
	    note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
	  note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
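	  /* For instance (illustration only), with two arguments the note
	     built above reads
	       (expr_list fun (expr_list arg1 (expr_list arg0 (nil))))
	     i.e. the function expression outermost and the first argument
	     innermost.  */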
	  insns = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insns, temp, valreg, note);

	  valreg = temp;
	}
    }
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else
	{
	  /* Convert to the proper mode if PROMOTE_MODE has been active.  */
	  if (GET_MODE (valreg) != outmode)
	    {
	      int unsignedp = TYPE_UNSIGNED (tfom);

	      gcc_assert (targetm.calls.promote_function_return (tfom));
	      gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
			  == GET_MODE (valreg));
	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
	    }

	  if (value != 0)
	    emit_move_insn (value, valreg);
	  else
	    value = valreg;
	}
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem (argvec[count].save_area),
			       GEN_INT (argvec[count].locate.size.constant),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch))),
   or another LCT_ value for other types of library calls.  */
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
		   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
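/* Illustrative use (the rtx operands here are hypothetical, assuming the
   classic memset_libfunc entry from libfuncs.h):

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
			dest_addr, Pmode,
			const0_rtx, TYPE_MODE (integer_type_node),
			size_rtx, TYPE_MODE (sizetype));

   i.e. three value/mode pairs follow NARGS, matching the alternating
   convention documented above.  */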
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */
rtx
emit_library_call_value (rtx orgfun, rtx value,
			 enum libcall_type fn_type,
			 enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
				      nargs, p);
  va_end (p);

  return result;
}
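/* Illustrative use (operands hypothetical): expanding a binary operation
   on a machine without the corresponding insn might do

     target = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
				       mode, 2, op0, mode, op1, mode);

   letting this function pick a convenient return location, since VALUE
   is NULL_RTX.  */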
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a mask of ECF_* flags; ECF_MAY_BE_ALLOCA says this could be
   a call to `alloca', so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used, when ACCUMULATE_OUTGOING_ARGS, to
   indicate that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments that are also passed in registers.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;
  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
#ifdef ARGS_GROW_DOWNWARD
	  /* stack_slot is negative, but we want to index stack_usage_map
	     with positive values.  */
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	  else
	    upper_bound = 0;

	  lower_bound = upper_bound - arg->locate.size.constant;
#else
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	  else
	    lower_bound = 0;

	  upper_bound = lower_bound + arg->locate.size.constant;
#endif
	  i = lower_bound;
	  /* Don't worry about things in the fixed argument area;
	     it has already been saved.  */
	  if (i < reg_parm_stack_space)
	    i = reg_parm_stack_space;
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;
	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
	      enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  tree ot = TREE_TYPE (arg->tree_value);
		  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
						       | TYPE_QUAL_CONST));

		  arg->save_area = assign_temp (nt, 0, 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (arg->save_area), stack_area,
				   GEN_INT (arg->locate.size.constant),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }
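  /* Editorial note on the BLKmode path above: the save temp is created
     with a const-qualified variant of the argument's type, presumably so
     the backing MEM can be treated as read-only by later passes; the
     temp only ever holds a snapshot of the slot's previous contents.  */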
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);
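      /* In other words: when no part of the argument goes in registers
	 and the tree's natural mode matches arg->mode, the value can be
	 expanded straight into its stack slot (arg->stack as the target),
	 saving a copy; otherwise it is expanded to wherever expand_expr
	 finds convenient and stored afterward.  */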
      /* If we are promoting the object (or if for any other reason the
	 mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }
  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
					       arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();
  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */
      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;
      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));
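      /* Worked example (not from the original source): for a 1-byte
	 argument with a 32-bit PARM_BOUNDARY, the rounding above yields
	 used = ((1 + 3) / 4) * 4 = 4, so the slot is padded out to a
	 full 4-byte parameter boundary.  */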
      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  int pad = used - size;
	  if (pad)
	    {
	      unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, pad_align);
	    }
	}
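      /* The expression (pad & -pad) isolates the lowest set bit of PAD:
	 e.g. pad == 12 gives 4 bytes, i.e. a 32-bit pad_align, so a
	 downward-padded argument is assumed aligned only as strictly as
	 its padding allows.  */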
      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      parm_align, partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}
      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     crtl->args.internal_arg_pointer
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
	    {
	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0
			  && GET_CODE (size_rtx) == CONST_INT);

	      if (arg->locate.offset.constant > i)
		{
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  /* Use arg->locate.size.constant instead of size_rtx
		     because we only care about the part of the argument
		     on the stack.  */
		  if (i < (arg->locate.offset.constant
			   + arg->locate.size.constant))
		    sibcall_failure = 1;
		}
	      else
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (arg->locate.size.constant != INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}
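      /* To summarize the three cases above (illustration only): with the
	 incoming value at offset I of size INTVAL (size_rtx) and the
	 outgoing slot at arg->locate.offset.constant of size
	 arg->locate.size.constant, any partial overlap between the two
	 byte ranges, or an exact-offset match with differing sizes,
	 forces sibcall_failure.  */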
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }
  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;
  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
			     const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}