1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
40 #include "langhooks.h"
45 #include "tree-flow.h"
47 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
48 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
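/* For example, on a hypothetical target whose PREFERRED_STACK_BOUNDARY
   is 128 bits, STACK_BYTES works out to 128 / 8 = 16 bytes. */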
50 /* Data structure and subroutines used within expand_call. */
54 /* Tree node for this argument. */
56 /* Mode for value; TYPE_MODE unless promoted. */
57 enum machine_mode mode;
58 /* Current RTL value for argument, or 0 if it isn't precomputed. */
60 /* Initially-computed RTL value for argument; only for const functions. */
62 /* Register to pass this argument in, 0 if passed on stack, or a
63 PARALLEL if the arg is to be copied into multiple non-contiguous
66 /* Register to pass this argument in when generating tail call sequence.
67 This is not the same register as for normal calls on machines with
70 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
71 form for emit_group_move. */
73 /* If REG was promoted from the actual mode of the argument expression,
74 indicates whether the promotion is sign- or zero-extended. */
76 /* Number of bytes to put in registers. 0 means put the whole arg
77 in registers. Also 0 if not passed in registers. */
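/* For example (hypothetical numbers), a 12-byte argument of which 8 bytes
   go in registers and the remaining 4 on the stack would have this field
   set to 8. */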
79 /* Nonzero if argument must be passed on stack.
80 Note that some arguments may be passed on the stack
81 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
82 pass_on_stack identifies arguments that *cannot* go in registers. */
84 /* Some fields packaged up for locate_and_pad_parm. */
85 struct locate_and_pad_arg_data locate;
86 /* Location on the stack at which parameter should be stored. The store
87 has already been done if STACK == VALUE. */
89 /* Location on the stack of the start of this argument slot. This can
90 differ from STACK if this arg pads downward. This location is known
91 to be aligned to FUNCTION_ARG_BOUNDARY. */
93 /* Place that this stack area has been saved, if needed. */
95 /* If an argument's alignment does not permit direct copying into registers,
96 copy in smaller-sized pieces into pseudos. These are stored in a
97 block pointed to by this field. The next field says how many
98 word-sized pseudos we made. */
103 /* A vector of one char per byte of stack space. A byte is nonzero if
104 the corresponding stack location has been used.
105 This vector is used to prevent a function call within an argument from
106 clobbering any stack already set up. */
107 static char *stack_usage_map;
109 /* Size of STACK_USAGE_MAP. */
110 static int highest_outgoing_arg_in_use;
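/* As a rough example, if outgoing argument space up to offset 23 has been
   touched so far, highest_outgoing_arg_in_use is 24 and the first 24 bytes
   of stack_usage_map record which of those locations are in use. */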
112 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
113 stack location's tail call argument has already been stored into the stack.
114 This bitmap is used to prevent sibling call optimization if the function tries
115 to use its parent's incoming argument slots when they have already been
116 overwritten with tail call arguments. */
117 static sbitmap stored_args_map;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 static int stack_arg_under_construction;
126 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
127 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
129 static void precompute_register_parameters (int, struct arg_data *, int *);
130 static int store_one_arg (struct arg_data *, rtx, int, int, int);
131 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
132 static int finalize_must_preallocate (int, int, struct arg_data *,
134 static void precompute_arguments (int, struct arg_data *);
135 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
136 static void initialize_argument_information (int, struct arg_data *,
137 struct args_size *, int,
139 tree, tree, CUMULATIVE_ARGS *, int,
140 rtx *, int *, int *, int *,
142 static void compute_argument_addresses (struct arg_data *, rtx, int);
143 static rtx rtx_for_function_call (tree, tree);
144 static void load_register_parameters (struct arg_data *, int, rtx *, int,
146 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
147 enum machine_mode, int, va_list);
148 static int special_function_p (const_tree, int);
149 static int check_sibcall_argument_overlap_1 (rtx);
150 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
152 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
154 static tree split_complex_types (tree);
156 #ifdef REG_PARM_STACK_SPACE
157 static rtx save_fixed_argument_area (int, rtx, int *, int *);
158 static void restore_fixed_argument_area (rtx, rtx, int, int);
161 /* Force FUNEXP into a form suitable for the address of a CALL,
162 and return that as an rtx. Also load the static chain register
163 if FNDECL is a nested function.
165 CALL_FUSAGE points to a variable holding the prospective
166 CALL_INSN_FUNCTION_USAGE information. */
169 prepare_call_address (rtx funexp, rtx static_chain_value,
170 rtx *call_fusage, int reg_parm_seen, int sibcallp)
172 /* Make a valid memory address and copy constants through pseudo-regs,
173 but not for a constant address if -fno-function-cse. */
174 if (GET_CODE (funexp) != SYMBOL_REF)
175 /* If we are using registers for parameters, force the
176 function address into a register now. */
177 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
178 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
179 : memory_address (FUNCTION_MODE, funexp));
182 #ifndef NO_FUNCTION_CSE
183 if (optimize && ! flag_no_function_cse)
184 funexp = force_reg (Pmode, funexp);
188 if (static_chain_value != 0)
190 static_chain_value = convert_memory_address (Pmode, static_chain_value);
191 emit_move_insn (static_chain_rtx, static_chain_value);
193 if (REG_P (static_chain_rtx))
194 use_reg (call_fusage, static_chain_rtx);
200 /* Generate instructions to call function FUNEXP,
201 and optionally pop the results.
202 The CALL_INSN is the first insn generated.
204 FNDECL is the declaration node of the function. This is given to the
205 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
207 FUNTYPE is the data type of the function. This is given to the macro
208 RETURN_POPS_ARGS to determine whether this function pops its own args.
209 We used to allow an identifier for library functions, but that doesn't
210 work when the return type is an aggregate type and the calling convention
211 says that the pointer to this aggregate is to be popped by the callee.
213 STACK_SIZE is the number of bytes of arguments on the stack,
214 ROUNDED_STACK_SIZE is that number rounded up to
215 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
216 both to put into the call insn and to generate explicit popping
219 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
220 It is zero if this call doesn't want a structure value.
222 NEXT_ARG_REG is the rtx that results from executing
223 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
224 just after all the args have had their registers assigned.
225 This could be whatever you like, but normally it is the first
226 arg-register beyond those used for args in this call,
227 or 0 if all the arg-registers are used in this call.
228 It is passed on to `gen_call' so you can put this info in the call insn.
230 VALREG is a hard register in which a value is returned,
231 or 0 if the call does not return a value.
233 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
234 the args to this call were processed.
235 We restore `inhibit_defer_pop' to that value.
237 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
238 denote registers used by the called function. */
241 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
242 tree funtype ATTRIBUTE_UNUSED,
243 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
244 HOST_WIDE_INT rounded_stack_size,
245 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
246 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
247 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
248 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
250 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
252 int already_popped = 0;
253 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
254 #if defined (HAVE_call) && defined (HAVE_call_value)
255 rtx struct_value_size_rtx;
256 struct_value_size_rtx = GEN_INT (struct_value_size);
259 #ifdef CALL_POPS_ARGS
260 n_popped += CALL_POPS_ARGS (* args_so_far);
263 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
264 and we don't want to load it into a register as an optimization,
265 because prepare_call_address already did it if it should be done. */
266 if (GET_CODE (funexp) != SYMBOL_REF)
267 funexp = memory_address (FUNCTION_MODE, funexp);
269 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
270 if ((ecf_flags & ECF_SIBCALL)
271 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
272 && (n_popped > 0 || stack_size == 0))
274 rtx n_pop = GEN_INT (n_popped);
277 /* If this subroutine pops its own args, record that in the call insn
278 if possible, for the sake of frame pointer elimination. */
281 pat = GEN_SIBCALL_VALUE_POP (valreg,
282 gen_rtx_MEM (FUNCTION_MODE, funexp),
283 rounded_stack_size_rtx, next_arg_reg,
286 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
287 rounded_stack_size_rtx, next_arg_reg, n_pop);
289 emit_call_insn (pat);
295 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
296 /* If the target has "call" or "call_value" insns, then prefer them
297 if no arguments are actually popped. If the target does not have
298 "call" or "call_value" insns, then we must use the popping versions
299 even if the call has no arguments to pop. */
300 #if defined (HAVE_call) && defined (HAVE_call_value)
301 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
304 if (HAVE_call_pop && HAVE_call_value_pop)
307 rtx n_pop = GEN_INT (n_popped);
310 /* If this subroutine pops its own args, record that in the call insn
311 if possible, for the sake of frame pointer elimination. */
314 pat = GEN_CALL_VALUE_POP (valreg,
315 gen_rtx_MEM (FUNCTION_MODE, funexp),
316 rounded_stack_size_rtx, next_arg_reg, n_pop);
318 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
319 rounded_stack_size_rtx, next_arg_reg, n_pop);
321 emit_call_insn (pat);
327 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
328 if ((ecf_flags & ECF_SIBCALL)
329 && HAVE_sibcall && HAVE_sibcall_value)
332 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
333 gen_rtx_MEM (FUNCTION_MODE, funexp),
334 rounded_stack_size_rtx,
335 next_arg_reg, NULL_RTX));
337 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
338 rounded_stack_size_rtx, next_arg_reg,
339 struct_value_size_rtx));
344 #if defined (HAVE_call) && defined (HAVE_call_value)
345 if (HAVE_call && HAVE_call_value)
348 emit_call_insn (GEN_CALL_VALUE (valreg,
349 gen_rtx_MEM (FUNCTION_MODE, funexp),
350 rounded_stack_size_rtx, next_arg_reg,
353 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
354 rounded_stack_size_rtx, next_arg_reg,
355 struct_value_size_rtx));
361 /* Find the call we just emitted. */
362 call_insn = last_call_insn ();
364 /* Put the register usage information there. */
365 add_function_usage_to (call_insn, call_fusage);
367 /* If this is a const call, then set the insn's unchanging bit. */
368 if (ecf_flags & ECF_CONST)
369 RTL_CONST_CALL_P (call_insn) = 1;
371 /* If this is a pure call, then set the insn's unchanging bit. */
372 if (ecf_flags & ECF_PURE)
373 RTL_PURE_CALL_P (call_insn) = 1;
375 /* If this is a const or pure call that may loop, then set the insn's corresponding bit. */
376 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
377 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
379 /* If this call can't throw, attach a REG_EH_REGION reg note to that
381 if (ecf_flags & ECF_NOTHROW)
382 add_reg_note (call_insn, REG_EH_REGION, const0_rtx);
385 int rn = lookup_expr_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa was not used or (2) the call
388 doesn't throw, which we already took care of. */
390 add_reg_note (call_insn, REG_EH_REGION, GEN_INT (rn));
393 if (ecf_flags & ECF_NORETURN)
394 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
396 if (ecf_flags & ECF_RETURNS_TWICE)
398 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
399 cfun->calls_setjmp = 1;
402 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
404 /* Restore this now, so that we do defer pops for this call's args
405 if the context of the call as a whole permits. */
406 inhibit_defer_pop = old_inhibit_defer_pop;
411 CALL_INSN_FUNCTION_USAGE (call_insn)
412 = gen_rtx_EXPR_LIST (VOIDmode,
413 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
414 CALL_INSN_FUNCTION_USAGE (call_insn));
415 rounded_stack_size -= n_popped;
416 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
417 stack_pointer_delta -= n_popped;
419 /* If popping is needed, stack realignment must use DRAP. */
420 if (SUPPORTS_STACK_ALIGNMENT)
421 crtl->need_drap = true;
424 if (!ACCUMULATE_OUTGOING_ARGS)
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
433 if (rounded_stack_size != 0)
435 if (ecf_flags & ECF_NORETURN)
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
442 adjust_stack (rounded_stack_size_rtx);
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
451 ??? We may optimize similarly to defer_pop above, but it is
452 probably not worthwhile.
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
457 anti_adjust_stack (GEN_INT (n_popped));
460 /* Determine if the function identified by NAME and FNDECL is one with
461 special properties we wish to know about.
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
466 Similarly set NORETURN if the function is in the longjmp family.
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
472 special_function_p (const_tree fndecl, int flags)
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
501 /* Disregard prefix _, __ or __x. */
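/* For example, under this rule "_setjmp", "__setjmp" and "__xsetjmp" are
   all compared below as plain "setjmp". */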
504 if (name[1] == '_' && name[2] == 'x')
506 else if (name[1] == '_')
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
518 && ! strcmp (tname, "sigsetjmp"))
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
524 && ! strcmp (tname, "siglongjmp"))
525 flags |= ECF_NORETURN;
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
530 && ! strcmp (tname, "vfork"))
531 || (tname[0] == 'g' && tname[1] == 'e'
532 && !strcmp (tname, "getcontext")))
533 flags |= ECF_RETURNS_TWICE;
535 else if (tname[0] == 'l' && tname[1] == 'o'
536 && ! strcmp (tname, "longjmp"))
537 flags |= ECF_NORETURN;
543 /* Return nonzero when FNDECL represents a call to setjmp. */
546 setjmp_call_p (const_tree fndecl)
548 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
552 /* Return true if STMT is an alloca call. */
555 gimple_alloca_call_p (const_gimple stmt)
559 if (!is_gimple_call (stmt))
562 fndecl = gimple_call_fndecl (stmt);
563 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
569 /* Return true when EXP contains an alloca call. */
572 alloca_call_p (const_tree exp)
574 if (TREE_CODE (exp) == CALL_EXPR
575 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
576 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
577 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
578 & ECF_MAY_BE_ALLOCA))
583 /* Detect flags (function attributes) from the function decl or type node. */
586 flags_from_decl_or_type (const_tree exp)
589 const_tree type = exp;
593 type = TREE_TYPE (exp);
595 /* The function exp may have the `malloc' attribute. */
596 if (DECL_IS_MALLOC (exp))
599 /* The function exp may have the `returns_twice' attribute. */
600 if (DECL_IS_RETURNS_TWICE (exp))
601 flags |= ECF_RETURNS_TWICE;
603 /* Process the pure and const attributes. */
604 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
606 if (DECL_PURE_P (exp))
608 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
609 flags |= ECF_LOOPING_CONST_OR_PURE;
611 if (DECL_IS_NOVOPS (exp))
614 if (TREE_NOTHROW (exp))
615 flags |= ECF_NOTHROW;
617 flags = special_function_p (exp, flags);
619 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
622 if (TREE_THIS_VOLATILE (exp))
623 flags |= ECF_NORETURN;
628 /* Detect flags from a CALL_EXPR. */
631 call_expr_flags (const_tree t)
634 tree decl = get_callee_fndecl (t);
637 flags = flags_from_decl_or_type (decl);
640 t = TREE_TYPE (CALL_EXPR_FN (t));
641 if (t && TREE_CODE (t) == POINTER_TYPE)
642 flags = flags_from_decl_or_type (TREE_TYPE (t));
650 /* Precompute all register parameters as described by ARGS, storing values
651 into fields within the ARGS array.
653 NUM_ACTUALS indicates the total number of elements in the ARGS array.
655 Set REG_PARM_SEEN if we encounter a register parameter. */
658 precompute_register_parameters (int num_actuals, struct arg_data *args,
665 for (i = 0; i < num_actuals; i++)
666 if (args[i].reg != 0 && ! args[i].pass_on_stack)
670 if (args[i].value == 0)
673 args[i].value = expand_normal (args[i].tree_value);
674 preserve_temp_slots (args[i].value);
678 /* If the value is a non-legitimate constant, force it into a
679 pseudo now. TLS symbols sometimes need a call to resolve. */
680 if (CONSTANT_P (args[i].value)
681 && !LEGITIMATE_CONSTANT_P (args[i].value))
682 args[i].value = force_reg (args[i].mode, args[i].value);
684 /* If we are to promote the function arg to a wider mode,
687 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
689 = convert_modes (args[i].mode,
690 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
691 args[i].value, args[i].unsignedp);
693 /* If we're going to have to load the value by parts, pull the
694 parts into pseudos. The part extraction process can involve
695 non-trivial computation. */
696 if (GET_CODE (args[i].reg) == PARALLEL)
698 tree type = TREE_TYPE (args[i].tree_value);
699 args[i].parallel_value
700 = emit_group_load_into_temps (args[i].reg, args[i].value,
701 type, int_size_in_bytes (type));
704 /* If the value is expensive, and we are inside an appropriately
705 short loop, put the value into a pseudo and then put the pseudo
708 For small register classes, also do this if this call uses
709 register parameters. This is to avoid reload conflicts while
710 loading the parameter registers. */
712 else if ((! (REG_P (args[i].value)
713 || (GET_CODE (args[i].value) == SUBREG
714 && REG_P (SUBREG_REG (args[i].value)))))
715 && args[i].mode != BLKmode
716 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
718 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
720 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
724 #ifdef REG_PARM_STACK_SPACE
726 /* The argument list is the property of the called routine and it
727 may clobber it. If the fixed area has been used for previous
728 parameters, we must save and restore it. */
731 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
736 /* Compute the boundary of the area that needs to be saved, if any. */
737 high = reg_parm_stack_space;
738 #ifdef ARGS_GROW_DOWNWARD
741 if (high > highest_outgoing_arg_in_use)
742 high = highest_outgoing_arg_in_use;
744 for (low = 0; low < high; low++)
745 if (stack_usage_map[low] != 0)
748 enum machine_mode save_mode;
753 while (stack_usage_map[--high] == 0)
757 *high_to_save = high;
759 num_to_save = high - low + 1;
760 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
762 /* If we don't have the required alignment, must do this
764 if ((low & (MIN (GET_MODE_SIZE (save_mode),
765 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
768 #ifdef ARGS_GROW_DOWNWARD
773 stack_area = gen_rtx_MEM (save_mode,
774 memory_address (save_mode,
775 plus_constant (argblock,
778 set_mem_align (stack_area, PARM_BOUNDARY);
779 if (save_mode == BLKmode)
781 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
782 emit_block_move (validize_mem (save_area), stack_area,
783 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
787 save_area = gen_reg_rtx (save_mode);
788 emit_move_insn (save_area, stack_area);
798 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
800 enum machine_mode save_mode = GET_MODE (save_area);
804 #ifdef ARGS_GROW_DOWNWARD
805 delta = -high_to_save;
809 stack_area = gen_rtx_MEM (save_mode,
810 memory_address (save_mode,
811 plus_constant (argblock, delta)));
812 set_mem_align (stack_area, PARM_BOUNDARY);
814 if (save_mode != BLKmode)
815 emit_move_insn (stack_area, save_area);
817 emit_block_move (stack_area, validize_mem (save_area),
818 GEN_INT (high_to_save - low_to_save + 1),
821 #endif /* REG_PARM_STACK_SPACE */
823 /* If any elements in ARGS refer to parameters that are to be passed in
824 registers, but not in memory, and whose alignment does not permit a
825 direct copy into registers, copy the values into a group of pseudos
826 which we will later copy into the appropriate hard registers.
828 Pseudos for each unaligned argument will be stored into the array
829 args[argnum].aligned_regs. The caller is responsible for deallocating
830 the aligned_regs array if it is nonzero. */
833 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
837 for (i = 0; i < num_actuals; i++)
838 if (args[i].reg != 0 && ! args[i].pass_on_stack
839 && args[i].mode == BLKmode
840 && MEM_P (args[i].value)
841 && (MEM_ALIGN (args[i].value)
842 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
844 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
845 int endian_correction = 0;
849 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
850 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
854 args[i].n_aligned_regs
855 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
858 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
860 /* Structures smaller than a word are normally aligned to the
861 least significant byte. On a BYTES_BIG_ENDIAN machine,
862 this means we must skip the empty high order bytes when
863 calculating the bit offset. */
864 if (bytes < UNITS_PER_WORD
865 #ifdef BLOCK_REG_PADDING
866 && (BLOCK_REG_PADDING (args[i].mode,
867 TREE_TYPE (args[i].tree_value), 1)
873 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
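/* A worked example with hypothetical numbers: a 3-byte structure on a
   32-bit big-endian target gives endian_correction = 32 - 3 * 8 = 8,
   i.e. the single unused high-order byte is skipped. */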
875 for (j = 0; j < args[i].n_aligned_regs; j++)
877 rtx reg = gen_reg_rtx (word_mode);
878 rtx word = operand_subword_force (args[i].value, j, BLKmode);
879 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
881 args[i].aligned_regs[j] = reg;
882 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
883 word_mode, word_mode);
885 /* There is no need to restrict this code to loading items
886 in TYPE_ALIGN sized hunks. The bitfield instructions can
887 load up entire word sized registers efficiently.
889 ??? This may not be needed anymore.
890 We used to emit a clobber here but that doesn't let later
891 passes optimize the instructions we emit. By storing 0 into
892 the register, later passes know that the first AND to zero out the
893 bitfield being set in the register is unnecessary. The store
894 of 0 will be deleted as will at least the first AND. */
896 emit_move_insn (reg, const0_rtx);
898 bytes -= bitsize / BITS_PER_UNIT;
899 store_bit_field (reg, bitsize, endian_correction, word_mode,
905 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
908 NUM_ACTUALS is the total number of parameters.
910 N_NAMED_ARGS is the total number of named arguments.
912 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
915 FNDECL is the tree code for the target of this call (if known)
917 ARGS_SO_FAR holds state needed by the target to know where to place
920 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
921 for arguments which are passed in registers.
923 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
924 and may be modified by this routine.
926 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
927 flags which may be modified by this routine.
929 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
930 that requires allocation of stack space.
932 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
933 the thunked-to function. */
936 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
937 struct arg_data *args,
938 struct args_size *args_size,
939 int n_named_args ATTRIBUTE_UNUSED,
940 tree exp, tree struct_value_addr_value,
941 tree fndecl, tree fntype,
942 CUMULATIVE_ARGS *args_so_far,
943 int reg_parm_stack_space,
944 rtx *old_stack_level, int *old_pending_adj,
945 int *must_preallocate, int *ecf_flags,
946 bool *may_tailcall, bool call_from_thunk_p)
948 /* 1 if scanning parms front to back, -1 if scanning back to front. */
951 /* Count arg position in order args appear. */
956 args_size->constant = 0;
959 /* In this loop, we consider args in the order they are written.
960 We fill up ARGS from the front or from the back if necessary
961 so that in any case the first arg to be pushed ends up at the front. */
963 if (PUSH_ARGS_REVERSED)
965 i = num_actuals - 1, inc = -1;
966 /* In this case, must reverse order of args
967 so that we compute and push the last arg first. */
974 /* First fill in the actual arguments in the ARGS array, splitting
975 complex arguments if necessary. */
978 call_expr_arg_iterator iter;
981 if (struct_value_addr_value)
983 args[j].tree_value = struct_value_addr_value;
986 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
988 tree argtype = TREE_TYPE (arg);
989 if (targetm.calls.split_complex_arg
991 && TREE_CODE (argtype) == COMPLEX_TYPE
992 && targetm.calls.split_complex_arg (argtype))
994 tree subtype = TREE_TYPE (argtype);
995 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
997 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1000 args[j].tree_value = arg;
1005 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1006 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1008 tree type = TREE_TYPE (args[i].tree_value);
1010 enum machine_mode mode;
1012 /* Replace erroneous argument with constant zero. */
1013 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1014 args[i].tree_value = integer_zero_node, type = integer_type_node;
1016 /* If TYPE is a transparent union, pass things the way we would
1017 pass the first field of the union. We have already verified that
1018 the modes are the same. */
1019 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1020 type = TREE_TYPE (TYPE_FIELDS (type));
1022 /* Decide where to pass this arg.
1024 args[i].reg is nonzero if all or part is passed in registers.
1026 args[i].partial is nonzero if part but not all is passed in registers,
1027 and the exact value says how many bytes are passed in registers.
1029 args[i].pass_on_stack is nonzero if the argument must at least be
1030 computed on the stack. It may then be loaded back into registers
1031 if args[i].reg is nonzero.
1033 These decisions are driven by the FUNCTION_... macros and must agree
1034 with those made by function.c. */
1036 /* See if this argument should be passed by invisible reference. */
1037 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1038 type, argpos < n_named_args))
1044 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1045 type, argpos < n_named_args);
1047 /* If we're compiling a thunk, pass through invisible references
1048 instead of making a copy. */
1049 if (call_from_thunk_p
1051 && !TREE_ADDRESSABLE (type)
1052 && (base = get_base_address (args[i].tree_value))
1053 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1055 /* We can't use sibcalls if a callee-copied argument is
1056 stored in the current function's frame. */
1057 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1058 *may_tailcall = false;
1060 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1061 type = TREE_TYPE (args[i].tree_value);
1063 if (*ecf_flags & ECF_CONST)
1064 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1068 /* We make a copy of the object and pass the address to the
1069 function being called. */
1072 if (!COMPLETE_TYPE_P (type)
1073 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1074 || (flag_stack_check == GENERIC_STACK_CHECK
1075 && compare_tree_int (TYPE_SIZE_UNIT (type),
1076 STACK_CHECK_MAX_VAR_SIZE) > 0))
1078 /* This is a variable-sized object. Make space on the stack
1080 rtx size_rtx = expr_size (args[i].tree_value);
1082 if (*old_stack_level == 0)
1084 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1085 *old_pending_adj = pending_stack_adjust;
1086 pending_stack_adjust = 0;
1089 copy = gen_rtx_MEM (BLKmode,
1090 allocate_dynamic_stack_space
1091 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1092 set_mem_attributes (copy, type, 1);
1095 copy = assign_temp (type, 0, 1, 0);
1097 store_expr (args[i].tree_value, copy, 0, false);
1099 /* Just change the const function to pure and then let
1100 the next test clear the pure based on
1102 if (*ecf_flags & ECF_CONST)
1104 *ecf_flags &= ~ECF_CONST;
1105 *ecf_flags |= ECF_PURE;
1108 if (!callee_copies && *ecf_flags & ECF_PURE)
1109 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1112 = build_fold_addr_expr (make_tree (type, copy));
1113 type = TREE_TYPE (args[i].tree_value);
1114 *may_tailcall = false;
1118 mode = TYPE_MODE (type);
1119 unsignedp = TYPE_UNSIGNED (type);
1121 if (targetm.calls.promote_function_args (fndecl
1122 ? TREE_TYPE (fndecl)
1124 mode = promote_mode (type, mode, &unsignedp, 1);
1126 args[i].unsignedp = unsignedp;
1127 args[i].mode = mode;
1129 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1130 argpos < n_named_args);
1131 #ifdef FUNCTION_INCOMING_ARG
1132 /* If this is a sibling call and the machine has register windows, the
1133 register window has to be unwound before calling the routine, so
1134 arguments have to go into the incoming registers. */
1135 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1136 argpos < n_named_args);
1138 args[i].tail_call_reg = args[i].reg;
1143 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1144 argpos < n_named_args);
1146 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1148 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1149 it means that we are to pass this arg in the register(s) designated
1150 by the PARALLEL, but also to pass it on the stack. */
1151 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1152 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1153 args[i].pass_on_stack = 1;
1155 /* If this is an addressable type, we must preallocate the stack
1156 since we must evaluate the object into its final location.
1158 If this is to be passed in both registers and the stack, it is simpler
1160 if (TREE_ADDRESSABLE (type)
1161 || (args[i].pass_on_stack && args[i].reg != 0))
1162 *must_preallocate = 1;
1164 /* Compute the stack-size of this argument. */
1165 if (args[i].reg == 0 || args[i].partial != 0
1166 || reg_parm_stack_space > 0
1167 || args[i].pass_on_stack)
1168 locate_and_pad_parm (mode, type,
1169 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1174 args[i].pass_on_stack ? 0 : args[i].partial,
1175 fndecl, args_size, &args[i].locate);
1176 #ifdef BLOCK_REG_PADDING
1178 /* The argument is passed entirely in registers. See at which
1179 end it should be padded. */
1180 args[i].locate.where_pad =
1181 BLOCK_REG_PADDING (mode, type,
1182 int_size_in_bytes (type) <= UNITS_PER_WORD);
1185 /* Update ARGS_SIZE, the total stack space for args so far. */
1187 args_size->constant += args[i].locate.size.constant;
1188 if (args[i].locate.size.var)
1189 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1191 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1192 have been used, etc. */
1194 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1195 argpos < n_named_args);
1199 /* Update ARGS_SIZE to contain the total size for the argument block.
1200 Return the original constant component of the argument block's size.
1202 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1203 for arguments passed in registers. */
1206 compute_argument_block_size (int reg_parm_stack_space,
1207 struct args_size *args_size,
1208 tree fndecl ATTRIBUTE_UNUSED,
1209 tree fntype ATTRIBUTE_UNUSED,
1210 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1212 int unadjusted_args_size = args_size->constant;
1214 /* For accumulate outgoing args mode we don't need to align, since the frame
1215 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1216 backends from generating misaligned frame sizes. */
1217 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1218 preferred_stack_boundary = STACK_BOUNDARY;
1220 /* Compute the actual size of the argument block required. The variable
1221 and constant sizes must be combined, the size may have to be rounded,
1222 and there may be a minimum required size. */
1226 args_size->var = ARGS_SIZE_TREE (*args_size);
1227 args_size->constant = 0;
1229 preferred_stack_boundary /= BITS_PER_UNIT;
1230 if (preferred_stack_boundary > 1)
1232 /* We don't handle this case yet. To handle it correctly we have
1233 to add the delta, round and subtract the delta.
1234 Currently no machine description requires this support. */
1235 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1236 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1239 if (reg_parm_stack_space > 0)
1242 = size_binop (MAX_EXPR, args_size->var,
1243 ssize_int (reg_parm_stack_space));
1245 /* The area corresponding to register parameters is not to count in
1246 the size of the block we need. So make the adjustment. */
1247 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1249 = size_binop (MINUS_EXPR, args_size->var,
1250 ssize_int (reg_parm_stack_space));
1255 preferred_stack_boundary /= BITS_PER_UNIT;
1256 if (preferred_stack_boundary < 1)
1257 preferred_stack_boundary = 1;
1258 args_size->constant = (((args_size->constant
1259 + stack_pointer_delta
1260 + preferred_stack_boundary - 1)
1261 / preferred_stack_boundary
1262 * preferred_stack_boundary)
1263 - stack_pointer_delta);
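/* A worked example with hypothetical numbers: with a 16-byte preferred
   boundary and stack_pointer_delta == 12, a raw constant of 10 becomes
   ((10 + 12 + 15) / 16) * 16 - 12 = 32 - 12 = 20, so that delta plus
   constant (12 + 20 = 32) is once again a multiple of the boundary. */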
1265 args_size->constant = MAX (args_size->constant,
1266 reg_parm_stack_space);
1268 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1269 args_size->constant -= reg_parm_stack_space;
1271 return unadjusted_args_size;
1274 /* Precompute parameters as needed for a function call.
1276 FLAGS is mask of ECF_* constants.
1278 NUM_ACTUALS is the number of arguments.
1280 ARGS is an array containing information for each argument; this
1281 routine fills in the INITIAL_VALUE and VALUE fields for each
1282 precomputed argument. */
1285 precompute_arguments (int num_actuals, struct arg_data *args)
1289 /* If this is a libcall, then precompute all arguments so that we do not
1290 get extraneous instructions emitted as part of the libcall sequence. */
1292 /* If we preallocated the stack space, and some arguments must be passed
1293 on the stack, then we must precompute any parameter which contains a
1294 function call which will store arguments on the stack.
1295 Otherwise, evaluating the parameter may clobber previous parameters
1296 which have already been stored into the stack. (we have code to avoid
1297 such a case by saving the outgoing stack arguments, but it results in
1299 if (!ACCUMULATE_OUTGOING_ARGS)
1302 for (i = 0; i < num_actuals; i++)
1304 enum machine_mode mode;
1306 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1309 /* If this is an addressable type, we cannot pre-evaluate it. */
1310 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1312 args[i].initial_value = args[i].value
1313 = expand_normal (args[i].tree_value);
1315 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1316 if (mode != args[i].mode)
1319 = convert_modes (args[i].mode, mode,
1320 args[i].value, args[i].unsignedp);
1321 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1322 /* CSE will replace this only if it contains args[i].value
1323 pseudo, so convert it down to the declared mode using
1325 if (REG_P (args[i].value)
1326 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1328 args[i].initial_value
1329 = gen_lowpart_SUBREG (mode, args[i].value);
1330 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1331 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1339 /* Given the current state of MUST_PREALLOCATE and information about
1340 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1341 compute and return the final value for MUST_PREALLOCATE. */
1344 finalize_must_preallocate (int must_preallocate, int num_actuals,
1345 struct arg_data *args, struct args_size *args_size)
1347 /* See if we have or want to preallocate stack space.
1349 If we would have to push a partially-in-regs parm
1350 before other stack parms, preallocate stack space instead.
1352 If the size of some parm is not a multiple of the required stack
1353 alignment, we must preallocate.
1355 If the total size of arguments that would otherwise create a copy in
1356 a temporary (such as a CALL) is more than half the total argument list
1357 size, preallocation is faster.
1359 Another reason to preallocate is if we have a machine (like the m88k)
1360 where stack alignment is required to be maintained between every
1361 pair of insns, not just when the call is made. However, we assume here
1362 that such machines either do not have push insns (and hence preallocation
1363 would occur anyway) or the problem is taken care of with
1366 if (! must_preallocate)
1368 int partial_seen = 0;
1369 int copy_to_evaluate_size = 0;
1372 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1374 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1376 else if (partial_seen && args[i].reg == 0)
1377 must_preallocate = 1;
1379 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1380 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1381 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1382 || TREE_CODE (args[i].tree_value) == COND_EXPR
1383 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1384 copy_to_evaluate_size
1385 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1388 if (copy_to_evaluate_size * 2 >= args_size->constant
1389 && args_size->constant > 0)
1390 must_preallocate = 1;
1392 return must_preallocate;
1395 /* If we preallocated stack space, compute the address of each argument
1396 and store it into the ARGS array.
1398 We need not ensure it is a valid memory address here; it will be
1399 validized when it is used.
1401 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1404 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1408 rtx arg_reg = argblock;
1409 int i, arg_offset = 0;
1411 if (GET_CODE (argblock) == PLUS)
1412 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1414 for (i = 0; i < num_actuals; i++)
1416 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1417 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1419 unsigned int align, boundary;
1420 unsigned int units_on_stack = 0;
1421 enum machine_mode partial_mode = VOIDmode;
1423 /* Skip this parm if it will not be passed on the stack. */
1424 if (! args[i].pass_on_stack
1426 && args[i].partial == 0)
1429 if (GET_CODE (offset) == CONST_INT)
1430 addr = plus_constant (arg_reg, INTVAL (offset));
1432 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1434 addr = plus_constant (addr, arg_offset);
1436 if (args[i].partial != 0)
1438 /* Only part of the parameter is being passed on the stack.
1439 Generate a simple memory reference of the correct size. */
1440 units_on_stack = args[i].locate.size.constant;
1441 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1443 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1444 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1448 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1449 set_mem_attributes (args[i].stack,
1450 TREE_TYPE (args[i].tree_value), 1);
1452 align = BITS_PER_UNIT;
1453 boundary = args[i].locate.boundary;
1454 if (args[i].locate.where_pad != downward)
1456 else if (GET_CODE (offset) == CONST_INT)
1458 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1459 align = align & -align;
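/* For example (hypothetical numbers), an offset of 4 bytes with a 64-bit
   slot boundary gives align = 32 | 64 = 96, and 96 & -96 = 32, i.e. the
   offset limits the known alignment to 32 bits. */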
1461 set_mem_align (args[i].stack, align);
1463 if (GET_CODE (slot_offset) == CONST_INT)
1464 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1466 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1468 addr = plus_constant (addr, arg_offset);
1470 if (args[i].partial != 0)
1472 /* Only part of the parameter is being passed on the stack.
1473 Generate a simple memory reference of the correct size.
1475 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1476 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1480 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1481 set_mem_attributes (args[i].stack_slot,
1482 TREE_TYPE (args[i].tree_value), 1);
1484 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1486 /* Function incoming arguments may overlap with sibling call
1487 outgoing arguments and we cannot allow reordering of reads
1488 from function arguments with stores to outgoing arguments
1489 of sibling calls. */
1490 set_mem_alias_set (args[i].stack, 0);
1491 set_mem_alias_set (args[i].stack_slot, 0);
1496 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1497 in a call instruction.
1499 FNDECL is the tree node for the target function. For an indirect call
1500 FNDECL will be NULL_TREE.
1502 ADDR is the operand 0 of CALL_EXPR for this call. */
1505 rtx_for_function_call (tree fndecl, tree addr)
1509 /* Get the function to call, in the form of RTL. */
1512 /* If this is the first use of the function, see if we need to
1513 make an external definition for it. */
1514 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1516 assemble_external (fndecl);
1517 TREE_USED (fndecl) = 1;
1520 /* Get a SYMBOL_REF rtx for the function address. */
1521 funexp = XEXP (DECL_RTL (fndecl), 0);
1524 /* Generate an rtx (probably a pseudo-register) for the address. */
1527 funexp = expand_normal (addr);
1528 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1533 /* Return true if and only if SIZE storage units (usually bytes)
1534 starting from address ADDR overlap with already clobbered argument
1535 area. This function is used to determine if we should give up a
1539 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1543 if (addr == crtl->args.internal_arg_pointer)
1545 else if (GET_CODE (addr) == PLUS
1546 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1547 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
1548 i = INTVAL (XEXP (addr, 1));
1549 /* Return true for arg pointer based indexed addressing. */
1550 else if (GET_CODE (addr) == PLUS
1551 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1552 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
1557 #ifdef ARGS_GROW_DOWNWARD
1562 unsigned HOST_WIDE_INT k;
1564 for (k = 0; k < size; k++)
1565 if (i + k < stored_args_map->n_bits
1566 && TEST_BIT (stored_args_map, i + k))
1573 /* Do the register loads required for any wholly-register parms or any
1574 parms which are passed both on the stack and in a register. Their
1575 expressions were already evaluated.
1577 Mark all register-parms as living through the call, putting these USE
1578 insns in the CALL_INSN_FUNCTION_USAGE field.
1580 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1581 checking, setting *SIBCALL_FAILURE if appropriate. */
1584 load_register_parameters (struct arg_data *args, int num_actuals,
1585 rtx *call_fusage, int flags, int is_sibcall,
1586 int *sibcall_failure)
1590 for (i = 0; i < num_actuals; i++)
1592 rtx reg = ((flags & ECF_SIBCALL)
1593 ? args[i].tail_call_reg : args[i].reg);
1596 int partial = args[i].partial;
1599 rtx before_arg = get_last_insn ();
1600 /* Set non-negative if we must move a word at a time, even if
1601 just one word (e.g., partial == 4 && mode == DFmode). Set
1602 to -1 if we just use a normal move insn. This value can be
1603 zero if the argument is a zero size structure. */
1605 if (GET_CODE (reg) == PARALLEL)
1609 gcc_assert (partial % UNITS_PER_WORD == 0);
1610 nregs = partial / UNITS_PER_WORD;
1612 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1614 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1615 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
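/* For example, a 10-byte BLKmode argument on a hypothetical target with
   4-byte words needs nregs = (10 + 3) / 4 = 3 registers. */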
1618 size = GET_MODE_SIZE (args[i].mode);
1620 /* Handle calls that pass values in multiple non-contiguous
1621 locations. The Irix 6 ABI has examples of this. */
1623 if (GET_CODE (reg) == PARALLEL)
1624 emit_group_move (reg, args[i].parallel_value);
1626 /* If simple case, just do move. If normal partial, store_one_arg
1627 has already loaded the register for us. In all other cases,
1628 load the register(s) from memory. */
1630 else if (nregs == -1)
1632 emit_move_insn (reg, args[i].value);
1633 #ifdef BLOCK_REG_PADDING
1634 /* Handle case where we have a value that needs shifting
1635 up to the msb, e.g. a QImode value and we're padding
1636 upward on a BYTES_BIG_ENDIAN machine. */
1637 if (size < UNITS_PER_WORD
1638 && (args[i].locate.where_pad
1639 == (BYTES_BIG_ENDIAN ? upward : downward)))
1642 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
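/* For example, a 1-byte QImode value on a hypothetical 32-bit big-endian
   target gives shift = (4 - 1) * 8 = 24 bits. */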
1644 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1645 report the whole reg as used. Strictly speaking, the
1646 call only uses SIZE bytes at the msb end, but it doesn't
1647 seem worth generating rtl to say that. */
1648 reg = gen_rtx_REG (word_mode, REGNO (reg));
1649 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1650 build_int_cst (NULL_TREE, shift),
1653 emit_move_insn (reg, x);
1658 /* If we have pre-computed the values to put in the registers in
1659 the case of non-aligned structures, copy them in now. */
1661 else if (args[i].n_aligned_regs != 0)
1662 for (j = 0; j < args[i].n_aligned_regs; j++)
1663 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1664 args[i].aligned_regs[j]);
1666 else if (partial == 0 || args[i].pass_on_stack)
1668 rtx mem = validize_mem (args[i].value);
1670 /* Check for overlap with already clobbered argument area. */
1672 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1674 *sibcall_failure = 1;
1676 /* Handle a BLKmode that needs shifting. */
1677 if (nregs == 1 && size < UNITS_PER_WORD
1678 #ifdef BLOCK_REG_PADDING
1679 && args[i].locate.where_pad == downward
1685 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1686 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1687 rtx x = gen_reg_rtx (word_mode);
1688 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1689 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1692 emit_move_insn (x, tem);
1693 x = expand_shift (dir, word_mode, x,
1694 build_int_cst (NULL_TREE, shift),
1697 emit_move_insn (ri, x);
1700 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1703 /* When a parameter is a block, and perhaps in other cases, it is
1704 possible that it did a load from an argument slot that was
1705 already clobbered. */
1707 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1708 *sibcall_failure = 1;
1710 /* Handle calls that pass values in multiple non-contiguous
1711 locations. The Irix 6 ABI has examples of this. */
1712 if (GET_CODE (reg) == PARALLEL)
1713 use_group_regs (call_fusage, reg);
1714 else if (nregs == -1)
1715 use_reg (call_fusage, reg);
1717 use_regs (call_fusage, REGNO (reg), nregs);
1722 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1723 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1724 bytes, then we would need to push some additional bytes to pad the
1725 arguments. So, we compute an adjustment to the stack pointer for an
1726 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1727 bytes. Then, when the arguments are pushed the stack will be perfectly
1728 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1729 be popped after the call. Returns the adjustment. */
1732 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1733 struct args_size *args_size,
1734 unsigned int preferred_unit_stack_boundary)
1736 /* The number of bytes to pop so that the stack will be
1737 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1738 HOST_WIDE_INT adjustment;
1739 /* The alignment of the stack after the arguments are pushed, if we
1740 just pushed the arguments without adjusting the stack here. */
1741 unsigned HOST_WIDE_INT unadjusted_alignment;
1743 unadjusted_alignment
1744 = ((stack_pointer_delta + unadjusted_args_size)
1745 % preferred_unit_stack_boundary);
1747 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1748 as possible -- leaving just enough left to cancel out the
1749 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1750 PENDING_STACK_ADJUST is non-negative, and congruent to
1751 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1753 /* Begin by trying to pop all the bytes. */
1754 unadjusted_alignment
1755 = (unadjusted_alignment
1756 - (pending_stack_adjust % preferred_unit_stack_boundary));
1757 adjustment = pending_stack_adjust;
1758 /* Push enough additional bytes that the stack will be aligned
1759 after the arguments are pushed. */
1760 if (preferred_unit_stack_boundary > 1)
1762 if (unadjusted_alignment > 0)
1763 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1765 adjustment += unadjusted_alignment;
1768 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1769 bytes after the call. The right number is the entire
1770 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1771 by the arguments in the first place. */
1773 = pending_stack_adjust - adjustment + unadjusted_args_size;
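/* A worked example with hypothetical numbers: with a 16-byte boundary,
   stack_pointer_delta == 0, pending_stack_adjust == 32 and
   unadjusted_args_size == 20, unadjusted_alignment is 4, so we pop only
   32 - 12 = 20 bytes now; pushing the 20 bytes of arguments then leaves
   the stack aligned, and ARGS_SIZE->CONSTANT becomes 32 - 20 + 20 = 32
   bytes to pop after the call. */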
1778 /* Scan expression X for dereferences of argument slots which have already
1779 been clobbered by tail call arguments (as noted in the stored_args_map
1781 Return nonzero if X dereferences such an argument slot,
1785 check_sibcall_argument_overlap_1 (rtx x)
1794 code = GET_CODE (x);
1797 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1798 GET_MODE_SIZE (GET_MODE (x)));
1800 /* Scan all subexpressions. */
1801 fmt = GET_RTX_FORMAT (code);
1802 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1806 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1809 else if (*fmt == 'E')
1811 for (j = 0; j < XVECLEN (x, i); j++)
1812 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1819 /* Scan the sequence after INSN for dereferences of argument slots which have
1820 already been clobbered by tail call arguments (as noted in the stored_args_map
1821 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to the
1822 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
1823 should be 0). Return nonzero if the sequence after INSN dereferences such
1824 argument slots, zero otherwise. */
1827 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1831 if (insn == NULL_RTX)
1832 insn = get_insns ();
1834 insn = NEXT_INSN (insn);
1836 for (; insn; insn = NEXT_INSN (insn))
1838 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1841 if (mark_stored_args_map)
1843 #ifdef ARGS_GROW_DOWNWARD
1844 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1846 low = arg->locate.slot_offset.constant;
1849 for (high = low + arg->locate.size.constant; low < high; low++)
1850 SET_BIT (stored_args_map, low);
1852 return insn != NULL_RTX;
1855 /* Given that a function returns a value of mode MODE at the most
1856 significant end of hard register VALUE, shift VALUE left or right
1857 as specified by LEFT_P. Return true if some action was needed. */
1860 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1862 HOST_WIDE_INT shift;
1864 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1865 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
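/* For example, an SImode value returned at the most significant end of a
   hypothetical 64-bit hard register gives shift = 64 - 32 = 32 bits. */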
1869 /* Use ashr rather than lshr for right shifts. This is for the benefit
1870 of the MIPS port, which requires SImode values to be sign-extended
1871 when stored in 64-bit registers. */
1872 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1873 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1878 /* If X is a likely-spilled register value, copy it to a pseudo
1879 register and return that register. Return X otherwise. */
1882 avoid_likely_spilled_reg (rtx x)
1887 && HARD_REGISTER_P (x)
1888 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
1890 /* Make sure that we generate a REG rather than a CONCAT.
1891 Moves into CONCATs can need nontrivial instructions,
1892 and the whole point of this function is to avoid
1893 using the hard register directly in such a situation. */
1894 generating_concat_p = 0;
1895 new_rtx = gen_reg_rtx (GET_MODE (x));
1896 generating_concat_p = 1;
1897 emit_move_insn (new_rtx, x);
1903 /* Generate all the code for a CALL_EXPR exp
1904 and return an rtx for its value.
1905 Store the value in TARGET (specified as an rtx) if convenient.
1906 If the value is stored in TARGET then TARGET is returned.
1907 If IGNORE is nonzero, then we ignore the value of the function call. */
1910 expand_call (tree exp, rtx target, int ignore)
1912 /* Nonzero if we are currently expanding a call. */
1913 static int currently_expanding_call = 0;
1915 /* RTX for the function to be called. */
1917 /* Sequence of insns to perform a normal "call". */
1918 rtx normal_call_insns = NULL_RTX;
1919 /* Sequence of insns to perform a tail "call". */
1920 rtx tail_call_insns = NULL_RTX;
1921 /* Data type of the function. */
1923 tree type_arg_types;
1924 /* Declaration of the function being called,
1925 or 0 if the function is computed (not known by name). */
1927 /* The type of the function being called. */
1929 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1932 /* Register in which non-BLKmode value will be returned,
1933 or 0 if no value or if value is BLKmode. */
1935 /* Address where we should return a BLKmode value;
1936 0 if value not BLKmode. */
1937 rtx structure_value_addr = 0;
1938 /* Nonzero if that address is being passed by treating it as
1939 an extra, implicit first parameter. Otherwise,
1940 it is passed by being copied directly into struct_value_rtx. */
1941 int structure_value_addr_parm = 0;
1942 /* Holds the value of implicit argument for the struct value. */
1943 tree structure_value_addr_value = NULL_TREE;
1944 /* Size of aggregate value wanted, or zero if none wanted
1945 or if we are using the non-reentrant PCC calling convention
1946 or expecting the value in registers. */
1947 HOST_WIDE_INT struct_value_size = 0;
1948 /* Nonzero if called function returns an aggregate in memory PCC style,
1949 by returning the address of where to find it. */
1950 int pcc_struct_value = 0;
1951 rtx struct_value = 0;
1953 /* Number of actual parameters in this call, including struct value addr. */
1955 /* Number of named args. Args after this are anonymous ones
1956 and they must all go on the stack. */
1958 /* Number of complex actual arguments that need to be split. */
1959 int num_complex_actuals = 0;
1961 /* Vector of information about each argument.
1962 Arguments are numbered in the order they will be pushed,
1963 not the order they are written. */
1964 struct arg_data *args;
1966 /* Total size in bytes of all the stack-parms scanned so far. */
1967 struct args_size args_size;
1968 struct args_size adjusted_args_size;
1969 /* Size of arguments before any adjustments (such as rounding). */
1970 int unadjusted_args_size;
1971 /* Data on reg parms scanned so far. */
1972 CUMULATIVE_ARGS args_so_far;
1973 /* Nonzero if a reg parm has been scanned. */
1975 /* Nonzero if this is an indirect function call. */
1977 /* Nonzero if we must avoid push-insns in the args for this call.
1978 If stack space is allocated for register parameters, but not by the
1979 caller, then it is preallocated in the fixed part of the stack frame.
1980 So the entire argument block must then be preallocated (i.e., we
1981 ignore PUSH_ROUNDING in that case). */
1983 int must_preallocate = !PUSH_ARGS;
1985 /* Size of the stack reserved for parameter registers. */
1986 int reg_parm_stack_space = 0;
1988 /* Address of space preallocated for stack parms
1989 (on machines that lack push insns), or 0 if space not preallocated. */
1992 /* Mask of ECF_ flags. */
1994 #ifdef REG_PARM_STACK_SPACE
1995 /* Define the boundary of the register parm stack space that needs to be
1997 int low_to_save, high_to_save;
1998 rtx save_area = 0; /* Place that it is saved */
2001 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2002 char *initial_stack_usage_map = stack_usage_map;
2003 char *stack_usage_map_buf = NULL;
2005 int old_stack_allocated;
2007 /* State variables to track stack modifications. */
2008 rtx old_stack_level = 0;
2009 int old_stack_arg_under_construction = 0;
2010 int old_pending_adj = 0;
2011 int old_inhibit_defer_pop = inhibit_defer_pop;
2013 /* Some stack pointer alterations we make are performed via
2014 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2015 which we then also need to save/restore along the way. */
2016 int old_stack_pointer_delta = 0;
2019 tree p = CALL_EXPR_FN (exp);
2020 tree addr = CALL_EXPR_FN (exp);
2022 /* The alignment of the stack, in bits. */
2023 unsigned HOST_WIDE_INT preferred_stack_boundary;
2024 /* The alignment of the stack, in bytes. */
2025 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2026 /* The static chain value to use for this call. */
2027 rtx static_chain_value;
2028 /* See if this is a "nothrow" function call. */
2029 if (TREE_NOTHROW (exp))
2030 flags |= ECF_NOTHROW;
2032 /* See if we can find a DECL-node for the actual function, and get the
2033 function attributes (flags) from the function decl or type node. */
2034 fndecl = get_callee_fndecl (exp);
2037 fntype = TREE_TYPE (fndecl);
2038 flags |= flags_from_decl_or_type (fndecl);
2042 fntype = TREE_TYPE (TREE_TYPE (p));
2043 flags |= flags_from_decl_or_type (fntype);
2046 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2048 /* Warn if this value is an aggregate type,
2049 regardless of which calling convention we are using for it. */
2050 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2051 warning (OPT_Waggregate_return, "function call has aggregate value");
2053 /* If the result of a non-looping pure or const function call is
2054 ignored (or void), and none of its arguments are volatile, we can
2055 avoid expanding the call and just evaluate the arguments for
2057 if ((flags & (ECF_CONST | ECF_PURE))
2058 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2059 && (ignore || target == const0_rtx
2060 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2062 bool volatilep = false;
2064 call_expr_arg_iterator iter;
2066 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2067 if (TREE_THIS_VOLATILE (arg))
2075 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2076 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2081 #ifdef REG_PARM_STACK_SPACE
2082 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2085 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2086 && reg_parm_stack_space > 0 && PUSH_ARGS)
2087 must_preallocate = 1;
2089 /* Set up a place to return a structure. */
2091 /* Cater to broken compilers. */
2092 if (aggregate_value_p (exp, (!fndecl ? fntype : fndecl)))
2094 /* This call returns a big structure. */
2095 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2097 #ifdef PCC_STATIC_STRUCT_RETURN
2099 pcc_struct_value = 1;
2101 #else /* not PCC_STATIC_STRUCT_RETURN */
2103 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2105 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2106 structure_value_addr = XEXP (target, 0);
2109 /* For variable-sized objects, we must be called with a target
2110 specified. If we were to allocate space on the stack here,
2111 we would have no way of knowing when to free it. */
2112 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2114 mark_temp_addr_taken (d);
2115 structure_value_addr = XEXP (d, 0);
2119 #endif /* not PCC_STATIC_STRUCT_RETURN */
2122 /* Figure out the amount to which the stack should be aligned. */
2123 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2126 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2127 /* Without automatic stack alignment, we can't increase preferred
2128 stack boundary. With automatic stack alignment, it is
2129 unnecessary since unless we can guarantee that all callers will
2130 align the outgoing stack properly, callee has to align its
2133 && i->preferred_incoming_stack_boundary
2134 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2135 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2138 /* Operand 0 is a pointer-to-function; get the type of the function. */
2139 funtype = TREE_TYPE (addr);
2140 gcc_assert (POINTER_TYPE_P (funtype));
2141 funtype = TREE_TYPE (funtype);
2143 /* Count whether there are actual complex arguments that need to be split
2144 into their real and imaginary parts. Munge the type_arg_types
2145 appropriately here as well. */
2146 if (targetm.calls.split_complex_arg)
2148 call_expr_arg_iterator iter;
2150 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2152 tree type = TREE_TYPE (arg);
2153 if (type && TREE_CODE (type) == COMPLEX_TYPE
2154 && targetm.calls.split_complex_arg (type))
2155 num_complex_actuals++;
2157 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2160 type_arg_types = TYPE_ARG_TYPES (funtype);
2162 if (flags & ECF_MAY_BE_ALLOCA)
2163 cfun->calls_alloca = 1;
2165 /* If struct_value_rtx is 0, it means pass the address
2166 as if it were an extra parameter. Put the argument expression
2167 in structure_value_addr_value. */
2168 if (structure_value_addr && struct_value == 0)
2170 /* If structure_value_addr is a REG other than
2171 virtual_outgoing_args_rtx, we can always use it. If it
2172 is not a REG, we must always copy it into a register.
2173 If it is virtual_outgoing_args_rtx, we must copy it to another
2174 register in some cases. */
2175 rtx temp = (!REG_P (structure_value_addr)
2176 || (ACCUMULATE_OUTGOING_ARGS
2177 && stack_arg_under_construction
2178 && structure_value_addr == virtual_outgoing_args_rtx)
2179 ? copy_addr_to_reg (convert_memory_address
2180 (Pmode, structure_value_addr))
2181 : structure_value_addr);
2183 structure_value_addr_value =
2184 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2185 structure_value_addr_parm = 1;
2188 /* Count the arguments and set NUM_ACTUALS. */
2190 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2192 /* Compute number of named args.
2193 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2195 if (type_arg_types != 0)
2197 = (list_length (type_arg_types)
2198 /* Count the struct value address, if it is passed as a parm. */
2199 + structure_value_addr_parm);
2201 /* If we know nothing, treat all args as named. */
2202 n_named_args = num_actuals;
2204 /* Start updating where the next arg would go.
2206 On some machines (such as the PA) indirect calls have a different
2207 calling convention than normal calls. The fourth argument in
2208 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2210 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2212 /* Now possibly adjust the number of named args.
2213 Normally, don't include the last named arg if anonymous args follow.
2214 We do include the last named arg if
2215 targetm.calls.strict_argument_naming() returns nonzero.
2216 (If no anonymous args follow, the result of list_length is actually
2217 one too large. This is harmless.)
2219 If targetm.calls.pretend_outgoing_varargs_named() returns
2220 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2221 this machine will be able to place unnamed args that were passed
2222 in registers into the stack. So treat all args as named. This
2223 allows the insns emitted for a specific argument list to be
2224 independent of the function declaration.
2226 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2227 we do not have any reliable way to pass unnamed args in
2228 registers, so we must force them into memory. */
2230 if (type_arg_types != 0
2231 && targetm.calls.strict_argument_naming (&args_so_far))
2233 else if (type_arg_types != 0
2234 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2235 /* Don't include the last named arg. */
2238 /* Treat all args as named. */
2239 n_named_args = num_actuals;
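/* Worked example (illustrative only): for a call with three actual
arguments to a varargs function declared as `int f (const char *, ...)',
num_actuals is 3 and list_length (type_arg_types) is 1. With
strict_argument_naming, n_named_args stays 1; if unnamed args cannot be
pretended named, the last named arg is dropped and n_named_args becomes 0;
otherwise all three args are treated as named. */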
2241 /* Make a vector to hold all the information about each arg. */
2242 args = XALLOCAVEC (struct arg_data, num_actuals);
2243 memset (args, 0, num_actuals * sizeof (struct arg_data));
2245 /* Build up entries in the ARGS array, compute the size of the
2246 arguments into ARGS_SIZE, etc. */
2247 initialize_argument_information (num_actuals, args, &args_size,
2249 structure_value_addr_value, fndecl, fntype,
2250 &args_so_far, reg_parm_stack_space,
2251 &old_stack_level, &old_pending_adj,
2252 &must_preallocate, &flags,
2253 &try_tail_call, CALL_FROM_THUNK_P (exp));
2256 must_preallocate = 1;
2258 /* Now make final decision about preallocating stack space. */
2259 must_preallocate = finalize_must_preallocate (must_preallocate,
2263 /* If the structure value address will reference the stack pointer, we
2264 must stabilize it. We don't need to do this if we know that we are
2265 not going to adjust the stack pointer in processing this call. */
2267 if (structure_value_addr
2268 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2269 || reg_mentioned_p (virtual_outgoing_args_rtx,
2270 structure_value_addr))
2272 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2273 structure_value_addr = copy_to_reg (structure_value_addr);
2275 /* Tail calls can make things harder to debug, and we've traditionally
2276 pushed these optimizations into -O2. Don't try if we're already
2277 expanding a call, as that means we're an argument. Don't try if
2278 there are cleanups, as we know there is code to follow the call. */
2280 if (currently_expanding_call++ != 0
2281 || !flag_optimize_sibling_calls
2283 || lookup_expr_eh_region (exp) >= 0
2284 || dbg_cnt (tail_call) == false)
2287 /* Other reasons for the tail call optimization to fail. */
2289 #ifdef HAVE_sibcall_epilogue
2290 !HAVE_sibcall_epilogue
2295 /* Doing sibling call optimization needs some work, since
2296 structure_value_addr can be allocated on the stack.
2297 It does not seem worth the effort since few optimizable
2298 sibling calls will return a structure. */
2299 || structure_value_addr != NULL_RTX
2300 #ifdef REG_PARM_STACK_SPACE
2301 /* If the outgoing reg parm stack space changes, we cannot do a sibcall. */
2302 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2303 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2304 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2306 /* Check whether the target is able to optimize the call
2308 || !targetm.function_ok_for_sibcall (fndecl, exp)
2309 /* Functions that do not return exactly once may not be sibcall
2311 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2312 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2313 /* If the called function is nested in the current one, it might access
2314 some of the caller's arguments, but could clobber them beforehand if
2315 the argument areas are shared. */
2316 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2317 /* If this function requires more stack slots than the current
2318 function, we cannot change it into a sibling call.
2319 crtl->args.pretend_args_size is not part of the
2320 stack allocated by our caller. */
2321 || args_size.constant > (crtl->args.size
2322 - crtl->args.pretend_args_size)
2323 /* If the callee pops its own arguments, then it must pop exactly
2324 the same number of arguments as the current function. */
2325 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2326 != RETURN_POPS_ARGS (current_function_decl,
2327 TREE_TYPE (current_function_decl),
2329 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2332 /* Ensure current function's preferred stack boundary is at least
2333 what we need. Stack alignment may also increase preferred stack
2335 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2336 crtl->preferred_stack_boundary = preferred_stack_boundary;
2338 preferred_stack_boundary = crtl->preferred_stack_boundary;
2340 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2342 /* We want to make two insn chains; one for a sibling call, the other
2343 for a normal call. We will select one of the two chains after
2344 initial RTL generation is complete. */
2345 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2347 int sibcall_failure = 0;
2348 /* We want to emit any pending stack adjustments before the tail
2349 recursion "call". That way we know any adjustment after the tail
2350 recursion call can be ignored if we indeed use the tail
2352 int save_pending_stack_adjust = 0;
2353 int save_stack_pointer_delta = 0;
2355 rtx before_call, next_arg_reg, after_args;
2359 /* State variables we need to save and restore between
2361 save_pending_stack_adjust = pending_stack_adjust;
2362 save_stack_pointer_delta = stack_pointer_delta;
2365 flags &= ~ECF_SIBCALL;
2367 flags |= ECF_SIBCALL;
2369 /* Other state variables that we must reinitialize each time
2370 through the loop (that are not initialized by the loop itself). */
2374 /* Start a new sequence for the normal call case.
2376 From this point on, if the sibling call fails, we want to set
2377 sibcall_failure instead of continuing the loop. */
2380 /* Don't let pending stack adjusts add up to too much.
2381 Also, do all pending adjustments now if there is any chance
2382 this might be a call to alloca or if we are expanding a sibling
2384 Also do the adjustments before a throwing call, otherwise
2385 exception handling can fail; PR 19225. */
2386 if (pending_stack_adjust >= 32
2387 || (pending_stack_adjust > 0
2388 && (flags & ECF_MAY_BE_ALLOCA))
2389 || (pending_stack_adjust > 0
2390 && flag_exceptions && !(flags & ECF_NOTHROW))
2392 do_pending_stack_adjust ();
2394 /* Precompute any arguments as needed. */
2396 precompute_arguments (num_actuals, args);
2398 /* Now we are about to start emitting insns that can be deleted
2399 if a libcall is deleted. */
2400 if (pass && (flags & ECF_MALLOC))
2403 if (pass == 0 && crtl->stack_protect_guard)
2404 stack_protect_epilogue ();
2406 adjusted_args_size = args_size;
2407 /* Compute the actual size of the argument block required. The variable
2408 and constant sizes must be combined, the size may have to be rounded,
2409 and there may be a minimum required size. When generating a sibcall
2410 pattern, do not round up, since we'll be re-using whatever space our
2412 unadjusted_args_size
2413 = compute_argument_block_size (reg_parm_stack_space,
2414 &adjusted_args_size,
2417 : preferred_stack_boundary));
2419 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2421 /* The argument block when performing a sibling call is the
2422 incoming argument block. */
2425 argblock = crtl->args.internal_arg_pointer;
2427 #ifdef STACK_GROWS_DOWNWARD
2428 = plus_constant (argblock, crtl->args.pretend_args_size);
2430 = plus_constant (argblock, -crtl->args.pretend_args_size);
2432 stored_args_map = sbitmap_alloc (args_size.constant);
2433 sbitmap_zero (stored_args_map);
2436 /* If we have no actual push instructions, or shouldn't use them,
2437 make space for all args right now. */
2438 else if (adjusted_args_size.var != 0)
2440 if (old_stack_level == 0)
2442 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2443 old_stack_pointer_delta = stack_pointer_delta;
2444 old_pending_adj = pending_stack_adjust;
2445 pending_stack_adjust = 0;
2446 /* stack_arg_under_construction says whether a stack arg is
2447 being constructed at the old stack level. Pushing the stack
2448 gets a clean outgoing argument block. */
2449 old_stack_arg_under_construction = stack_arg_under_construction;
2450 stack_arg_under_construction = 0;
2452 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2456 /* Note that we must go through the motions of allocating an argument
2457 block even if the size is zero because we may be storing args
2458 in the area reserved for register arguments, which may be part of
2461 int needed = adjusted_args_size.constant;
2463 /* Store the maximum argument space used. It will be pushed by
2464 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2467 if (needed > crtl->outgoing_args_size)
2468 crtl->outgoing_args_size = needed;
2470 if (must_preallocate)
2472 if (ACCUMULATE_OUTGOING_ARGS)
2474 /* Since the stack pointer will never be pushed, it is
2475 possible for the evaluation of a parm to clobber
2476 something we have already written to the stack.
2477 Since most function calls on RISC machines do not use
2478 the stack, this is uncommon, but must work correctly.
2480 Therefore, we save any area of the stack that was already
2481 written and that we are using. Here we set up to do this
2482 by making a new stack usage map from the old one. The
2483 actual save will be done by store_one_arg.
2485 Another approach might be to try to reorder the argument
2486 evaluations to avoid this conflicting stack usage. */
2488 /* Since we will be writing into the entire argument area,
2489 the map must be allocated for its entire size, not just
2490 the part that is the responsibility of the caller. */
2491 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2492 needed += reg_parm_stack_space;
2494 #ifdef ARGS_GROW_DOWNWARD
2495 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2498 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2501 if (stack_usage_map_buf)
2502 free (stack_usage_map_buf);
2503 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2504 stack_usage_map = stack_usage_map_buf;
2506 if (initial_highest_arg_in_use)
2507 memcpy (stack_usage_map, initial_stack_usage_map,
2508 initial_highest_arg_in_use);
2510 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2511 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2512 (highest_outgoing_arg_in_use
2513 - initial_highest_arg_in_use));
2516 /* The address of the outgoing argument list must not be
2517 copied to a register here, because argblock would be left
2518 pointing to the wrong place after the call to
2519 allocate_dynamic_stack_space below. */
2521 argblock = virtual_outgoing_args_rtx;
2525 if (inhibit_defer_pop == 0)
2527 /* Try to reuse some or all of the pending_stack_adjust
2528 to get this space. */
2530 = (combine_pending_stack_adjustment_and_call
2531 (unadjusted_args_size,
2532 &adjusted_args_size,
2533 preferred_unit_stack_boundary));
2535 /* combine_pending_stack_adjustment_and_call computes
2536 an adjustment before the arguments are allocated.
2537 Account for them and see whether or not the stack
2538 needs to go up or down. */
2539 needed = unadjusted_args_size - needed;
2543 /* We're releasing stack space. */
2544 /* ??? We can avoid any adjustment at all if we're
2545 already aligned. FIXME. */
2546 pending_stack_adjust = -needed;
2547 do_pending_stack_adjust ();
2551 /* We need to allocate space. We'll do that in
2552 push_block below. */
2553 pending_stack_adjust = 0;
2556 /* Special case this because overhead of `push_block' in
2557 this case is non-trivial. */
2559 argblock = virtual_outgoing_args_rtx;
2562 argblock = push_block (GEN_INT (needed), 0, 0);
2563 #ifdef ARGS_GROW_DOWNWARD
2564 argblock = plus_constant (argblock, needed);
2568 /* We only really need to call `copy_to_reg' in the case
2569 where push insns are going to be used to pass ARGBLOCK
2570 to a function call in ARGS. In that case, the stack
2571 pointer changes value from the allocation point to the
2572 call point, and hence the value of
2573 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2574 as well always do it. */
2575 argblock = copy_to_reg (argblock);
2580 if (ACCUMULATE_OUTGOING_ARGS)
2582 /* The save/restore code in store_one_arg handles all
2583 cases except one: a constructor call (including a C
2584 function returning a BLKmode struct) to initialize
2586 if (stack_arg_under_construction)
2589 = GEN_INT (adjusted_args_size.constant
2590 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2591 : TREE_TYPE (fndecl))) ? 0
2592 : reg_parm_stack_space));
2593 if (old_stack_level == 0)
2595 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2597 old_stack_pointer_delta = stack_pointer_delta;
2598 old_pending_adj = pending_stack_adjust;
2599 pending_stack_adjust = 0;
2600 /* stack_arg_under_construction says whether a stack
2601 arg is being constructed at the old stack level.
2602 Pushing the stack gets a clean outgoing argument
2604 old_stack_arg_under_construction
2605 = stack_arg_under_construction;
2606 stack_arg_under_construction = 0;
2607 /* Make a new map for the new argument list. */
2608 if (stack_usage_map_buf)
2609 free (stack_usage_map_buf);
2610 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2611 stack_usage_map = stack_usage_map_buf;
2612 highest_outgoing_arg_in_use = 0;
2614 allocate_dynamic_stack_space (push_size, NULL_RTX,
2618 /* If argument evaluation might modify the stack pointer,
2619 copy the address of the argument list to a register. */
2620 for (i = 0; i < num_actuals; i++)
2621 if (args[i].pass_on_stack)
2623 argblock = copy_addr_to_reg (argblock);
2628 compute_argument_addresses (args, argblock, num_actuals);
2630 /* If we push args individually in reverse order, perform stack alignment
2631 before the first push (the last arg). */
2632 if (PUSH_ARGS_REVERSED && argblock == 0
2633 && adjusted_args_size.constant != unadjusted_args_size)
2635 /* When the stack adjustment is pending, we get better code
2636 by combining the adjustments. */
2637 if (pending_stack_adjust
2638 && ! inhibit_defer_pop)
2640 pending_stack_adjust
2641 = (combine_pending_stack_adjustment_and_call
2642 (unadjusted_args_size,
2643 &adjusted_args_size,
2644 preferred_unit_stack_boundary));
2645 do_pending_stack_adjust ();
2647 else if (argblock == 0)
2648 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2649 - unadjusted_args_size));
2651 /* Now that the stack is properly aligned, pops can't safely
2652 be deferred during the evaluation of the arguments. */
2655 funexp = rtx_for_function_call (fndecl, addr);
2657 /* Figure out the register where the value, if any, will come back. */
2659 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2660 && ! structure_value_addr)
2662 if (pcc_struct_value)
2663 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2664 fndecl, NULL, (pass == 0));
2666 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2669 /* If VALREG is a PARALLEL whose first member has a zero
2670 offset, use that. This is for targets such as m68k that
2671 return the same value in multiple places. */
2672 if (GET_CODE (valreg) == PARALLEL)
2674 rtx elem = XVECEXP (valreg, 0, 0);
2675 rtx where = XEXP (elem, 0);
2676 rtx offset = XEXP (elem, 1);
2677 if (offset == const0_rtx
2678 && GET_MODE (where) == GET_MODE (valreg))
2683 /* Precompute all register parameters. It isn't safe to compute anything
2684 once we have started filling any specific hard regs. */
2685 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2687 if (CALL_EXPR_STATIC_CHAIN (exp))
2688 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2690 static_chain_value = 0;
2692 #ifdef REG_PARM_STACK_SPACE
2693 /* Save the fixed argument area if it's part of the caller's frame and
2694 is clobbered by argument setup for this call. */
2695 if (ACCUMULATE_OUTGOING_ARGS && pass)
2696 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2697 &low_to_save, &high_to_save);
2700 /* Now store (and compute if necessary) all non-register parms.
2701 These come before register parms, since they can require block-moves,
2702 which could clobber the registers used for register parms.
2703 Parms which have partial registers are not stored here,
2704 but we do preallocate space here if they want that. */
2706 for (i = 0; i < num_actuals; i++)
2708 if (args[i].reg == 0 || args[i].pass_on_stack)
2710 rtx before_arg = get_last_insn ();
2712 if (store_one_arg (&args[i], argblock, flags,
2713 adjusted_args_size.var != 0,
2714 reg_parm_stack_space)
2716 && check_sibcall_argument_overlap (before_arg,
2718 sibcall_failure = 1;
2721 if (((flags & ECF_CONST)
2722 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2724 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2725 gen_rtx_USE (VOIDmode,
2730 /* If we have a parm that is passed in registers but not in memory
2731 and whose alignment does not permit a direct copy into registers,
2732 make a group of pseudos that correspond to each register that we
2734 if (STRICT_ALIGNMENT)
2735 store_unaligned_arguments_into_pseudos (args, num_actuals);
2737 /* Now store any partially-in-registers parm.
2738 This is the last place a block-move can happen. */
2740 for (i = 0; i < num_actuals; i++)
2741 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2743 rtx before_arg = get_last_insn ();
2745 if (store_one_arg (&args[i], argblock, flags,
2746 adjusted_args_size.var != 0,
2747 reg_parm_stack_space)
2749 && check_sibcall_argument_overlap (before_arg,
2751 sibcall_failure = 1;
2754 /* If we pushed args in forward order, perform stack alignment
2755 after pushing the last arg. */
2756 if (!PUSH_ARGS_REVERSED && argblock == 0)
2757 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2758 - unadjusted_args_size));
2760 /* If register arguments require space on the stack and stack space
2761 was not preallocated, allocate stack space here for arguments
2762 passed in registers. */
2763 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2764 && !ACCUMULATE_OUTGOING_ARGS
2765 && must_preallocate == 0 && reg_parm_stack_space > 0)
2766 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2768 /* Pass the function the address in which to return a
2770 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2772 structure_value_addr
2773 = convert_memory_address (Pmode, structure_value_addr);
2774 emit_move_insn (struct_value,
2776 force_operand (structure_value_addr,
2779 if (REG_P (struct_value))
2780 use_reg (&call_fusage, struct_value);
2783 after_args = get_last_insn ();
2784 funexp = prepare_call_address (funexp, static_chain_value,
2785 &call_fusage, reg_parm_seen, pass == 0);
2787 load_register_parameters (args, num_actuals, &call_fusage, flags,
2788 pass == 0, &sibcall_failure);
2790 /* Save a pointer to the last insn before the call, so that we can
2791 later safely search backwards to find the CALL_INSN. */
2792 before_call = get_last_insn ();
2794 /* Set up next argument register. For sibling calls on machines
2795 with register windows this should be the incoming register. */
2796 #ifdef FUNCTION_INCOMING_ARG
2798 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2802 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2805 /* All arguments and registers used for the call must be set up by
2808 /* Stack must be properly aligned now. */
2810 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2812 /* Generate the actual call instruction. */
2813 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2814 adjusted_args_size.constant, struct_value_size,
2815 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2816 flags, & args_so_far);
2818 /* If the call setup or the call itself overlaps with anything
2819 of the argument setup we probably clobbered our call address.
2820 In that case we can't do sibcalls. */
2822 && check_sibcall_argument_overlap (after_args, 0, 0))
2823 sibcall_failure = 1;
2825 /* If a non-BLKmode value is returned at the most significant end
2826 of a register, shift the register right by the appropriate amount
2827 and update VALREG accordingly. BLKmode values are handled by the
2828 group load/store machinery below. */
2829 if (!structure_value_addr
2830 && !pcc_struct_value
2831 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2832 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2834 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2835 sibcall_failure = 1;
2836 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2839 if (pass && (flags & ECF_MALLOC))
2841 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2844 /* The return value from a malloc-like function is a pointer. */
2845 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2846 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2848 emit_move_insn (temp, valreg);
2850 /* The return value from a malloc-like function can not alias
2852 last = get_last_insn ();
2853 add_reg_note (last, REG_NOALIAS, temp);
2855 /* Write out the sequence. */
2856 insns = get_insns ();
2862 /* For calls to `setjmp', etc., inform
2863 function.c:setjmp_warnings that it should complain if
2864 nonvolatile values are live. For functions that cannot
2865 return, inform flow that control does not fall through. */
2867 if ((flags & ECF_NORETURN) || pass == 0)
2869 /* The barrier must be emitted
2870 immediately after the CALL_INSN. Some ports emit more
2871 than just a CALL_INSN above, so we must search for it here. */
2873 rtx last = get_last_insn ();
2874 while (!CALL_P (last))
2876 last = PREV_INSN (last);
2877 /* There was no CALL_INSN? */
2878 gcc_assert (last != before_call);
2881 emit_barrier_after (last);
2883 /* Stack adjustments after a noreturn call are dead code.
2884 However when NO_DEFER_POP is in effect, we must preserve
2885 stack_pointer_delta. */
2886 if (inhibit_defer_pop == 0)
2888 stack_pointer_delta = old_stack_allocated;
2889 pending_stack_adjust = 0;
2893 /* If value type not void, return an rtx for the value. */
2895 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2897 target = const0_rtx;
2898 else if (structure_value_addr)
2900 if (target == 0 || !MEM_P (target))
2903 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2904 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2905 structure_value_addr));
2906 set_mem_attributes (target, exp, 1);
2909 else if (pcc_struct_value)
2911 /* This is the special C++ case where we need to
2912 know what the true target was. We take care to
2913 never use this value more than once in one expression. */
2914 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2915 copy_to_reg (valreg));
2916 set_mem_attributes (target, exp, 1);
2918 /* Handle calls that return values in multiple non-contiguous locations.
2919 The Irix 6 ABI has examples of this. */
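/* Schematic example (illustrative only): such a PARALLEL might look like
     (parallel [(expr_list (reg:DF 32) (const_int 0))
                (expr_list (reg:DF 33) (const_int 8))])
i.e. the first 8 bytes of the value are returned in one register and the
next 8 bytes in another. */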
2920 else if (GET_CODE (valreg) == PARALLEL)
2924 /* This will only be assigned once, so it can be readonly. */
2925 tree nt = build_qualified_type (TREE_TYPE (exp),
2926 (TYPE_QUALS (TREE_TYPE (exp))
2927 | TYPE_QUAL_CONST));
2929 target = assign_temp (nt, 0, 1, 1);
2932 if (! rtx_equal_p (target, valreg))
2933 emit_group_store (target, valreg, TREE_TYPE (exp),
2934 int_size_in_bytes (TREE_TYPE (exp)));
2936 /* We can not support sibling calls for this case. */
2937 sibcall_failure = 1;
2940 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2941 && GET_MODE (target) == GET_MODE (valreg))
2943 bool may_overlap = false;
2945 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2946 reg to a plain register. */
2947 if (!REG_P (target) || HARD_REGISTER_P (target))
2948 valreg = avoid_likely_spilled_reg (valreg);
2950 /* If TARGET is a MEM in the argument area, and we have
2951 saved part of the argument area, then we can't store
2952 directly into TARGET as it may get overwritten when we
2953 restore the argument save area below. Don't work too
2954 hard though and simply force TARGET to a register if it
2955 is a MEM; the optimizer is quite likely to sort it out. */
2956 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2957 for (i = 0; i < num_actuals; i++)
2958 if (args[i].save_area)
2965 target = copy_to_reg (valreg);
2968 /* TARGET and VALREG cannot be equal at this point
2969 because the latter would not have
2970 REG_FUNCTION_VALUE_P true, while the former would if
2971 it were referring to the same register.
2973 If they refer to the same register, this move will be
2974 a no-op, except when function inlining is being
2976 emit_move_insn (target, valreg);
2978 /* If we are setting a MEM, this code must be executed.
2979 Since it is emitted after the call insn, sibcall
2980 optimization cannot be performed in that case. */
2982 sibcall_failure = 1;
2985 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2987 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2989 /* We can not support sibling calls for this case. */
2990 sibcall_failure = 1;
2993 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
2995 if (targetm.calls.promote_function_return(funtype))
2997 /* If we promoted this return value, make the proper SUBREG.
2998 TARGET might be const0_rtx here, so be careful. */
3000 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3001 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3003 tree type = TREE_TYPE (exp);
3004 int unsignedp = TYPE_UNSIGNED (type);
3006 enum machine_mode pmode;
3008 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3009 /* If we don't promote as expected, something is wrong. */
3010 gcc_assert (GET_MODE (target) == pmode);
3012 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3013 && (GET_MODE_SIZE (GET_MODE (target))
3014 > GET_MODE_SIZE (TYPE_MODE (type))))
3016 offset = GET_MODE_SIZE (GET_MODE (target))
3017 - GET_MODE_SIZE (TYPE_MODE (type));
3018 if (! BYTES_BIG_ENDIAN)
3019 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3020 else if (! WORDS_BIG_ENDIAN)
3021 offset %= UNITS_PER_WORD;
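/* Worked example (illustrative only): with an 8-byte register, a 2-byte
TYPE_MODE and UNITS_PER_WORD of 4, offset starts as 8 - 2 = 6; on a
word-big-endian but byte-little-endian target it is rounded down to 4, on a
byte-big-endian but word-little-endian target it becomes 6 % 4 = 2, and on
a fully big-endian target it stays 6. */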
3023 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3024 SUBREG_PROMOTED_VAR_P (target) = 1;
3025 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3029 /* If size of args is variable or this was a constructor call for a stack
3030 argument, restore saved stack-pointer value. */
3032 if (old_stack_level)
3034 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3035 stack_pointer_delta = old_stack_pointer_delta;
3036 pending_stack_adjust = old_pending_adj;
3037 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3038 stack_arg_under_construction = old_stack_arg_under_construction;
3039 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3040 stack_usage_map = initial_stack_usage_map;
3041 sibcall_failure = 1;
3043 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3045 #ifdef REG_PARM_STACK_SPACE
3047 restore_fixed_argument_area (save_area, argblock,
3048 high_to_save, low_to_save);
3051 /* If we saved any argument areas, restore them. */
3052 for (i = 0; i < num_actuals; i++)
3053 if (args[i].save_area)
3055 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3057 = gen_rtx_MEM (save_mode,
3058 memory_address (save_mode,
3059 XEXP (args[i].stack_slot, 0)));
3061 if (save_mode != BLKmode)
3062 emit_move_insn (stack_area, args[i].save_area);
3064 emit_block_move (stack_area, args[i].save_area,
3065 GEN_INT (args[i].locate.size.constant),
3066 BLOCK_OP_CALL_PARM);
3069 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3070 stack_usage_map = initial_stack_usage_map;
3073 /* If this was alloca, record the new stack level for nonlocal gotos.
3074 Check for the handler slots since we might not have a save area
3075 for non-local gotos. */
3077 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3078 update_nonlocal_goto_save_area ();
3080 /* Free up storage we no longer need. */
3081 for (i = 0; i < num_actuals; ++i)
3082 if (args[i].aligned_regs)
3083 free (args[i].aligned_regs);
3085 insns = get_insns ();
3090 tail_call_insns = insns;
3092 /* Restore the pending stack adjustment now that we have
3093 finished generating the sibling call sequence. */
3095 pending_stack_adjust = save_pending_stack_adjust;
3096 stack_pointer_delta = save_stack_pointer_delta;
3098 /* Prepare arg structure for next iteration. */
3099 for (i = 0; i < num_actuals; i++)
3102 args[i].aligned_regs = 0;
3106 sbitmap_free (stored_args_map);
3110 normal_call_insns = insns;
3112 /* Verify that we've deallocated all the stack we used. */
3113 gcc_assert ((flags & ECF_NORETURN)
3114 || (old_stack_allocated
3115 == stack_pointer_delta - pending_stack_adjust));
3118 /* If something prevents making this a sibling call,
3119 zero out the sequence. */
3120 if (sibcall_failure)
3121 tail_call_insns = NULL_RTX;
3126 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3127 arguments too, as the argument area is now clobbered by the call. */
3128 if (tail_call_insns)
3130 emit_insn (tail_call_insns);
3131 crtl->tail_call_emit = true;
3134 emit_insn (normal_call_insns);
3136 currently_expanding_call--;
3138 if (stack_usage_map_buf)
3139 free (stack_usage_map_buf);
3144 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3145 this function's incoming arguments.
3147 At the start of RTL generation we know the only REG_EQUIV notes
3148 in the rtl chain are those for incoming arguments, so we can look
3149 for REG_EQUIV notes between the start of the function and the
3150 NOTE_INSN_FUNCTION_BEG.
3152 This is (slight) overkill. We could keep track of the highest
3153 argument we clobber and be more selective in removing notes, but it
3154 does not seem to be worth the effort. */
3157 fixup_tail_calls (void)
3161 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3165 /* There are never REG_EQUIV notes for the incoming arguments
3166 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3168 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3171 note = find_reg_note (insn, REG_EQUIV, 0);
3173 remove_note (insn, note);
3174 note = find_reg_note (insn, REG_EQUIV, 0);
3179 /* Traverse a list of TYPES and expand all complex types into their
3182 split_complex_types (tree types)
3186 /* Before allocating memory, check for the common case of no complex. */
3187 for (p = types; p; p = TREE_CHAIN (p))
3189 tree type = TREE_VALUE (p);
3190 if (TREE_CODE (type) == COMPLEX_TYPE
3191 && targetm.calls.split_complex_arg (type))
3197 types = copy_list (types);
3199 for (p = types; p; p = TREE_CHAIN (p))
3201 tree complex_type = TREE_VALUE (p);
3203 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3204 && targetm.calls.split_complex_arg (complex_type))
3208 /* Rewrite complex type with component type. */
3209 TREE_VALUE (p) = TREE_TYPE (complex_type);
3210 next = TREE_CHAIN (p);
3212 /* Add another component type for the imaginary part. */
3213 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3214 TREE_CHAIN (p) = imag;
3215 TREE_CHAIN (imag) = next;
3217 /* Skip the newly created node. */
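/* For illustration (not part of the original code): given an argument type
list (int, complex double, char), the loop above rewrites it to
(int, double, double, char), replacing each complex entry with its
component type followed by a second copy for the imaginary part. */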
3225 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3226 The RETVAL parameter specifies whether the return value needs to be saved;
3227 the other parameters are documented in the emit_library_call function below. */
3230 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3231 enum libcall_type fn_type,
3232 enum machine_mode outmode, int nargs, va_list p)
3234 /* Total size in bytes of all the stack-parms scanned so far. */
3235 struct args_size args_size;
3236 /* Size of arguments before any adjustments (such as rounding). */
3237 struct args_size original_args_size;
3240 /* TODO: choose the correct decl type of orgfun. Sadly this information
3241 isn't present here, so we default to the native calling ABI. */
3242 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3243 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3247 CUMULATIVE_ARGS args_so_far;
3251 enum machine_mode mode;
3254 struct locate_and_pad_arg_data locate;
3258 int old_inhibit_defer_pop = inhibit_defer_pop;
3259 rtx call_fusage = 0;
3262 int pcc_struct_value = 0;
3263 int struct_value_size = 0;
3265 int reg_parm_stack_space = 0;
3268 tree tfom; /* type_for_mode (outmode, 0) */
3270 #ifdef REG_PARM_STACK_SPACE
3271 /* Define the boundary of the register parm stack space that needs to be
3273 int low_to_save = 0, high_to_save = 0;
3274 rtx save_area = 0; /* Place that it is saved. */
3277 /* Size of the stack reserved for parameter registers. */
3278 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3279 char *initial_stack_usage_map = stack_usage_map;
3280 char *stack_usage_map_buf = NULL;
3282 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3284 #ifdef REG_PARM_STACK_SPACE
3285 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3288 /* By default, library functions can not throw. */
3289 flags = ECF_NOTHROW;
3302 flags |= ECF_NORETURN;
3305 flags = ECF_NORETURN;
3307 case LCT_RETURNS_TWICE:
3308 flags = ECF_RETURNS_TWICE;
3313 /* Ensure current function's preferred stack boundary is at least
3315 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3316 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3318 /* If this kind of value comes back in memory,
3319 decide where in memory it should come back. */
3320 if (outmode != VOIDmode)
3322 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3323 if (aggregate_value_p (tfom, 0))
3325 #ifdef PCC_STATIC_STRUCT_RETURN
3327 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3328 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3329 pcc_struct_value = 1;
3331 value = gen_reg_rtx (outmode);
3332 #else /* not PCC_STATIC_STRUCT_RETURN */
3333 struct_value_size = GET_MODE_SIZE (outmode);
3334 if (value != 0 && MEM_P (value))
3337 mem_value = assign_temp (tfom, 0, 1, 1);
3339 /* This call returns a big structure. */
3340 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3344 tfom = void_type_node;
3346 /* ??? Unfinished: must pass the memory address as an argument. */
3348 /* Copy all the libcall-arguments out of the varargs data
3349 and into a vector ARGVEC.
3351 Compute how to pass each argument. We only support a very small subset
3352 of the full argument passing conventions to limit complexity here since
3353 library functions shouldn't have many args. */
3355 argvec = XALLOCAVEC (struct arg, nargs + 1);
3356 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3358 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3359 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3361 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3364 args_size.constant = 0;
3371 /* If there's a structure value address to be passed,
3372 either pass it in the special place, or pass it as an extra argument. */
3373 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3375 rtx addr = XEXP (mem_value, 0);
3379 /* Make sure it is a reasonable operand for a move or push insn. */
3380 if (!REG_P (addr) && !MEM_P (addr)
3381 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3382 addr = force_operand (addr, NULL_RTX);
3384 argvec[count].value = addr;
3385 argvec[count].mode = Pmode;
3386 argvec[count].partial = 0;
3388 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3389 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3390 NULL_TREE, 1) == 0);
3392 locate_and_pad_parm (Pmode, NULL_TREE,
3393 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3396 argvec[count].reg != 0,
3398 0, NULL_TREE, &args_size, &argvec[count].locate);
3400 if (argvec[count].reg == 0 || argvec[count].partial != 0
3401 || reg_parm_stack_space > 0)
3402 args_size.constant += argvec[count].locate.size.constant;
3404 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3409 for (; count < nargs; count++)
3411 rtx val = va_arg (p, rtx);
3412 enum machine_mode mode = va_arg (p, enum machine_mode);
3414 /* We cannot convert the arg value to the mode the library wants here;
3415 must do it earlier where we know the signedness of the arg. */
3416 gcc_assert (mode != BLKmode
3417 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3419 /* Make sure it is a reasonable operand for a move or push insn. */
3420 if (!REG_P (val) && !MEM_P (val)
3421 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3422 val = force_operand (val, NULL_RTX);
3424 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3428 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3430 /* If this was a CONST function, it is now PURE since it now
3432 if (flags & ECF_CONST)
3434 flags &= ~ECF_CONST;
3438 if (MEM_P (val) && !must_copy)
3442 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3444 emit_move_insn (slot, val);
3447 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3448 gen_rtx_USE (VOIDmode, slot),
3451 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3452 gen_rtx_CLOBBER (VOIDmode,
3457 val = force_operand (XEXP (slot, 0), NULL_RTX);
3460 argvec[count].value = val;
3461 argvec[count].mode = mode;
3463 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3465 argvec[count].partial
3466 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3468 locate_and_pad_parm (mode, NULL_TREE,
3469 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3472 argvec[count].reg != 0,
3474 argvec[count].partial,
3475 NULL_TREE, &args_size, &argvec[count].locate);
3477 gcc_assert (!argvec[count].locate.size.var);
3479 if (argvec[count].reg == 0 || argvec[count].partial != 0
3480 || reg_parm_stack_space > 0)
3481 args_size.constant += argvec[count].locate.size.constant;
3483 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3486 /* If this machine requires an external definition for library
3487 functions, write one out. */
3488 assemble_external_libcall (fun);
3490 original_args_size = args_size;
3491 args_size.constant = (((args_size.constant
3492 + stack_pointer_delta
3496 - stack_pointer_delta);
3498 args_size.constant = MAX (args_size.constant,
3499 reg_parm_stack_space);
3501 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3502 args_size.constant -= reg_parm_stack_space;
3504 if (args_size.constant > crtl->outgoing_args_size)
3505 crtl->outgoing_args_size = args_size.constant;
3507 if (ACCUMULATE_OUTGOING_ARGS)
3509 /* Since the stack pointer will never be pushed, it is possible for
3510 the evaluation of a parm to clobber something we have already
3511 written to the stack. Since most function calls on RISC machines
3512 do not use the stack, this is uncommon, but must work correctly.
3514 Therefore, we save any area of the stack that was already written
3515 and that we are using. Here we set up to do this by making a new
3516 stack usage map from the old one.
3518 Another approach might be to try to reorder the argument
3519 evaluations to avoid this conflicting stack usage. */
3521 needed = args_size.constant;
3523 /* Since we will be writing into the entire argument area, the
3524 map must be allocated for its entire size, not just the part that
3525 is the responsibility of the caller. */
3526 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3527 needed += reg_parm_stack_space;
3529 #ifdef ARGS_GROW_DOWNWARD
3530 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3533 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3536 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3537 stack_usage_map = stack_usage_map_buf;
3539 if (initial_highest_arg_in_use)
3540 memcpy (stack_usage_map, initial_stack_usage_map,
3541 initial_highest_arg_in_use);
3543 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3544 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3545 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3548 /* We must be careful to use virtual regs before they're instantiated,
3549 and real regs afterwards. Loop optimization, for example, can create
3550 new libcalls after we've instantiated the virtual regs, and if we
3551 use virtuals anyway, they won't match the rtl patterns. */
3553 if (virtuals_instantiated)
3554 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3556 argblock = virtual_outgoing_args_rtx;
3561 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3564 /* If we push args individually in reverse order, perform stack alignment
3565 before the first push (the last arg). */
3566 if (argblock == 0 && PUSH_ARGS_REVERSED)
3567 anti_adjust_stack (GEN_INT (args_size.constant
3568 - original_args_size.constant));
3570 if (PUSH_ARGS_REVERSED)
3581 #ifdef REG_PARM_STACK_SPACE
3582 if (ACCUMULATE_OUTGOING_ARGS)
3584 /* The argument list is the property of the called routine and it
3585 may clobber it. If the fixed area has been used for previous
3586 parameters, we must save and restore it. */
3587 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3588 &low_to_save, &high_to_save);
3592 /* Push the args that need to be pushed. */
3594 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3595 are to be pushed. */
3596 for (count = 0; count < nargs; count++, argnum += inc)
3598 enum machine_mode mode = argvec[argnum].mode;
3599 rtx val = argvec[argnum].value;
3600 rtx reg = argvec[argnum].reg;
3601 int partial = argvec[argnum].partial;
3602 int lower_bound = 0, upper_bound = 0, i;
3604 if (! (reg != 0 && partial == 0))
3606 if (ACCUMULATE_OUTGOING_ARGS)
3608 /* If this is being stored into a pre-allocated, fixed-size,
3609 stack area, save any previous data at that location. */
3611 #ifdef ARGS_GROW_DOWNWARD
3612 /* stack_slot is negative, but we want to index stack_usage_map
3613 with positive values. */
3614 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3615 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3617 lower_bound = argvec[argnum].locate.offset.constant;
3618 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
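/* Illustrative example (not from the original sources): with
ARGS_GROW_DOWNWARD, an argument whose locate.offset.constant is -16 and
whose size is 4 gives upper_bound = 17 and lower_bound = 13, so map entries
13..16 are checked and later marked; otherwise an offset of 16 and size 4
give lower_bound = 16 and upper_bound = 20. */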
3622 /* Don't worry about things in the fixed argument area;
3623 it has already been saved. */
3624 if (i < reg_parm_stack_space)
3625 i = reg_parm_stack_space;
3626 while (i < upper_bound && stack_usage_map[i] == 0)
3629 if (i < upper_bound)
3631 /* We need to make a save area. */
3633 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3634 enum machine_mode save_mode
3635 = mode_for_size (size, MODE_INT, 1);
3637 = plus_constant (argblock,
3638 argvec[argnum].locate.offset.constant);
3640 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3642 if (save_mode == BLKmode)
3644 argvec[argnum].save_area
3645 = assign_stack_temp (BLKmode,
3646 argvec[argnum].locate.size.constant,
3649 emit_block_move (validize_mem (argvec[argnum].save_area),
3651 GEN_INT (argvec[argnum].locate.size.constant),
3652 BLOCK_OP_CALL_PARM);
3656 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3658 emit_move_insn (argvec[argnum].save_area, stack_area);
3663 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3664 partial, reg, 0, argblock,
3665 GEN_INT (argvec[argnum].locate.offset.constant),
3666 reg_parm_stack_space,
3667 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3669 /* Now mark the segment we just used. */
3670 if (ACCUMULATE_OUTGOING_ARGS)
3671 for (i = lower_bound; i < upper_bound; i++)
3672 stack_usage_map[i] = 1;
3676 if ((flags & ECF_CONST)
3677 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
3681 /* Indicate argument access so that alias.c knows that these
3684 use = plus_constant (argblock,
3685 argvec[argnum].locate.offset.constant);
3687 /* When arguments are pushed, trying to tell alias.c where
3688 exactly this argument is won't work, because the
3689 auto-increment causes confusion. So we merely indicate
3690 that we access something with a known mode somewhere on
3692 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3693 gen_rtx_SCRATCH (Pmode));
3694 use = gen_rtx_MEM (argvec[argnum].mode, use);
3695 use = gen_rtx_USE (VOIDmode, use);
3696 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3701 /* If we pushed args in forward order, perform stack alignment
3702 after pushing the last arg. */
3703 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3704 anti_adjust_stack (GEN_INT (args_size.constant
3705 - original_args_size.constant));
3707 if (PUSH_ARGS_REVERSED)
3712 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3714 /* Now load any reg parms into their regs. */
3716 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3717 are to be pushed. */
3718 for (count = 0; count < nargs; count++, argnum += inc)
3720 enum machine_mode mode = argvec[argnum].mode;
3721 rtx val = argvec[argnum].value;
3722 rtx reg = argvec[argnum].reg;
3723 int partial = argvec[argnum].partial;
3725 /* Handle calls that pass values in multiple non-contiguous
3726 locations. The PA64 has examples of this for library calls. */
3727 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3728 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3729 else if (reg != 0 && partial == 0)
3730 emit_move_insn (reg, val);
3735 /* Any regs containing parms remain in use through the call. */
3736 for (count = 0; count < nargs; count++)
3738 rtx reg = argvec[count].reg;
3739 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3740 use_group_regs (&call_fusage, reg);
3743 int partial = argvec[count].partial;
3747 gcc_assert (partial % UNITS_PER_WORD == 0);
3748 nregs = partial / UNITS_PER_WORD;
3749 use_regs (&call_fusage, REGNO (reg), nregs);
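/* For example (illustrative only): a partial value of 8 bytes with
UNITS_PER_WORD of 4 gives nregs = 2, so the two consecutive hard registers
starting at REGNO (reg) are recorded in CALL_FUSAGE. */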
3752 use_reg (&call_fusage, reg);
3756 /* Pass the function the address in which to return a structure value. */
3757 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3759 emit_move_insn (struct_value,
3761 force_operand (XEXP (mem_value, 0),
3763 if (REG_P (struct_value))
3764 use_reg (&call_fusage, struct_value);
3767 /* Don't allow popping to be deferred, since then
3768 cse'ing of library calls could delete a call and leave the pop. */
3770 valreg = (mem_value == 0 && outmode != VOIDmode
3771 ? hard_libcall_value (outmode) : NULL_RTX);
3773 /* Stack must be properly aligned now. */
3774 gcc_assert (!(stack_pointer_delta
3775 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3777 before_call = get_last_insn ();
3779 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3780 will set inhibit_defer_pop to that value. */
3781 /* The return type is needed to decide how many bytes the function pops.
3782 Signedness plays no role in that, so for simplicity, we pretend it's
3783 always signed. We also assume that the list of arguments passed has
3784 no impact, so we pretend it is unknown. */
  emit_call_1 (fun, NULL,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */
  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
	 immediately after the CALL_INSN.  Some ports emit more than
	 just a CALL_INSN above, so we must search for it here.  */
      rtx last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      emit_barrier_after (last);
    }
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else
	{
	  /* Convert to the proper mode if PROMOTE_MODE has been active.  */
	  if (GET_MODE (valreg) != outmode)
	    {
	      int unsignedp = TYPE_UNSIGNED (tfom);
	      gcc_assert (targetm.calls.promote_function_return (tfom));
	      gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
			  == GET_MODE (valreg));
	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
	    }
	  if (value != 0)
	    emit_move_insn (value, valreg);
	  else
	    value = valreg;
	}
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif
      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem (argvec[count].save_area),
			       GEN_INT (argvec[count].locate.size.constant),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }
      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
   other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
		   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
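/* A hypothetical usage sketch (the names LIBFUNC, X and Y are illustrative):
   to emit a call to a two-operand SImode helper whose address is the
   SYMBOL_REF LIBFUNC, a caller might write

	emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
			   x, SImode,
			   y, SImode);

   Each argument rtx is followed by the machine mode it should be converted
   to before being passed.  */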
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
			 enum libcall_type fn_type,
			 enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
				      nargs, p);
  va_end (p);
  return result;
}
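/* A hypothetical usage sketch (DIV_LIBFUNC, OP0 and OP1 are illustrative
   names): to call a DImode division helper and obtain its result as an rtx,
   a caller might write

	rtx res = emit_library_call_value (div_libfunc, NULL_RTX, LCT_CONST,
					   DImode, 2,
					   op0, DImode,
					   op1, DImode);

   Passing NULL_RTX for VALUE lets this function choose the result location,
   typically the hard register given by hard_libcall_value (DImode).  */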
3942 /* Store a single argument for a function call
3943 into the register or memory area where it must be passed.
3944 *ARG describes the argument value and where to pass it.
3946 ARGBLOCK is the address of the stack-block for all the arguments,
3947 or 0 on a machine where arguments are pushed individually.
3949 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3950 so must be careful about how the stack is used.
3952 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3953 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3954 that we need not worry about saving and restoring the stack.
3956 FNDECL is the declaration of the function we are calling.
   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;
  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
#ifdef ARGS_GROW_DOWNWARD
	  /* stack_slot is negative, but we want to index stack_usage_map
	     with positive values.  */
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	  else
	    upper_bound = 0;

	  lower_bound = upper_bound - arg->locate.size.constant;
#else
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	  else
	    lower_bound = 0;

	  upper_bound = lower_bound + arg->locate.size.constant;
#endif

	  i = lower_bound;
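	  /* Worked numbers (illustrative): when arguments do not grow
	     downward and the slot address is (plus virtual-outgoing-args 16)
	     with a 16-byte argument, this gives lower_bound == 16 and
	     upper_bound == 32, i.e. bytes 16..31 of stack_usage_map are
	     inspected below.  */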
4004 /* Don't worry about things in the fixed argument area;
4005 it has already been saved. */
4006 if (i < reg_parm_stack_space)
4007 i = reg_parm_stack_space;
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;

	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
4014 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4015 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4016 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4017 rtx stack_area = gen_rtx_MEM (save_mode, adr);
	      if (save_mode == BLKmode)
		{
		  tree ot = TREE_TYPE (arg->tree_value);
		  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
						       | TYPE_QUAL_CONST));

		  arg->save_area = assign_temp (nt, 0, 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (arg->save_area), stack_area,
				   GEN_INT (arg->locate.size.constant),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;
  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
4064 /* stack_arg_under_construction is nonzero if a function argument is
4065 being evaluated directly into the outgoing argument list and
4066 expand_call must take special action to preserve the argument list
4067 if it is called recursively.
4069 For scalar function arguments stack_usage_map is sufficient to
4070 determine which stack slots must be saved and restored. Scalar
4071 arguments in general have pass_on_stack == 0.
4073 If this argument is initialized by a function which takes the
4074 address of the argument (a C++ constructor or a C function
4075 returning a BLKmode structure), then stack_usage_map is
4076 insufficient and expand_call must push the stack around the
4077 function call. Such arguments have pass_on_stack == 1.
4079 Note that it is always safe to set stack_arg_under_construction,
4080 but this generates suboptimal code if set when not needed. */
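      /* A purely illustrative instance of the troublesome case above: in a
	 call such as "g (make_obj ())", where make_obj returns a BLKmode
	 structure in memory, the return slot can be constructed directly in
	 g's outgoing argument block, so expand_call must protect the argument
	 area around the nested call.  */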
4082 if (arg->pass_on_stack)
4083 stack_arg_under_construction++;
      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);
      /* If we are promoting the object (or if for any other reason the mode
	 doesn't agree), convert the mode now.  */
4094 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4095 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4096 arg->value, arg->unsignedp);
      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }
4102 /* Check for overlap with already clobbered argument area. */
4103 if ((flags & ECF_SIBCALL)
4104 && MEM_P (arg->value)
4105 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4106 arg->locate.size.constant))
4107 sibcall_failure = 1;
4109 /* Don't allow anything left on stack from computation
4110 of argument to alloca. */
4111 if (flags & ECF_MAY_BE_ALLOCA)
4112 do_pending_stack_adjust ();
  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;
4122 /* Argument is a scalar, not entirely passed in registers.
4123 (If part is passed in registers, arg->partial says how much
4124 and emit_push_insn will take care of putting it there.)
4126 Push it, and if its size is less than the
4127 amount of space allocated to it,
4128 also bump stack pointer by the additional space.
4129 Note that in C the default argument promotions
4130 will prevent such mismatches. */
4132 size = GET_MODE_SIZE (arg->mode);
4133 /* Compute how much space the push instruction will push.
4134 On many machines, pushing a byte will advance the stack
4135 pointer by a halfword. */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;
4141 /* Compute how much space the argument should get:
4142 round up to a multiple of the alignment for arguments. */
4143 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4144 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4145 / (PARM_BOUNDARY / BITS_PER_UNIT))
4146 * (PARM_BOUNDARY / BITS_PER_UNIT));
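      /* Worked numbers (illustrative): with PARM_BOUNDARY == 32 and a 2-byte
	 mode, SIZE is 2 and the expression above rounds USED up to 4, so the
	 slot gets two bytes of padding beyond the value.  */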
4148 /* Compute the alignment of the pushed argument. */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  int pad = used - size;
	  if (pad)
	    {
	      unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, pad_align);
	    }
	}
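      /* Continuing the illustrative numbers above: PAD == USED - SIZE == 2,
	 so pad_align is (2 & -2) * BITS_PER_UNIT == 16 and parm_align is
	 capped at 16 bits, even though the slot itself is aligned to
	 PARM_BOUNDARY.  */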
4160 /* This isn't already where we want it on the stack, so put it there.
4161 This can either be done with push or copy insns. */
4162 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4163 parm_align, partial, reg, used - size, argblock,
4164 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4165 ARGS_SIZE_RTX (arg->locate.alignment_pad));
      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */
      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}
4203 parm_align = arg->locate.boundary;
4205 /* When an argument is padded down, the block is aligned to
4206 PARM_BOUNDARY, but the actual argument isn't. */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     crtl->args.internal_arg_pointer
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
	    {
	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));
4234 /* expand_call should ensure this. */
4235 gcc_assert (!arg->locate.offset.var
4236 && arg->locate.size.var == 0
4237 && GET_CODE (size_rtx) == CONST_INT);
	      if (arg->locate.offset.constant > i)
		{
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  /* Use arg->locate.size.constant instead of size_rtx
		     because we only care about the part of the argument
		     on the stack.  */
		  if (i < (arg->locate.offset.constant
			   + arg->locate.size.constant))
		    sibcall_failure = 1;
		}
	      else
		{
4255 /* Even though they appear to be at the same location,
4256 if part of the outgoing argument is in registers,
4257 they aren't really at the same location. Check for
4258 this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (arg->locate.size.constant != INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4267 parm_align, partial, reg, excess, argblock,
4268 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4269 ARGS_SIZE_RTX (arg->locate.alignment_pad));
      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }
4291 /* Mark all slots this store used. */
4292 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4293 && argblock && ! variable_size && arg->stack)
4294 for (i = lower_bound; i < upper_bound; i++)
4295 stack_usage_map[i] = 1;
  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();
  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
			     const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
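/* Illustrative numbers for the final check above: with PARM_BOUNDARY == 32,
   a 6-byte BLKmode structure gives int_size_in_bytes (type) % 4 == 2, so on
   a big-endian target whose padding direction for it is upward the function
   returns true and the structure is passed on the stack.  */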