1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
35 #include "diagnostic-core.h"
40 #include "langhooks.h"
46 #include "tree-flow.h"
48 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
49 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
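/* Illustrative note (not from the original source): on a target where
   PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT is 8, STACK_BYTES
   evaluates to 16, so outgoing argument blocks are rounded to multiples of
   16 bytes.  */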
51 /* Data structure and subroutines used within expand_call. */
55 /* Tree node for this argument. */
57 /* Mode for value; TYPE_MODE unless promoted. */
58 enum machine_mode mode;
59 /* Current RTL value for argument, or 0 if it isn't precomputed. */
61 /* Initially-computed RTL value for argument; only for const functions. */
63 /* Register to pass this argument in, 0 if passed on stack, or a
64 PARALLEL if the arg is to be copied into multiple non-contiguous
67 /* Register to pass this argument in when generating tail call sequence.
68 This is not the same register as for normal calls on machines with
71 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
72 form for emit_group_move. */
74 /* If REG was promoted from the actual mode of the argument expression,
75 indicates whether the promotion is sign- or zero-extended. */
77 /* Number of bytes to put in registers. 0 means put the whole arg
78 in registers. Also 0 if not passed in registers. */
80 /* Nonzero if argument must be passed on stack.
81 Note that some arguments may be passed on the stack
82 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
83 pass_on_stack identifies arguments that *cannot* go in registers. */
85 /* Some fields packaged up for locate_and_pad_parm. */
86 struct locate_and_pad_arg_data locate;
87 /* Location on the stack at which parameter should be stored. The store
88 has already been done if STACK == VALUE. */
90 /* Location on the stack of the start of this argument slot. This can
91 differ from STACK if this arg pads downward. This location is known
92 to be aligned to FUNCTION_ARG_BOUNDARY. */
94 /* Place that this stack area has been saved, if needed. */
96 /* If an argument's alignment does not permit direct copying into registers,
97 copy in smaller-sized pieces into pseudos. These are stored in a
98 block pointed to by this field. The next field says how many
99 word-sized pseudos we made. */
104 /* A vector of one char per byte of stack space. A byte is nonzero if
105 the corresponding stack location has been used.
106 This vector is used to prevent a function call within an argument from
107 clobbering any stack already set up. */
108 static char *stack_usage_map;
110 /* Size of STACK_USAGE_MAP. */
111 static int highest_outgoing_arg_in_use;
113 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
114 stack location's tail call argument has already been stored into the stack.
115 This bitmap is used to prevent sibling call optimization if the function tries
116 to use the parent's incoming argument slots when they have already been
117 overwritten with tail call arguments. */
118 static sbitmap stored_args_map;
120 /* stack_arg_under_construction is nonzero when an argument may be
121 initialized with a constructor call (including a C function that
122 returns a BLKmode struct) and expand_call must take special action
123 to make sure the object being constructed does not overlap the
124 argument list for the constructor call. */
125 static int stack_arg_under_construction;
127 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
128 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
130 static void precompute_register_parameters (int, struct arg_data *, int *);
131 static int store_one_arg (struct arg_data *, rtx, int, int, int);
132 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
133 static int finalize_must_preallocate (int, int, struct arg_data *,
135 static void precompute_arguments (int, struct arg_data *);
136 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
137 static void initialize_argument_information (int, struct arg_data *,
138 struct args_size *, int,
140 tree, tree, CUMULATIVE_ARGS *, int,
141 rtx *, int *, int *, int *,
143 static void compute_argument_addresses (struct arg_data *, rtx, int);
144 static rtx rtx_for_function_call (tree, tree);
145 static void load_register_parameters (struct arg_data *, int, rtx *, int,
147 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
148 enum machine_mode, int, va_list);
149 static int special_function_p (const_tree, int);
150 static int check_sibcall_argument_overlap_1 (rtx);
151 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
153 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
155 static tree split_complex_types (tree);
157 #ifdef REG_PARM_STACK_SPACE
158 static rtx save_fixed_argument_area (int, rtx, int *, int *);
159 static void restore_fixed_argument_area (rtx, rtx, int, int);
162 /* Force FUNEXP into a form suitable for the address of a CALL,
163 and return that as an rtx. Also load the static chain register
164 if FNDECL is a nested function.
166 CALL_FUSAGE points to a variable holding the prospective
167 CALL_INSN_FUNCTION_USAGE information. */
170 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
171 rtx *call_fusage, int reg_parm_seen, int sibcallp)
173 /* Make a valid memory address and copy constants through pseudo-regs,
174 but not for a constant address if -fno-function-cse. */
175 if (GET_CODE (funexp) != SYMBOL_REF)
176 /* If we are using registers for parameters, force the
177 function address into a register now. */
178 funexp = ((reg_parm_seen
179 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
180 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
181 : memory_address (FUNCTION_MODE, funexp));
184 #ifndef NO_FUNCTION_CSE
185 if (optimize && ! flag_no_function_cse)
186 funexp = force_reg (Pmode, funexp);
190 if (static_chain_value != 0)
195 chain = targetm.calls.static_chain (fndecl, false);
196 static_chain_value = convert_memory_address (Pmode, static_chain_value);
198 emit_move_insn (chain, static_chain_value);
200 use_reg (call_fusage, chain);
206 /* Generate instructions to call function FUNEXP,
207 and optionally pop the results.
208 The CALL_INSN is the first insn generated.
210 FNDECL is the declaration node of the function. This is given to the
211 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
214 FUNTYPE is the data type of the function. This is given to the hook
215 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
216 own args. We used to allow an identifier for library functions, but
217 that doesn't work when the return type is an aggregate type and the
218 calling convention says that the pointer to this aggregate is to be
219 popped by the callee.
221 STACK_SIZE is the number of bytes of arguments on the stack,
222 ROUNDED_STACK_SIZE is that number rounded up to
223 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
224 both to put into the call insn and to generate explicit popping
227 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
228 It is zero if this call doesn't want a structure value.
230 NEXT_ARG_REG is the rtx that results from executing
231 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
232 just after all the args have had their registers assigned.
233 This could be whatever you like, but normally it is the first
234 arg-register beyond those used for args in this call,
235 or 0 if all the arg-registers are used in this call.
236 It is passed on to `gen_call' so you can put this info in the call insn.
238 VALREG is a hard register in which a value is returned,
239 or 0 if the call does not return a value.
241 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
242 the args to this call were processed.
243 We restore `inhibit_defer_pop' to that value.
245 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
246 denote registers used by the called function. */
249 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
250 tree funtype ATTRIBUTE_UNUSED,
251 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
252 HOST_WIDE_INT rounded_stack_size,
253 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
254 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
255 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
256 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
258 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
260 int already_popped = 0;
261 HOST_WIDE_INT n_popped
262 = targetm.calls.return_pops_args (fndecl, funtype, stack_size);
264 #ifdef CALL_POPS_ARGS
265 n_popped += CALL_POPS_ARGS (* args_so_far);
268 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
269 and we don't want to load it into a register as an optimization,
270 because prepare_call_address already did it if it should be done. */
271 if (GET_CODE (funexp) != SYMBOL_REF)
272 funexp = memory_address (FUNCTION_MODE, funexp);
274 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
275 if ((ecf_flags & ECF_SIBCALL)
276 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
277 && (n_popped > 0 || stack_size == 0))
279 rtx n_pop = GEN_INT (n_popped);
282 /* If this subroutine pops its own args, record that in the call insn
283 if possible, for the sake of frame pointer elimination. */
286 pat = GEN_SIBCALL_VALUE_POP (valreg,
287 gen_rtx_MEM (FUNCTION_MODE, funexp),
288 rounded_stack_size_rtx, next_arg_reg,
291 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
292 rounded_stack_size_rtx, next_arg_reg, n_pop);
294 emit_call_insn (pat);
300 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
301 /* If the target has "call" or "call_value" insns, then prefer them
302 if no arguments are actually popped. If the target does not have
303 "call" or "call_value" insns, then we must use the popping versions
304 even if the call has no arguments to pop. */
305 #if defined (HAVE_call) && defined (HAVE_call_value)
306 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
309 if (HAVE_call_pop && HAVE_call_value_pop)
312 rtx n_pop = GEN_INT (n_popped);
315 /* If this subroutine pops its own args, record that in the call insn
316 if possible, for the sake of frame pointer elimination. */
319 pat = GEN_CALL_VALUE_POP (valreg,
320 gen_rtx_MEM (FUNCTION_MODE, funexp),
321 rounded_stack_size_rtx, next_arg_reg, n_pop);
323 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
324 rounded_stack_size_rtx, next_arg_reg, n_pop);
326 emit_call_insn (pat);
332 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
333 if ((ecf_flags & ECF_SIBCALL)
334 && HAVE_sibcall && HAVE_sibcall_value)
337 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
338 gen_rtx_MEM (FUNCTION_MODE, funexp),
339 rounded_stack_size_rtx,
340 next_arg_reg, NULL_RTX));
342 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
343 rounded_stack_size_rtx, next_arg_reg,
344 GEN_INT (struct_value_size)));
349 #if defined (HAVE_call) && defined (HAVE_call_value)
350 if (HAVE_call && HAVE_call_value)
353 emit_call_insn (GEN_CALL_VALUE (valreg,
354 gen_rtx_MEM (FUNCTION_MODE, funexp),
355 rounded_stack_size_rtx, next_arg_reg,
358 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
359 rounded_stack_size_rtx, next_arg_reg,
360 GEN_INT (struct_value_size)));
366 /* Find the call we just emitted. */
367 call_insn = last_call_insn ();
369 /* Put the register usage information there. */
370 add_function_usage_to (call_insn, call_fusage);
372 /* If this is a const call, then set the insn's unchanging bit. */
373 if (ecf_flags & ECF_CONST)
374 RTL_CONST_CALL_P (call_insn) = 1;
376 /* If this is a pure call, then set the insn's unchanging bit. */
377 if (ecf_flags & ECF_PURE)
378 RTL_PURE_CALL_P (call_insn) = 1;
380 /* If this is a looping const or pure call, then set the corresponding bit. */
381 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
382 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
384 /* Create a nothrow REG_EH_REGION note, if needed. */
385 make_reg_eh_region_note (call_insn, ecf_flags, 0);
387 if (ecf_flags & ECF_NORETURN)
388 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
390 if (ecf_flags & ECF_RETURNS_TWICE)
392 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
393 cfun->calls_setjmp = 1;
396 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
398 /* Record debug information for virtual calls. */
399 if (flag_enable_icf_debug && fndecl == NULL)
400 (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
401 INSN_UID (call_insn));
403 /* Restore this now, so that we do defer pops for this call's args
404 if the context of the call as a whole permits. */
405 inhibit_defer_pop = old_inhibit_defer_pop;
410 CALL_INSN_FUNCTION_USAGE (call_insn)
411 = gen_rtx_EXPR_LIST (VOIDmode,
412 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
413 CALL_INSN_FUNCTION_USAGE (call_insn));
414 rounded_stack_size -= n_popped;
415 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
416 stack_pointer_delta -= n_popped;
418 /* If popping is needed, stack realignment must use DRAP. */
419 if (SUPPORTS_STACK_ALIGNMENT)
420 crtl->need_drap = true;
423 if (!ACCUMULATE_OUTGOING_ARGS)
425 /* If returning from the subroutine does not automatically pop the args,
426 we need an instruction to pop them sooner or later.
427 Perhaps do it now; perhaps just record how much space to pop later.
429 If returning from the subroutine does pop the args, indicate that the
430 stack pointer will be changed. */
432 if (rounded_stack_size != 0)
434 if (ecf_flags & ECF_NORETURN)
435 /* Just pretend we did the pop. */
436 stack_pointer_delta -= rounded_stack_size;
437 else if (flag_defer_pop && inhibit_defer_pop == 0
438 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
439 pending_stack_adjust += rounded_stack_size;
441 adjust_stack (rounded_stack_size_rtx);
444 /* When we accumulate outgoing args, we must avoid any stack manipulations.
445 Restore the stack pointer to its original value now. Usually
446 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
447 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
448 popping variants of functions exist as well.
450 ??? We may optimize similarly to defer_pop above, but it is
451 probably not worthwhile.
453 ??? It will be worthwhile to enable combine_stack_adjustments even for
456 anti_adjust_stack (GEN_INT (n_popped));
459 /* Determine if the function identified by NAME and FNDECL is one with
460 special properties we wish to know about.
462 For example, if the function might return more than one time (setjmp), then
463 set RETURNS_TWICE to a nonzero value.
465 Similarly set NORETURN if the function is in the longjmp family.
467 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
468 space from the stack such as alloca. */
471 special_function_p (const_tree fndecl, int flags)
473 if (fndecl && DECL_NAME (fndecl)
474 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
475 /* Exclude functions not at the file scope, or not `extern',
476 since they are not the magic functions we would otherwise
478 FIXME: this should be handled with attributes, not with this
479 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
480 because you can declare fork() inside a function if you
482 && (DECL_CONTEXT (fndecl) == NULL_TREE
483 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
484 && TREE_PUBLIC (fndecl))
486 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
487 const char *tname = name;
489 /* We assume that alloca will always be called by name. It
490 makes no sense to pass it as a pointer-to-function to
491 anything that does not understand its behavior. */
492 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && ! strcmp (name, "alloca"))
495 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && ! strcmp (name, "__builtin_alloca"))))
498 flags |= ECF_MAY_BE_ALLOCA;
500 /* Disregard prefix _, __, __x or __builtin_. */
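/* Illustrative note: after the prefix is stripped below, "__builtin_setjmp",
   "_setjmp" and "setjmp" all leave TNAME pointing at "setjmp", so each of
   them is recognized by the returns-twice checks that follow.  */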
505 && !strncmp (name + 3, "uiltin_", 7))
507 else if (name[1] == '_' && name[2] == 'x')
509 else if (name[1] == '_')
518 && (! strcmp (tname, "setjmp")
519 || ! strcmp (tname, "setjmp_syscall")))
521 && ! strcmp (tname, "sigsetjmp"))
523 && ! strcmp (tname, "savectx")))
524 flags |= ECF_RETURNS_TWICE;
527 && ! strcmp (tname, "siglongjmp"))
528 flags |= ECF_NORETURN;
530 else if ((tname[0] == 'q' && tname[1] == 's'
531 && ! strcmp (tname, "qsetjmp"))
532 || (tname[0] == 'v' && tname[1] == 'f'
533 && ! strcmp (tname, "vfork"))
534 || (tname[0] == 'g' && tname[1] == 'e'
535 && !strcmp (tname, "getcontext")))
536 flags |= ECF_RETURNS_TWICE;
538 else if (tname[0] == 'l' && tname[1] == 'o'
539 && ! strcmp (tname, "longjmp"))
540 flags |= ECF_NORETURN;
546 /* Return nonzero when FNDECL represents a call to setjmp. */
549 setjmp_call_p (const_tree fndecl)
551 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
555 /* Return true if STMT is an alloca call. */
558 gimple_alloca_call_p (const_gimple stmt)
562 if (!is_gimple_call (stmt))
565 fndecl = gimple_call_fndecl (stmt);
566 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
572 /* Return true when EXP contains an alloca call. */
575 alloca_call_p (const_tree exp)
577 if (TREE_CODE (exp) == CALL_EXPR
578 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
579 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
580 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
581 & ECF_MAY_BE_ALLOCA))
586 /* Detect flags (function attributes) from the function decl or type node. */
589 flags_from_decl_or_type (const_tree exp)
595 /* The function exp may have the `malloc' attribute. */
596 if (DECL_IS_MALLOC (exp))
599 /* The function exp may have the `returns_twice' attribute. */
600 if (DECL_IS_RETURNS_TWICE (exp))
601 flags |= ECF_RETURNS_TWICE;
603 /* Process the pure and const attributes. */
604 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
606 if (DECL_PURE_P (exp))
608 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
609 flags |= ECF_LOOPING_CONST_OR_PURE;
611 if (DECL_IS_NOVOPS (exp))
614 if (TREE_NOTHROW (exp))
615 flags |= ECF_NOTHROW;
617 flags = special_function_p (exp, flags);
619 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
622 if (TREE_THIS_VOLATILE (exp))
623 flags |= ECF_NORETURN;
628 /* Detect flags from a CALL_EXPR. */
631 call_expr_flags (const_tree t)
634 tree decl = get_callee_fndecl (t);
637 flags = flags_from_decl_or_type (decl);
640 t = TREE_TYPE (CALL_EXPR_FN (t));
641 if (t && TREE_CODE (t) == POINTER_TYPE)
642 flags = flags_from_decl_or_type (TREE_TYPE (t));
650 /* Precompute all register parameters as described by ARGS, storing values
651 into fields within the ARGS array.
653 NUM_ACTUALS indicates the total number of elements in the ARGS array.
655 Set REG_PARM_SEEN if we encounter a register parameter. */
658 precompute_register_parameters (int num_actuals, struct arg_data *args,
665 for (i = 0; i < num_actuals; i++)
666 if (args[i].reg != 0 && ! args[i].pass_on_stack)
670 if (args[i].value == 0)
673 args[i].value = expand_normal (args[i].tree_value);
674 preserve_temp_slots (args[i].value);
678 /* If the value is a non-legitimate constant, force it into a
679 pseudo now. TLS symbols sometimes need a call to resolve. */
680 if (CONSTANT_P (args[i].value)
681 && !LEGITIMATE_CONSTANT_P (args[i].value))
682 args[i].value = force_reg (args[i].mode, args[i].value);
684 /* If we are to promote the function arg to a wider mode,
687 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
689 = convert_modes (args[i].mode,
690 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
691 args[i].value, args[i].unsignedp);
693 /* If we're going to have to load the value by parts, pull the
694 parts into pseudos. The part extraction process can involve
695 non-trivial computation. */
696 if (GET_CODE (args[i].reg) == PARALLEL)
698 tree type = TREE_TYPE (args[i].tree_value);
699 args[i].parallel_value
700 = emit_group_load_into_temps (args[i].reg, args[i].value,
701 type, int_size_in_bytes (type));
704 /* If the value is expensive, and we are inside an appropriately
705 short loop, put the value into a pseudo and then put the pseudo
708 For small register classes, also do this if this call uses
709 register parameters. This is to avoid reload conflicts while
710 loading the parameter registers. */
712 else if ((! (REG_P (args[i].value)
713 || (GET_CODE (args[i].value) == SUBREG
714 && REG_P (SUBREG_REG (args[i].value)))))
715 && args[i].mode != BLKmode
716 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
719 && targetm.small_register_classes_for_mode_p (args[i].mode))
721 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
725 #ifdef REG_PARM_STACK_SPACE
727 /* The argument list is the property of the called routine and it
728 may clobber it. If the fixed area has been used for previous
729 parameters, we must save and restore it. */
732 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
737 /* Compute the boundary of the area that needs to be saved, if any. */
738 high = reg_parm_stack_space;
739 #ifdef ARGS_GROW_DOWNWARD
742 if (high > highest_outgoing_arg_in_use)
743 high = highest_outgoing_arg_in_use;
745 for (low = 0; low < high; low++)
746 if (stack_usage_map[low] != 0)
749 enum machine_mode save_mode;
754 while (stack_usage_map[--high] == 0)
758 *high_to_save = high;
760 num_to_save = high - low + 1;
761 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
763 /* If we don't have the required alignment, must do this
765 if ((low & (MIN (GET_MODE_SIZE (save_mode),
766 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
769 #ifdef ARGS_GROW_DOWNWARD
774 stack_area = gen_rtx_MEM (save_mode,
775 memory_address (save_mode,
776 plus_constant (argblock,
779 set_mem_align (stack_area, PARM_BOUNDARY);
780 if (save_mode == BLKmode)
782 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
783 emit_block_move (validize_mem (save_area), stack_area,
784 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
788 save_area = gen_reg_rtx (save_mode);
789 emit_move_insn (save_area, stack_area);
799 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
801 enum machine_mode save_mode = GET_MODE (save_area);
805 #ifdef ARGS_GROW_DOWNWARD
806 delta = -high_to_save;
810 stack_area = gen_rtx_MEM (save_mode,
811 memory_address (save_mode,
812 plus_constant (argblock, delta)));
813 set_mem_align (stack_area, PARM_BOUNDARY);
815 if (save_mode != BLKmode)
816 emit_move_insn (stack_area, save_area);
818 emit_block_move (stack_area, validize_mem (save_area),
819 GEN_INT (high_to_save - low_to_save + 1),
822 #endif /* REG_PARM_STACK_SPACE */
824 /* If any elements in ARGS refer to parameters that are to be passed in
825 registers, but not in memory, and whose alignment does not permit a
826 direct copy into registers, copy the values into a group of pseudos
827 which we will later copy into the appropriate hard registers.
829 Pseudos for each unaligned argument will be stored into the array
830 args[argnum].aligned_regs. The caller is responsible for deallocating
831 the aligned_regs array if it is nonzero. */
834 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
838 for (i = 0; i < num_actuals; i++)
839 if (args[i].reg != 0 && ! args[i].pass_on_stack
840 && args[i].mode == BLKmode
841 && MEM_P (args[i].value)
842 && (MEM_ALIGN (args[i].value)
843 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
845 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
846 int endian_correction = 0;
850 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
851 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
855 args[i].n_aligned_regs
856 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
859 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
861 /* Structures smaller than a word are normally aligned to the
862 least significant byte. On a BYTES_BIG_ENDIAN machine,
863 this means we must skip the empty high order bytes when
864 calculating the bit offset. */
865 if (bytes < UNITS_PER_WORD
866 #ifdef BLOCK_REG_PADDING
867 && (BLOCK_REG_PADDING (args[i].mode,
868 TREE_TYPE (args[i].tree_value), 1)
874 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
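/* Illustrative example (hypothetical 32-bit big-endian target): a 3-byte
   structure occupies only the 24 least significant bits of a word, so
   endian_correction = 32 - 3 * 8 = 8, i.e. the 8 empty high-order bits are
   skipped when the value is stored into the word_mode pseudo below.  */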
876 for (j = 0; j < args[i].n_aligned_regs; j++)
878 rtx reg = gen_reg_rtx (word_mode);
879 rtx word = operand_subword_force (args[i].value, j, BLKmode);
880 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
882 args[i].aligned_regs[j] = reg;
883 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
884 word_mode, word_mode);
886 /* There is no need to restrict this code to loading items
887 in TYPE_ALIGN sized hunks. The bitfield instructions can
888 load up entire word sized registers efficiently.
890 ??? This may not be needed anymore.
891 We used to emit a clobber here, but that doesn't let later
892 passes optimize the instructions we emit. By storing 0 into
893 the register, later passes know that the first AND to zero out the
894 bitfield being set in the register is unnecessary. The store
895 of 0 will be deleted, as will at least the first AND.
897 emit_move_insn (reg, const0_rtx);
899 bytes -= bitsize / BITS_PER_UNIT;
900 store_bit_field (reg, bitsize, endian_correction, word_mode,
906 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
909 NUM_ACTUALS is the total number of parameters.
911 N_NAMED_ARGS is the total number of named arguments.
913 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
916 FNDECL is the tree code for the target of this call (if known)
918 ARGS_SO_FAR holds state needed by the target to know where to place
921 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
922 for arguments which are passed in registers.
924 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
925 and may be modified by this routine.
927 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
928 flags which may be modified by this routine.
930 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
931 that requires allocation of stack space.
933 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
934 the thunked-to function. */
937 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
938 struct arg_data *args,
939 struct args_size *args_size,
940 int n_named_args ATTRIBUTE_UNUSED,
941 tree exp, tree struct_value_addr_value,
942 tree fndecl, tree fntype,
943 CUMULATIVE_ARGS *args_so_far,
944 int reg_parm_stack_space,
945 rtx *old_stack_level, int *old_pending_adj,
946 int *must_preallocate, int *ecf_flags,
947 bool *may_tailcall, bool call_from_thunk_p)
949 location_t loc = EXPR_LOCATION (exp);
950 /* 1 if scanning parms front to back, -1 if scanning back to front. */
953 /* Count arg position in order args appear. */
958 args_size->constant = 0;
961 /* In this loop, we consider args in the order they are written.
962 We fill up ARGS from the front or from the back if necessary
963 so that in any case the first arg to be pushed ends up at the front. */
965 if (PUSH_ARGS_REVERSED)
967 i = num_actuals - 1, inc = -1;
968 /* In this case, must reverse order of args
969 so that we compute and push the last arg first. */
976 /* First fill in the actual arguments in the ARGS array, splitting
977 complex arguments if necessary. */
980 call_expr_arg_iterator iter;
983 if (struct_value_addr_value)
985 args[j].tree_value = struct_value_addr_value;
988 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
990 tree argtype = TREE_TYPE (arg);
991 if (targetm.calls.split_complex_arg
993 && TREE_CODE (argtype) == COMPLEX_TYPE
994 && targetm.calls.split_complex_arg (argtype))
996 tree subtype = TREE_TYPE (argtype);
997 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
999 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1002 args[j].tree_value = arg;
1007 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1008 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1010 tree type = TREE_TYPE (args[i].tree_value);
1012 enum machine_mode mode;
1014 /* Replace erroneous argument with constant zero. */
1015 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1016 args[i].tree_value = integer_zero_node, type = integer_type_node;
1018 /* If TYPE is a transparent union or record, pass things the way
1019 we would pass the first field of the union or record. We have
1020 already verified that the modes are the same. */
1021 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1022 && TYPE_TRANSPARENT_AGGR (type))
1023 type = TREE_TYPE (first_field (type));
1025 /* Decide where to pass this arg.
1027 args[i].reg is nonzero if all or part is passed in registers.
1029 args[i].partial is nonzero if part but not all is passed in registers,
1030 and the exact value says how many bytes are passed in registers.
1032 args[i].pass_on_stack is nonzero if the argument must at least be
1033 computed on the stack. It may then be loaded back into registers
1034 if args[i].reg is nonzero.
1036 These decisions are driven by the FUNCTION_... macros and must agree
1037 with those made by function.c. */
1039 /* See if this argument should be passed by invisible reference. */
1040 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1041 type, argpos < n_named_args))
1047 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1048 type, argpos < n_named_args);
1050 /* If we're compiling a thunk, pass through invisible references
1051 instead of making a copy. */
1052 if (call_from_thunk_p
1054 && !TREE_ADDRESSABLE (type)
1055 && (base = get_base_address (args[i].tree_value))
1056 && TREE_CODE (base) != SSA_NAME
1057 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1059 /* We can't use sibcalls if a callee-copied argument is
1060 stored in the current function's frame. */
1061 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1062 *may_tailcall = false;
1064 args[i].tree_value = build_fold_addr_expr_loc (loc,
1065 args[i].tree_value);
1066 type = TREE_TYPE (args[i].tree_value);
1068 if (*ecf_flags & ECF_CONST)
1069 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1073 /* We make a copy of the object and pass the address to the
1074 function being called. */
1077 if (!COMPLETE_TYPE_P (type)
1078 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1079 || (flag_stack_check == GENERIC_STACK_CHECK
1080 && compare_tree_int (TYPE_SIZE_UNIT (type),
1081 STACK_CHECK_MAX_VAR_SIZE) > 0))
1083 /* This is a variable-sized object. Make space on the stack
1085 rtx size_rtx = expr_size (args[i].tree_value);
1087 if (*old_stack_level == 0)
1089 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1090 *old_pending_adj = pending_stack_adjust;
1091 pending_stack_adjust = 0;
1094 copy = gen_rtx_MEM (BLKmode,
1095 allocate_dynamic_stack_space
1096 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1097 set_mem_attributes (copy, type, 1);
1100 copy = assign_temp (type, 0, 1, 0);
1102 store_expr (args[i].tree_value, copy, 0, false);
1104 /* Just change the const function to pure and then let
1105 the next test clear the pure based on
1107 if (*ecf_flags & ECF_CONST)
1109 *ecf_flags &= ~ECF_CONST;
1110 *ecf_flags |= ECF_PURE;
1113 if (!callee_copies && *ecf_flags & ECF_PURE)
1114 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1117 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1118 type = TREE_TYPE (args[i].tree_value);
1119 *may_tailcall = false;
1123 unsignedp = TYPE_UNSIGNED (type);
1124 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1125 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1127 args[i].unsignedp = unsignedp;
1128 args[i].mode = mode;
1130 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1131 argpos < n_named_args);
1133 /* If this is a sibling call and the machine has register windows, the
1134 register window has to be unwound before calling the routine, so
1135 arguments have to go into the incoming registers. */
1136 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1137 args[i].tail_call_reg
1138 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1139 argpos < n_named_args);
1141 args[i].tail_call_reg = args[i].reg;
1145 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1146 argpos < n_named_args);
1148 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1150 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1151 it means that we are to pass this arg in the register(s) designated
1152 by the PARALLEL, but also to pass it in the stack. */
1153 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1154 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1155 args[i].pass_on_stack = 1;
1157 /* If this is an addressable type, we must preallocate the stack
1158 since we must evaluate the object into its final location.
1160 If this is to be passed in both registers and the stack, it is simpler
1162 if (TREE_ADDRESSABLE (type)
1163 || (args[i].pass_on_stack && args[i].reg != 0))
1164 *must_preallocate = 1;
1166 /* Compute the stack-size of this argument. */
1167 if (args[i].reg == 0 || args[i].partial != 0
1168 || reg_parm_stack_space > 0
1169 || args[i].pass_on_stack)
1170 locate_and_pad_parm (mode, type,
1171 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1176 args[i].pass_on_stack ? 0 : args[i].partial,
1177 fndecl, args_size, &args[i].locate);
1178 #ifdef BLOCK_REG_PADDING
1180 /* The argument is passed entirely in registers. See at which
1181 end it should be padded. */
1182 args[i].locate.where_pad =
1183 BLOCK_REG_PADDING (mode, type,
1184 int_size_in_bytes (type) <= UNITS_PER_WORD);
1187 /* Update ARGS_SIZE, the total stack space for args so far. */
1189 args_size->constant += args[i].locate.size.constant;
1190 if (args[i].locate.size.var)
1191 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1193 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1194 have been used, etc. */
1196 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1197 type, argpos < n_named_args);
1201 /* Update ARGS_SIZE to contain the total size for the argument block.
1202 Return the original constant component of the argument block's size.
1204 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1205 for arguments passed in registers. */
1208 compute_argument_block_size (int reg_parm_stack_space,
1209 struct args_size *args_size,
1210 tree fndecl ATTRIBUTE_UNUSED,
1211 tree fntype ATTRIBUTE_UNUSED,
1212 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1214 int unadjusted_args_size = args_size->constant;
1216 /* For accumulate outgoing args mode we don't need to align, since the frame
1217 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1218 backends from generating misaligned frame sizes. */
1219 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1220 preferred_stack_boundary = STACK_BOUNDARY;
1222 /* Compute the actual size of the argument block required. The variable
1223 and constant sizes must be combined, the size may have to be rounded,
1224 and there may be a minimum required size. */
1228 args_size->var = ARGS_SIZE_TREE (*args_size);
1229 args_size->constant = 0;
1231 preferred_stack_boundary /= BITS_PER_UNIT;
1232 if (preferred_stack_boundary > 1)
1234 /* We don't handle this case yet. To handle it correctly we have
1235 to add the delta, round and subtract the delta.
1236 Currently no machine description requires this support. */
1237 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1238 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1241 if (reg_parm_stack_space > 0)
1244 = size_binop (MAX_EXPR, args_size->var,
1245 ssize_int (reg_parm_stack_space));
1247 /* The area corresponding to register parameters is not to count in
1248 the size of the block we need. So make the adjustment. */
1249 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1251 = size_binop (MINUS_EXPR, args_size->var,
1252 ssize_int (reg_parm_stack_space));
1257 preferred_stack_boundary /= BITS_PER_UNIT;
1258 if (preferred_stack_boundary < 1)
1259 preferred_stack_boundary = 1;
1260 args_size->constant = (((args_size->constant
1261 + stack_pointer_delta
1262 + preferred_stack_boundary - 1)
1263 / preferred_stack_boundary
1264 * preferred_stack_boundary)
1265 - stack_pointer_delta);
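/* Worked example (illustrative values): with a 16-byte preferred boundary,
   stack_pointer_delta == 4 and an unrounded constant size of 20, the
   expression above yields ((20 + 4 + 15) / 16) * 16 - 4 == 28, so pushing
   28 bytes of arguments brings the stack back to a 16-byte multiple.  */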
1267 args_size->constant = MAX (args_size->constant,
1268 reg_parm_stack_space);
1270 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1271 args_size->constant -= reg_parm_stack_space;
1273 return unadjusted_args_size;
1276 /* Precompute parameters as needed for a function call.
1278 FLAGS is mask of ECF_* constants.
1280 NUM_ACTUALS is the number of arguments.
1282 ARGS is an array containing information for each argument; this
1283 routine fills in the INITIAL_VALUE and VALUE fields for each
1284 precomputed argument. */
1287 precompute_arguments (int num_actuals, struct arg_data *args)
1291 /* If this is a libcall, then precompute all arguments so that we do not
1292 get extraneous instructions emitted as part of the libcall sequence. */
1294 /* If we preallocated the stack space, and some arguments must be passed
1295 on the stack, then we must precompute any parameter which contains a
1296 function call which will store arguments on the stack.
1297 Otherwise, evaluating the parameter may clobber previous parameters
1298 which have already been stored into the stack. (We have code to avoid
1299 such a case by saving the outgoing stack arguments, but it results in
1301 if (!ACCUMULATE_OUTGOING_ARGS)
1304 for (i = 0; i < num_actuals; i++)
1307 enum machine_mode mode;
1309 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1312 /* If this is an addressable type, we cannot pre-evaluate it. */
1313 type = TREE_TYPE (args[i].tree_value);
1314 gcc_assert (!TREE_ADDRESSABLE (type));
1316 args[i].initial_value = args[i].value
1317 = expand_normal (args[i].tree_value);
1319 mode = TYPE_MODE (type);
1320 if (mode != args[i].mode)
1322 int unsignedp = args[i].unsignedp;
1324 = convert_modes (args[i].mode, mode,
1325 args[i].value, args[i].unsignedp);
1327 /* CSE will replace this only if it contains the args[i].value
1328 pseudo, so convert it down to the declared mode using
1330 if (REG_P (args[i].value)
1331 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1332 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1334 args[i].initial_value
1335 = gen_lowpart_SUBREG (mode, args[i].value);
1336 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1337 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1344 /* Given the current state of MUST_PREALLOCATE and information about
1345 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1346 compute and return the final value for MUST_PREALLOCATE. */
1349 finalize_must_preallocate (int must_preallocate, int num_actuals,
1350 struct arg_data *args, struct args_size *args_size)
1352 /* See if we have or want to preallocate stack space.
1354 If we would have to push a partially-in-regs parm
1355 before other stack parms, preallocate stack space instead.
1357 If the size of some parm is not a multiple of the required stack
1358 alignment, we must preallocate.
1360 If the total size of arguments that would otherwise create a copy in
1361 a temporary (such as a CALL) is more than half the total argument list
1362 size, preallocation is faster.
1364 Another reason to preallocate is if we have a machine (like the m88k)
1365 where stack alignment is required to be maintained between every
1366 pair of insns, not just when the call is made. However, we assume here
1367 that such machines either do not have push insns (and hence preallocation
1368 would occur anyway) or the problem is taken care of with
1371 if (! must_preallocate)
1373 int partial_seen = 0;
1374 int copy_to_evaluate_size = 0;
1377 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1379 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1381 else if (partial_seen && args[i].reg == 0)
1382 must_preallocate = 1;
1384 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1385 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1386 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1387 || TREE_CODE (args[i].tree_value) == COND_EXPR
1388 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1389 copy_to_evaluate_size
1390 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1393 if (copy_to_evaluate_size * 2 >= args_size->constant
1394 && args_size->constant > 0)
1395 must_preallocate = 1;
1397 return must_preallocate;
1400 /* If we preallocated stack space, compute the address of each argument
1401 and store it into the ARGS array.
1403 We need not ensure it is a valid memory address here; it will be
1404 validized when it is used.
1406 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1409 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1413 rtx arg_reg = argblock;
1414 int i, arg_offset = 0;
1416 if (GET_CODE (argblock) == PLUS)
1417 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1419 for (i = 0; i < num_actuals; i++)
1421 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1422 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1424 unsigned int align, boundary;
1425 unsigned int units_on_stack = 0;
1426 enum machine_mode partial_mode = VOIDmode;
1428 /* Skip this parm if it will not be passed on the stack. */
1429 if (! args[i].pass_on_stack
1431 && args[i].partial == 0)
1434 if (CONST_INT_P (offset))
1435 addr = plus_constant (arg_reg, INTVAL (offset));
1437 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1439 addr = plus_constant (addr, arg_offset);
1441 if (args[i].partial != 0)
1443 /* Only part of the parameter is being passed on the stack.
1444 Generate a simple memory reference of the correct size. */
1445 units_on_stack = args[i].locate.size.constant;
1446 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1448 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1449 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1453 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1454 set_mem_attributes (args[i].stack,
1455 TREE_TYPE (args[i].tree_value), 1);
1457 align = BITS_PER_UNIT;
1458 boundary = args[i].locate.boundary;
1459 if (args[i].locate.where_pad != downward)
1461 else if (CONST_INT_P (offset))
1463 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1464 align = align & -align;
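/* Illustrative note: ORing the bit offset with BOUNDARY and keeping only the
   lowest set bit yields the largest power of two known to divide both, e.g.
   a 12-byte offset (96 bits) against a 64-bit boundary gives a known
   alignment of 32 bits.  */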
1466 set_mem_align (args[i].stack, align);
1468 if (CONST_INT_P (slot_offset))
1469 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1471 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1473 addr = plus_constant (addr, arg_offset);
1475 if (args[i].partial != 0)
1477 /* Only part of the parameter is being passed on the stack.
1478 Generate a simple memory reference of the correct size.
1480 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1481 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1485 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1486 set_mem_attributes (args[i].stack_slot,
1487 TREE_TYPE (args[i].tree_value), 1);
1489 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1491 /* Function incoming arguments may overlap with sibling call
1492 outgoing arguments and we cannot allow reordering of reads
1493 from function arguments with stores to outgoing arguments
1494 of sibling calls. */
1495 set_mem_alias_set (args[i].stack, 0);
1496 set_mem_alias_set (args[i].stack_slot, 0);
1501 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1502 in a call instruction.
1504 FNDECL is the tree node for the target function. For an indirect call
1505 FNDECL will be NULL_TREE.
1507 ADDR is the operand 0 of CALL_EXPR for this call. */
1510 rtx_for_function_call (tree fndecl, tree addr)
1514 /* Get the function to call, in the form of RTL. */
1517 /* If this is the first use of the function, see if we need to
1518 make an external definition for it. */
1519 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1521 assemble_external (fndecl);
1522 TREE_USED (fndecl) = 1;
1525 /* Get a SYMBOL_REF rtx for the function address. */
1526 funexp = XEXP (DECL_RTL (fndecl), 0);
1529 /* Generate an rtx (probably a pseudo-register) for the address. */
1532 funexp = expand_normal (addr);
1533 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1538 /* Return true if and only if SIZE storage units (usually bytes)
1539 starting from address ADDR overlap with already clobbered argument
1540 area. This function is used to determine if we should give up a
1544 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1548 if (addr == crtl->args.internal_arg_pointer)
1550 else if (GET_CODE (addr) == PLUS
1551 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1552 && CONST_INT_P (XEXP (addr, 1)))
1553 i = INTVAL (XEXP (addr, 1));
1554 /* Return true for arg pointer based indexed addressing. */
1555 else if (GET_CODE (addr) == PLUS
1556 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1557 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
1562 #ifdef ARGS_GROW_DOWNWARD
1567 unsigned HOST_WIDE_INT k;
1569 for (k = 0; k < size; k++)
1570 if (i + k < stored_args_map->n_bits
1571 && TEST_BIT (stored_args_map, i + k))
1578 /* Do the register loads required for any wholly-register parms or any
1579 parms which are passed both on the stack and in a register. Their
1580 expressions were already evaluated.
1582 Mark all register-parms as living through the call, putting these USE
1583 insns in the CALL_INSN_FUNCTION_USAGE field.
1585 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1586 checking, setting *SIBCALL_FAILURE if appropriate. */
1589 load_register_parameters (struct arg_data *args, int num_actuals,
1590 rtx *call_fusage, int flags, int is_sibcall,
1591 int *sibcall_failure)
1595 for (i = 0; i < num_actuals; i++)
1597 rtx reg = ((flags & ECF_SIBCALL)
1598 ? args[i].tail_call_reg : args[i].reg);
1601 int partial = args[i].partial;
1604 rtx before_arg = get_last_insn ();
1605 /* Set non-negative if we must move a word at a time, even if
1606 just one word (e.g., partial == 4 && mode == DFmode). Set
1607 to -1 if we just use a normal move insn. This value can be
1608 zero if the argument is a zero size structure. */
1610 if (GET_CODE (reg) == PARALLEL)
1614 gcc_assert (partial % UNITS_PER_WORD == 0);
1615 nregs = partial / UNITS_PER_WORD;
1617 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1619 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1620 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1623 size = GET_MODE_SIZE (args[i].mode);
1625 /* Handle calls that pass values in multiple non-contiguous
1626 locations. The Irix 6 ABI has examples of this. */
1628 if (GET_CODE (reg) == PARALLEL)
1629 emit_group_move (reg, args[i].parallel_value);
1631 /* If simple case, just do move. If normal partial, store_one_arg
1632 has already loaded the register for us. In all other cases,
1633 load the register(s) from memory. */
1635 else if (nregs == -1)
1637 emit_move_insn (reg, args[i].value);
1638 #ifdef BLOCK_REG_PADDING
1639 /* Handle the case where we have a value that needs shifting
1640 up to the msb, e.g. a QImode value when we're padding
1641 upward on a BYTES_BIG_ENDIAN machine. */
1642 if (size < UNITS_PER_WORD
1643 && (args[i].locate.where_pad
1644 == (BYTES_BIG_ENDIAN ? upward : downward)))
1647 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1649 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1650 report the whole reg as used. Strictly speaking, the
1651 call only uses SIZE bytes at the msb end, but it doesn't
1652 seem worth generating rtl to say that. */
1653 reg = gen_rtx_REG (word_mode, REGNO (reg));
1654 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1655 build_int_cst (NULL_TREE, shift),
1658 emit_move_insn (reg, x);
1663 /* If we have pre-computed the values to put in the registers in
1664 the case of non-aligned structures, copy them in now. */
1666 else if (args[i].n_aligned_regs != 0)
1667 for (j = 0; j < args[i].n_aligned_regs; j++)
1668 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1669 args[i].aligned_regs[j]);
1671 else if ((partial == 0 || args[i].pass_on_stack)
1674 rtx mem = validize_mem (args[i].value);
1676 /* Check for overlap with already clobbered argument area. */
1678 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1680 *sibcall_failure = 1;
1682 /* Handle a BLKmode that needs shifting. */
1683 if (nregs == 1 && size < UNITS_PER_WORD
1684 #ifdef BLOCK_REG_PADDING
1685 && args[i].locate.where_pad == downward
1691 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1692 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1693 rtx x = gen_reg_rtx (word_mode);
1694 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1695 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1698 emit_move_insn (x, tem);
1699 x = expand_shift (dir, word_mode, x,
1700 build_int_cst (NULL_TREE, shift),
1703 emit_move_insn (ri, x);
1706 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1709 /* When a parameter is a block, and perhaps in other cases, it is
1710 possible that it did a load from an argument slot that was
1711 already clobbered. */
1713 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1714 *sibcall_failure = 1;
1716 /* Handle calls that pass values in multiple non-contiguous
1717 locations. The Irix 6 ABI has examples of this. */
1718 if (GET_CODE (reg) == PARALLEL)
1719 use_group_regs (call_fusage, reg);
1720 else if (nregs == -1)
1721 use_reg (call_fusage, reg);
1723 use_regs (call_fusage, REGNO (reg), nregs);
1728 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1729 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1730 bytes, then we would need to push some additional bytes to pad the
1731 arguments. So, we compute an adjustment to the stack pointer for an
1732 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1733 bytes. Then, when the arguments are pushed the stack will be perfectly
1734 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1735 be popped after the call. Returns the adjustment. */
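/* Worked example (illustrative values): with PENDING_STACK_ADJUST == 64, a
   16-byte preferred unit boundary, stack_pointer_delta == 0 and
   UNADJUSTED_ARGS_SIZE == 4, the arguments alone would leave the stack
   misaligned by 4 bytes, so the adjustment below pops only 64 - 12 == 52
   bytes now and ARGS_SIZE->CONSTANT becomes 64 - 52 + 4 == 16 bytes to pop
   after the call.  */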
1738 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1739 struct args_size *args_size,
1740 unsigned int preferred_unit_stack_boundary)
1742 /* The number of bytes to pop so that the stack will be
1743 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1744 HOST_WIDE_INT adjustment;
1745 /* The alignment of the stack after the arguments are pushed, if we
1746 just pushed the arguments without adjusting the stack here. */
1747 unsigned HOST_WIDE_INT unadjusted_alignment;
1749 unadjusted_alignment
1750 = ((stack_pointer_delta + unadjusted_args_size)
1751 % preferred_unit_stack_boundary);
1753 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1754 as possible -- leaving just enough left to cancel out the
1755 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1756 PENDING_STACK_ADJUST is non-negative, and congruent to
1757 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1759 /* Begin by trying to pop all the bytes. */
1760 unadjusted_alignment
1761 = (unadjusted_alignment
1762 - (pending_stack_adjust % preferred_unit_stack_boundary));
1763 adjustment = pending_stack_adjust;
1764 /* Push enough additional bytes that the stack will be aligned
1765 after the arguments are pushed. */
1766 if (preferred_unit_stack_boundary > 1)
1768 if (unadjusted_alignment > 0)
1769 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1771 adjustment += unadjusted_alignment;
1774 /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
1775 bytes after the call. The right number is the entire
1776 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1777 by the arguments in the first place. */
1779 = pending_stack_adjust - adjustment + unadjusted_args_size;
1784 /* Scan expression X to see whether it dereferences any argument slots
1785 we have already clobbered with tail call arguments (as noted in the stored_args_map
1787 Return nonzero if the expression X dereferences such argument slots,
1791 check_sibcall_argument_overlap_1 (rtx x)
1800 code = GET_CODE (x);
1803 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1804 GET_MODE_SIZE (GET_MODE (x)));
1806 /* Scan all subexpressions. */
1807 fmt = GET_RTX_FORMAT (code);
1808 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1812 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1815 else if (*fmt == 'E')
1817 for (j = 0; j < XVECLEN (x, i); j++)
1818 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1825 /* Scan the sequence after INSN to see whether it dereferences any argument slots
1826 we have already clobbered with tail call arguments (as noted in the stored_args_map
1827 bitmap). If MARK_STORED_ARGS_MAP, add the stack slots for ARG to the
1828 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
1829 should be 0). Return nonzero if the sequence after INSN dereferences such argument
1830 slots, zero otherwise. */
1833 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1837 if (insn == NULL_RTX)
1838 insn = get_insns ();
1840 insn = NEXT_INSN (insn);
1842 for (; insn; insn = NEXT_INSN (insn))
1844 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1847 if (mark_stored_args_map)
1849 #ifdef ARGS_GROW_DOWNWARD
1850 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1852 low = arg->locate.slot_offset.constant;
1855 for (high = low + arg->locate.size.constant; low < high; low++)
1856 SET_BIT (stored_args_map, low);
1858 return insn != NULL_RTX;
1861 /* Given that a function returns a value of mode MODE at the most
1862 significant end of hard register VALUE, shift VALUE left or right
1863 as specified by LEFT_P. Return true if some action was needed. */
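/* Illustrative example: if the target returns an SImode value in the most
   significant half of a 64-bit hard register, SHIFT below is 64 - 32 == 32
   and the value is shifted by 32 bits in the direction given by LEFT_P.  */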
1866 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1868 HOST_WIDE_INT shift;
1870 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1871 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1875 /* Use ashr rather than lshr for right shifts. This is for the benefit
1876 of the MIPS port, which requires SImode values to be sign-extended
1877 when stored in 64-bit registers. */
1878 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1879 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1884 /* If X is a likely-spilled register value, copy it to a pseudo
1885 register and return that register. Return X otherwise. */
1888 avoid_likely_spilled_reg (rtx x)
1893 && HARD_REGISTER_P (x)
1894 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
1896 /* Make sure that we generate a REG rather than a CONCAT.
1897 Moves into CONCATs can need nontrivial instructions,
1898 and the whole point of this function is to avoid
1899 using the hard register directly in such a situation. */
1900 generating_concat_p = 0;
1901 new_rtx = gen_reg_rtx (GET_MODE (x));
1902 generating_concat_p = 1;
1903 emit_move_insn (new_rtx, x);
1909 /* Generate all the code for a CALL_EXPR exp
1910 and return an rtx for its value.
1911 Store the value in TARGET (specified as an rtx) if convenient.
1912 If the value is stored in TARGET then TARGET is returned.
1913 If IGNORE is nonzero, then we ignore the value of the function call. */
1916 expand_call (tree exp, rtx target, int ignore)
1918 /* Nonzero if we are currently expanding a call. */
1919 static int currently_expanding_call = 0;
1921 /* RTX for the function to be called. */
1923 /* Sequence of insns to perform a normal "call". */
1924 rtx normal_call_insns = NULL_RTX;
1925 /* Sequence of insns to perform a tail "call". */
1926 rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
1929 tree type_arg_types;
1931 /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
1936 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1939 /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
1942 /* Address where we should return a BLKmode value;
1943 0 if value not BLKmode. */
1944 rtx structure_value_addr = 0;
1945 /* Nonzero if that address is being passed by treating it as
1946 an extra, implicit first parameter. Otherwise,
1947 it is passed by being copied directly into struct_value_rtx. */
1948 int structure_value_addr_parm = 0;
1949 /* Holds the value of implicit argument for the struct value. */
1950 tree structure_value_addr_value = NULL_TREE;
1951 /* Size of aggregate value wanted, or zero if none wanted
1952 or if we are using the non-reentrant PCC calling convention
1953 or expecting the value in registers. */
1954 HOST_WIDE_INT struct_value_size = 0;
1955 /* Nonzero if called function returns an aggregate in memory PCC style,
1956 by returning the address of where to find it. */
1957 int pcc_struct_value = 0;
1958 rtx struct_value = 0;
  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
1962 /* Number of named args. Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
1965 /* Number of complex actual arguments that need to be split. */
1966 int num_complex_actuals = 0;
1968 /* Vector of information about each argument.
1969 Arguments are numbered in the order they will be pushed,
1970 not the order they are written. */
1971 struct arg_data *args;
1973 /* Total size in bytes of all the stack-parms scanned so far. */
1974 struct args_size args_size;
1975 struct args_size adjusted_args_size;
1976 /* Size of arguments before any adjustments (such as rounding). */
1977 int unadjusted_args_size;
1978 /* Data on reg parms scanned so far. */
1979 CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
1982 /* Nonzero if this is an indirect function call. */
1984 /* Nonzero if we must avoid push-insns in the args for this call.
1985 If stack space is allocated for register parameters, but not by the
1986 caller, then it is preallocated in the fixed part of the stack frame.
1987 So the entire argument block must then be preallocated (i.e., we
1988 ignore PUSH_ROUNDING in that case). */
1990 int must_preallocate = !PUSH_ARGS;
1992 /* Size of the stack reserved for parameter registers. */
1993 int reg_parm_stack_space = 0;
1995 /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;
  /* Mask of ECF_ flags.  */
  int flags = 0;
2001 #ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif
2008 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2009 char *initial_stack_usage_map = stack_usage_map;
2010 char *stack_usage_map_buf = NULL;
2012 int old_stack_allocated;
2014 /* State variables to track stack modifications. */
2015 rtx old_stack_level = 0;
2016 int old_stack_arg_under_construction = 0;
2017 int old_pending_adj = 0;
2018 int old_inhibit_defer_pop = inhibit_defer_pop;
2020 /* Some stack pointer alterations we make are performed via
2021 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2022 which we then also need to save/restore along the way. */
2023 int old_stack_pointer_delta = 0;
2026 tree addr = CALL_EXPR_FN (exp);
2028 /* The alignment of the stack, in bits. */
2029 unsigned HOST_WIDE_INT preferred_stack_boundary;
2030 /* The alignment of the stack, in bytes. */
2031 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2032 /* The static chain value to use for this call. */
2033 rtx static_chain_value;
2034 /* See if this is "nothrow" function call. */
2035 if (TREE_NOTHROW (exp))
2036 flags |= ECF_NOTHROW;
2038 /* See if we can find a DECL-node for the actual function, and get the
2039 function attributes (flags) from the function decl or type node. */
2040 fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
    }
2051 rettype = TREE_TYPE (exp);
2053 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2055 /* Warn if this value is an aggregate type,
2056 regardless of which calling convention we are using for it. */
2057 if (AGGREGATE_TYPE_P (rettype))
2058 warning (OPT_Waggregate_return, "function call has aggregate value");
  /* If the result of a non-looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side effects.  */
2064 if ((flags & (ECF_CONST | ECF_PURE))
2065 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2066 && (ignore || target == const0_rtx
2067 || TYPE_MODE (rettype) == VOIDmode))
2069 bool volatilep = false;
2071 call_expr_arg_iterator iter;
2073 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2074 if (TREE_THIS_VOLATILE (arg))
2082 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2083 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
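
      /* For example (illustration only): a statement such as

           (void) pure_fn (x, y);

         where pure_fn is const or pure and neither argument is volatile
         never needs a call_insn; the arguments are expanded above purely
         for any side effects they may have.  */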
#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif
2092 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2093 && reg_parm_stack_space > 0 && PUSH_ARGS)
2094 must_preallocate = 1;
2096 /* Set up a place to return a structure. */
2098 /* Cater to broken compilers. */
2099 if (aggregate_value_p (exp, fntype))
2101 /* This call returns a big structure. */
2102 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2104 #ifdef PCC_STATIC_STRUCT_RETURN
2106 pcc_struct_value = 1;
2108 #else /* not PCC_STATIC_STRUCT_RETURN */
2110 struct_value_size = int_size_in_bytes (rettype);
2112 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2113 structure_value_addr = XEXP (target, 0);
2116 /* For variable-sized objects, we must be called with a target
2117 specified. If we were to allocate space on the stack here,
2118 we would have no way of knowing when to free it. */
2119 rtx d = assign_temp (rettype, 0, 1, 1);
2121 mark_temp_addr_taken (d);
2122 structure_value_addr = XEXP (d, 0);
2126 #endif /* not PCC_STATIC_STRUCT_RETURN */
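
  /* For illustration (hypothetical example): for

       struct big { char c[64]; };
       struct big v = f ();

     aggregate_value_p is true, so the ECF_CONST/ECF_PURE flags are dropped
     and either a PCC-style return (the callee returns the address of the
     value) or an explicit return slot whose address becomes
     structure_value_addr is arranged here.  */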
2129 /* Figure out the amount to which the stack should be aligned. */
2130 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2133 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2134 /* Without automatic stack alignment, we can't increase preferred
2135 stack boundary. With automatic stack alignment, it is
2136 unnecessary since unless we can guarantee that all callers will
	 align the outgoing stack properly, callee has to align its
	 stack anyway.  */
      if (i
	  && i->preferred_incoming_stack_boundary
2141 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2142 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2145 /* Operand 0 is a pointer-to-function; get the type of the function. */
2146 funtype = TREE_TYPE (addr);
2147 gcc_assert (POINTER_TYPE_P (funtype));
2148 funtype = TREE_TYPE (funtype);
2150 /* Count whether there are actual complex arguments that need to be split
2151 into their real and imaginary parts. Munge the type_arg_types
2152 appropriately here as well. */
2153 if (targetm.calls.split_complex_arg)
2155 call_expr_arg_iterator iter;
2157 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2159 tree type = TREE_TYPE (arg);
2160 if (type && TREE_CODE (type) == COMPLEX_TYPE
2161 && targetm.calls.split_complex_arg (type))
2162 num_complex_actuals++;
2164 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2167 type_arg_types = TYPE_ARG_TYPES (funtype);
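
  /* For example (illustration only): a call f (z, i) where z has type
     _Complex double, on a target whose split_complex_arg hook accepts that
     type, contributes 1 to num_complex_actuals, because the one written
     argument will be pushed as two actuals: its real and imaginary parts.  */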
2169 if (flags & ECF_MAY_BE_ALLOCA)
2170 cfun->calls_alloca = 1;
2172 /* If struct_value_rtx is 0, it means pass the address
2173 as if it were an extra parameter. Put the argument expression
2174 in structure_value_addr_value. */
2175 if (structure_value_addr && struct_value == 0)
2177 /* If structure_value_addr is a REG other than
2178 virtual_outgoing_args_rtx, we can use always use it. If it
2179 is not a REG, we must always copy it into a register.
2180 If it is virtual_outgoing_args_rtx, we must copy it to another
2181 register in some cases. */
2182 rtx temp = (!REG_P (structure_value_addr)
2183 || (ACCUMULATE_OUTGOING_ARGS
2184 && stack_arg_under_construction
2185 && structure_value_addr == virtual_outgoing_args_rtx)
2186 ? copy_addr_to_reg (convert_memory_address
2187 (Pmode, structure_value_addr))
2188 : structure_value_addr);
2190 structure_value_addr_value =
2191 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2192 structure_value_addr_parm = 1;
2195 /* Count the arguments and set NUM_ACTUALS. */
  num_actuals
    = call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2199 /* Compute number of named args.
2200 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
2208 /* If we know nothing, treat all args as named. */
2209 n_named_args = num_actuals;
2211 /* Start updating where the next arg would go.
2213 On some machines (such as the PA) indirect calls have a different
2214 calling convention than normal calls. The fourth argument in
2215 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2217 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2219 /* Now possibly adjust the number of named args.
2220 Normally, don't include the last named arg if anonymous args follow.
2221 We do include the last named arg if
2222 targetm.calls.strict_argument_naming() returns nonzero.
2223 (If no anonymous args follow, the result of list_length is actually
2224 one too large. This is harmless.)
2226 If targetm.calls.pretend_outgoing_varargs_named() returns
2227 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2228 this machine will be able to place unnamed args that were passed
2229 in registers into the stack. So treat all args as named. This
2230 allows the insns emitting for a specific argument list to be
2231 independent of the function declaration.
2233 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2234 we do not have any reliable way to pass unnamed args in
2235 registers, so we must force them into memory. */
  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;
  else if (type_arg_types != 0
	   && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
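
  /* For example (illustration only): for a varargs prototype

       int f (const char *, ...);

     called with three actual arguments, list_length (type_arg_types) is 1,
     so n_named_args starts at 1 (plus the struct value parm, if any) and is
     then adjusted by the two target hooks exactly as described above.  */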
2248 /* Make a vector to hold all the information about each arg. */
2249 args = XALLOCAVEC (struct arg_data, num_actuals);
2250 memset (args, 0, num_actuals * sizeof (struct arg_data));
2252 /* Build up entries in the ARGS array, compute the size of the
2253 arguments into ARGS_SIZE, etc. */
2254 initialize_argument_information (num_actuals, args, &args_size,
2256 structure_value_addr_value, fndecl, fntype,
2257 &args_so_far, reg_parm_stack_space,
2258 &old_stack_level, &old_pending_adj,
2259 &must_preallocate, &flags,
2260 &try_tail_call, CALL_FROM_THUNK_P (exp));
2263 must_preallocate = 1;
2265 /* Now make final decision about preallocating stack space. */
2266 must_preallocate = finalize_must_preallocate (must_preallocate,
2270 /* If the structure value address will reference the stack pointer, we
2271 must stabilize it. We don't need to do this if we know that we are
2272 not going to adjust the stack pointer in processing this call. */
2274 if (structure_value_addr
2275 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2276 || reg_mentioned_p (virtual_outgoing_args_rtx,
2277 structure_value_addr))
2279 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2280 structure_value_addr = copy_to_reg (structure_value_addr);
2282 /* Tail calls can make things harder to debug, and we've traditionally
2283 pushed these optimizations into -O2. Don't try if we're already
2284 expanding a call, as that means we're an argument. Don't try if
2285 there's cleanups, as we know there's code to follow the call. */
2287 if (currently_expanding_call++ != 0
2288 || !flag_optimize_sibling_calls
2290 || dbg_cnt (tail_call) == false)
2293 /* Rest of purposes for tail call optimizations to fail. */
2295 #ifdef HAVE_sibcall_epilogue
2296 !HAVE_sibcall_epilogue
2301 /* Doing sibling call optimization needs some work, since
2302 structure_value_addr can be allocated on the stack.
2303 It does not seem worth the effort since few optimizable
2304 sibling calls will return a structure. */
2305 || structure_value_addr != NULL_RTX
2306 #ifdef REG_PARM_STACK_SPACE
2307 /* If outgoing reg parm stack space changes, we can not do sibcall. */
2308 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2309 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2310 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2312 /* Check whether the target is able to optimize the call
2314 || !targetm.function_ok_for_sibcall (fndecl, exp)
2315 /* Functions that do not return exactly once may not be sibcall
2317 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2318 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2319 /* If the called function is nested in the current one, it might access
2320 some of the caller's arguments, but could clobber them beforehand if
2321 the argument areas are shared. */
2322 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2323 /* If this function requires more stack slots than the current
2324 function, we cannot change it into a sibling call.
2325 crtl->args.pretend_args_size is not part of the
2326 stack allocated by our caller. */
2327 || args_size.constant > (crtl->args.size
2328 - crtl->args.pretend_args_size)
2329 /* If the callee pops its own arguments, then it must pop exactly
2330 the same number of arguments as the current function. */
2331 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2332 != targetm.calls.return_pops_args (current_function_decl,
2333 TREE_TYPE (current_function_decl),
2335 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2338 /* Check if caller and callee disagree in promotion of function
2342 enum machine_mode caller_mode, caller_promoted_mode;
2343 enum machine_mode callee_mode, callee_promoted_mode;
2344 int caller_unsignedp, callee_unsignedp;
2345 tree caller_res = DECL_RESULT (current_function_decl);
2347 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2348 caller_mode = DECL_MODE (caller_res);
2349 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2350 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2351 caller_promoted_mode
2352 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2354 TREE_TYPE (current_function_decl), 1);
2355 callee_promoted_mode
2356 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2359 if (caller_mode != VOIDmode
2360 && (caller_promoted_mode != callee_promoted_mode
2361 || ((caller_mode != caller_promoted_mode
2362 || callee_mode != callee_promoted_mode)
2363 && (caller_unsignedp != callee_unsignedp
2364 || GET_MODE_BITSIZE (caller_mode)
2365 < GET_MODE_BITSIZE (callee_mode)))))
2369 /* Ensure current function's preferred stack boundary is at least
2370 what we need. Stack alignment may also increase preferred stack
2372 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2373 crtl->preferred_stack_boundary = preferred_stack_boundary;
2375 preferred_stack_boundary = crtl->preferred_stack_boundary;
2377 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2379 /* We want to make two insn chains; one for a sibling call, the other
2380 for a normal call. We will select one of the two chains after
2381 initial RTL generation is complete. */
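
  /* Roughly: pass 0 builds the sibling-call (tail-call) sequence and pass 1
     builds the normal call sequence; if anything sets sibcall_failure during
     pass 0, the tail-call chain is discarded and the normal chain is emitted
     instead (see the end of this loop).  */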
2382 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2384 int sibcall_failure = 0;
2385 /* We want to emit any pending stack adjustments before the tail
2386 recursion "call". That way we know any adjustment after the tail
2387 recursion call can be ignored if we indeed use the tail
2389 int save_pending_stack_adjust = 0;
2390 int save_stack_pointer_delta = 0;
2392 rtx before_call, next_arg_reg, after_args;
      if (pass == 0)
	{
	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust = pending_stack_adjust;
	  save_stack_pointer_delta = stack_pointer_delta;
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;
2406 /* Other state variables that we must reinitialize each time
2407 through the loop (that are not initialized by the loop itself). */
2411 /* Start a new sequence for the normal call case.
2413 From this point on, if the sibling call fails, we want to set
2414 sibcall_failure instead of continuing the loop. */
2417 /* Don't let pending stack adjusts add up to too much.
2418 Also, do all pending adjustments now if there is any chance
2419 this might be a call to alloca or if we are expanding a sibling
2421 Also do the adjustments before a throwing call, otherwise
2422 exception handling can fail; PR 19225. */
2423 if (pending_stack_adjust >= 32
2424 || (pending_stack_adjust > 0
2425 && (flags & ECF_MAY_BE_ALLOCA))
2426 || (pending_stack_adjust > 0
	      && flag_exceptions && !(flags & ECF_NOTHROW))
	  || pass == 0)
	do_pending_stack_adjust ();
2431 /* Precompute any arguments as needed. */
2433 precompute_arguments (num_actuals, args);
2435 /* Now we are about to start emitting insns that can be deleted
2436 if a libcall is deleted. */
      if (pass && (flags & ECF_MALLOC))
	start_sequence ();
2440 if (pass == 0 && crtl->stack_protect_guard)
2441 stack_protect_epilogue ();
2443 adjusted_args_size = args_size;
2444 /* Compute the actual size of the argument block required. The variable
2445 and constant sizes must be combined, the size may have to be rounded,
2446 and there may be a minimum required size. When generating a sibcall
2447 pattern, do not round up, since we'll be re-using whatever space our
2449 unadjusted_args_size
2450 = compute_argument_block_size (reg_parm_stack_space,
					 &adjusted_args_size,
					 fndecl, fntype,
					 (pass == 0 ? 0
					  : preferred_stack_boundary));
2456 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2458 /* The argument block when performing a sibling call is the
2459 incoming argument block. */
2462 argblock = crtl->args.internal_arg_pointer;
	  argblock
#ifdef STACK_GROWS_DOWNWARD
	    = plus_constant (argblock, crtl->args.pretend_args_size);
#else
	    = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif
2469 stored_args_map = sbitmap_alloc (args_size.constant);
2470 sbitmap_zero (stored_args_map);
2473 /* If we have no actual push instructions, or shouldn't use them,
2474 make space for all args right now. */
2475 else if (adjusted_args_size.var != 0)
2477 if (old_stack_level == 0)
2479 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2480 old_stack_pointer_delta = stack_pointer_delta;
2481 old_pending_adj = pending_stack_adjust;
2482 pending_stack_adjust = 0;
2483 /* stack_arg_under_construction says whether a stack arg is
2484 being constructed at the old stack level. Pushing the stack
2485 gets a clean outgoing argument block. */
2486 old_stack_arg_under_construction = stack_arg_under_construction;
2487 stack_arg_under_construction = 0;
2489 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
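
	  /* Note: this path is taken when the total size of the outgoing
	     argument block is not a compile-time constant.  The stack level
	     saved above is restored after the call, once old_stack_level is
	     nonzero (see the emit_stack_restore call near the end of this
	     loop).  */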
2493 /* Note that we must go through the motions of allocating an argument
2494 block even if the size is zero because we may be storing args
2495 in the area reserved for register arguments, which may be part of
2498 int needed = adjusted_args_size.constant;
2500 /* Store the maximum argument space used. It will be pushed by
2501 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2504 if (needed > crtl->outgoing_args_size)
2505 crtl->outgoing_args_size = needed;
2507 if (must_preallocate)
2509 if (ACCUMULATE_OUTGOING_ARGS)
2511 /* Since the stack pointer will never be pushed, it is
2512 possible for the evaluation of a parm to clobber
2513 something we have already written to the stack.
2514 Since most function calls on RISC machines do not use
2515 the stack, this is uncommon, but must work correctly.
2517 Therefore, we save any area of the stack that was already
2518 written and that we are using. Here we set up to do this
2519 by making a new stack usage map from the old one. The
2520 actual save will be done by store_one_arg.
2522 Another approach might be to try to reorder the argument
2523 evaluations to avoid this conflicting stack usage. */
2525 /* Since we will be writing into the entire argument area,
2526 the map must be allocated for its entire size, not just
2527 the part that is the responsibility of the caller. */
2528 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2529 needed += reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
	      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed + 1);
#else
	      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
#endif
2538 if (stack_usage_map_buf)
2539 free (stack_usage_map_buf);
2540 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2541 stack_usage_map = stack_usage_map_buf;
2543 if (initial_highest_arg_in_use)
2544 memcpy (stack_usage_map, initial_stack_usage_map,
2545 initial_highest_arg_in_use);
2547 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2548 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2549 (highest_outgoing_arg_in_use
2550 - initial_highest_arg_in_use));
2553 /* The address of the outgoing argument list must not be
2554 copied to a register here, because argblock would be left
2555 pointing to the wrong place after the call to
2556 allocate_dynamic_stack_space below. */
2558 argblock = virtual_outgoing_args_rtx;
2562 if (inhibit_defer_pop == 0)
2564 /* Try to reuse some or all of the pending_stack_adjust
2565 to get this space. */
		  needed
		    = (combine_pending_stack_adjustment_and_call
2568 (unadjusted_args_size,
2569 &adjusted_args_size,
2570 preferred_unit_stack_boundary));
2572 /* combine_pending_stack_adjustment_and_call computes
2573 an adjustment before the arguments are allocated.
2574 Account for them and see whether or not the stack
2575 needs to go up or down. */
2576 needed = unadjusted_args_size - needed;
		  if (needed < 0)
		    {
		      /* We're releasing stack space.  */
		      /* ??? We can avoid any adjustment at all if we're
			 already aligned.  FIXME.  */
		      pending_stack_adjust = -needed;
		      do_pending_stack_adjust ();
		      needed = 0;
		    }
		  else
		    /* We need to allocate space.  We'll do that in
		       push_block below.  */
		    pending_stack_adjust = 0;
2593 /* Special case this because overhead of `push_block' in
2594 this case is non-trivial. */
	      if (needed == 0)
		argblock = virtual_outgoing_args_rtx;
	      else
		{
		  argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
		  argblock = plus_constant (argblock, needed);
#endif
		}
2605 /* We only really need to call `copy_to_reg' in the case
2606 where push insns are going to be used to pass ARGBLOCK
2607 to a function call in ARGS. In that case, the stack
2608 pointer changes value from the allocation point to the
2609 call point, and hence the value of
2610 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2611 as well always do it. */
2612 argblock = copy_to_reg (argblock);
2617 if (ACCUMULATE_OUTGOING_ARGS)
2619 /* The save/restore code in store_one_arg handles all
2620 cases except one: a constructor call (including a C
2621 function returning a BLKmode struct) to initialize
2623 if (stack_arg_under_construction)
2626 = GEN_INT (adjusted_args_size.constant
2627 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2628 : TREE_TYPE (fndecl))) ? 0
2629 : reg_parm_stack_space));
2630 if (old_stack_level == 0)
2632 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2634 old_stack_pointer_delta = stack_pointer_delta;
2635 old_pending_adj = pending_stack_adjust;
2636 pending_stack_adjust = 0;
2637 /* stack_arg_under_construction says whether a stack
2638 arg is being constructed at the old stack level.
2639 Pushing the stack gets a clean outgoing argument
2641 old_stack_arg_under_construction
2642 = stack_arg_under_construction;
2643 stack_arg_under_construction = 0;
2644 /* Make a new map for the new argument list. */
2645 if (stack_usage_map_buf)
2646 free (stack_usage_map_buf);
2647 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2648 stack_usage_map = stack_usage_map_buf;
2649 highest_outgoing_arg_in_use = 0;
2651 allocate_dynamic_stack_space (push_size, NULL_RTX,
2655 /* If argument evaluation might modify the stack pointer,
2656 copy the address of the argument list to a register. */
2657 for (i = 0; i < num_actuals; i++)
2658 if (args[i].pass_on_stack)
2660 argblock = copy_addr_to_reg (argblock);
2665 compute_argument_addresses (args, argblock, num_actuals);
2667 /* If we push args individually in reverse order, perform stack alignment
2668 before the first push (the last arg). */
2669 if (PUSH_ARGS_REVERSED && argblock == 0
2670 && adjusted_args_size.constant != unadjusted_args_size)
2672 /* When the stack adjustment is pending, we get better code
2673 by combining the adjustments. */
2674 if (pending_stack_adjust
2675 && ! inhibit_defer_pop)
2677 pending_stack_adjust
2678 = (combine_pending_stack_adjustment_and_call
2679 (unadjusted_args_size,
2680 &adjusted_args_size,
2681 preferred_unit_stack_boundary));
2682 do_pending_stack_adjust ();
2684 else if (argblock == 0)
2685 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2686 - unadjusted_args_size));
2688 /* Now that the stack is properly aligned, pops can't safely
2689 be deferred during the evaluation of the arguments. */
2692 funexp = rtx_for_function_call (fndecl, addr);
2694 /* Figure out the register where the value, if any, will come back. */
2696 if (TYPE_MODE (rettype) != VOIDmode
2697 && ! structure_value_addr)
2699 if (pcc_struct_value)
2700 valreg = hard_function_value (build_pointer_type (rettype),
2701 fndecl, NULL, (pass == 0));
2703 valreg = hard_function_value (rettype, fndecl, fntype,
2706 /* If VALREG is a PARALLEL whose first member has a zero
2707 offset, use that. This is for targets such as m68k that
2708 return the same value in multiple places. */
2709 if (GET_CODE (valreg) == PARALLEL)
2711 rtx elem = XVECEXP (valreg, 0, 0);
2712 rtx where = XEXP (elem, 0);
2713 rtx offset = XEXP (elem, 1);
	      if (offset == const0_rtx
		  && GET_MODE (where) == GET_MODE (valreg))
		valreg = where;
	    }
2720 /* Precompute all register parameters. It isn't safe to compute anything
2721 once we have started filling any specific hard regs. */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2724 if (CALL_EXPR_STATIC_CHAIN (exp))
2725 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2727 static_chain_value = 0;
2729 #ifdef REG_PARM_STACK_SPACE
2730 /* Save the fixed argument area if it's part of the caller's frame and
2731 is clobbered by argument setup for this call. */
2732 if (ACCUMULATE_OUTGOING_ARGS && pass)
2733 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif
2737 /* Now store (and compute if necessary) all non-register parms.
2738 These come before register parms, since they can require block-moves,
2739 which could clobber the registers used for register parms.
2740 Parms which have partial registers are not stored here,
2741 but we do preallocate space here if they want that. */
2743 for (i = 0; i < num_actuals; i++)
2745 if (args[i].reg == 0 || args[i].pass_on_stack)
2747 rtx before_arg = get_last_insn ();
2749 if (store_one_arg (&args[i], argblock, flags,
2750 adjusted_args_size.var != 0,
2751 reg_parm_stack_space)
2753 && check_sibcall_argument_overlap (before_arg,
2755 sibcall_failure = 1;
2758 if (((flags & ECF_CONST)
2759 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2761 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2762 gen_rtx_USE (VOIDmode,
2767 /* If we have a parm that is passed in registers but not in memory
2768 and whose alignment does not permit a direct copy into registers,
2769 make a group of pseudos that correspond to each register that we
2771 if (STRICT_ALIGNMENT)
2772 store_unaligned_arguments_into_pseudos (args, num_actuals);
2774 /* Now store any partially-in-registers parm.
2775 This is the last place a block-move can happen. */
2777 for (i = 0; i < num_actuals; i++)
2778 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2780 rtx before_arg = get_last_insn ();
2782 if (store_one_arg (&args[i], argblock, flags,
2783 adjusted_args_size.var != 0,
2784 reg_parm_stack_space)
2786 && check_sibcall_argument_overlap (before_arg,
2788 sibcall_failure = 1;
2791 /* If we pushed args in forward order, perform stack alignment
2792 after pushing the last arg. */
2793 if (!PUSH_ARGS_REVERSED && argblock == 0)
2794 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2795 - unadjusted_args_size));
2797 /* If register arguments require space on the stack and stack space
2798 was not preallocated, allocate stack space here for arguments
2799 passed in registers. */
2800 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2801 && !ACCUMULATE_OUTGOING_ARGS
2802 && must_preallocate == 0 && reg_parm_stack_space > 0)
2803 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2805 /* Pass the function the address in which to return a
2807 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2809 structure_value_addr
2810 = convert_memory_address (Pmode, structure_value_addr);
2811 emit_move_insn (struct_value,
2813 force_operand (structure_value_addr,
2816 if (REG_P (struct_value))
2817 use_reg (&call_fusage, struct_value);
2820 after_args = get_last_insn ();
2821 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
2822 &call_fusage, reg_parm_seen, pass == 0);
2824 load_register_parameters (args, num_actuals, &call_fusage, flags,
2825 pass == 0, &sibcall_failure);
2827 /* Save a pointer to the last insn before the call, so that we can
2828 later safely search backwards to find the CALL_INSN. */
2829 before_call = get_last_insn ();
2831 /* Set up next argument register. For sibling calls on machines
2832 with register windows this should be the incoming register. */
2834 next_arg_reg = targetm.calls.function_incoming_arg (&args_so_far,
2839 next_arg_reg = targetm.calls.function_arg (&args_so_far,
2840 VOIDmode, void_type_node,
2843 /* All arguments and registers used for the call must be set up by
2846 /* Stack must be properly aligned now. */
2848 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2850 /* Generate the actual call instruction. */
2851 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2852 adjusted_args_size.constant, struct_value_size,
2853 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2854 flags, & args_so_far);
2856 /* If the call setup or the call itself overlaps with anything
2857 of the argument setup we probably clobbered our call address.
2858 In that case we can't do sibcalls. */
2860 && check_sibcall_argument_overlap (after_args, 0, 0))
2861 sibcall_failure = 1;
2863 /* If a non-BLKmode value is returned at the most significant end
2864 of a register, shift the register right by the appropriate amount
2865 and update VALREG accordingly. BLKmode values are handled by the
2866 group load/store machinery below. */
2867 if (!structure_value_addr
2868 && !pcc_struct_value
2869 && TYPE_MODE (rettype) != BLKmode
2870 && targetm.calls.return_in_msb (rettype))
2872 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
2873 sibcall_failure = 1;
2874 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
2877 if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx last, insns;
2882 /* The return value from a malloc-like function is a pointer. */
2883 if (TREE_CODE (rettype) == POINTER_TYPE)
2884 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2886 emit_move_insn (temp, valreg);
2888 /* The return value from a malloc-like function can not alias
2890 last = get_last_insn ();
2891 add_reg_note (last, REG_NOALIAS, temp);
2893 /* Write out the sequence. */
2894 insns = get_insns ();
2900 /* For calls to `setjmp', etc., inform
2901 function.c:setjmp_warnings that it should complain if
2902 nonvolatile values are live. For functions that cannot
2903 return, inform flow that control does not fall through. */
2905 if ((flags & ECF_NORETURN) || pass == 0)
2907 /* The barrier must be emitted
2908 immediately after the CALL_INSN. Some ports emit more
2909 than just a CALL_INSN above, so we must search for it here. */
2911 rtx last = get_last_insn ();
	    while (!CALL_P (last))
	      {
		last = PREV_INSN (last);
		/* There was no CALL_INSN?  */
		gcc_assert (last != before_call);
	      }
2919 emit_barrier_after (last);
2921 /* Stack adjustments after a noreturn call are dead code.
2922 However when NO_DEFER_POP is in effect, we must preserve
2923 stack_pointer_delta. */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
2931 /* If value type not void, return an rtx for the value. */
      if (TYPE_MODE (rettype) == VOIDmode || ignore)
	target = const0_rtx;
2936 else if (structure_value_addr)
2938 if (target == 0 || !MEM_P (target))
2941 = gen_rtx_MEM (TYPE_MODE (rettype),
2942 memory_address (TYPE_MODE (rettype),
2943 structure_value_addr));
2944 set_mem_attributes (target, rettype, 1);
2947 else if (pcc_struct_value)
2949 /* This is the special C++ case where we need to
2950 know what the true target was. We take care to
2951 never use this value more than once in one expression. */
2952 target = gen_rtx_MEM (TYPE_MODE (rettype),
2953 copy_to_reg (valreg));
2954 set_mem_attributes (target, rettype, 1);
2956 /* Handle calls that return values in multiple non-contiguous locations.
2957 The Irix 6 ABI has examples of this. */
2958 else if (GET_CODE (valreg) == PARALLEL)
2962 /* This will only be assigned once, so it can be readonly. */
2963 tree nt = build_qualified_type (rettype,
2964 (TYPE_QUALS (rettype)
2965 | TYPE_QUAL_CONST));
2967 target = assign_temp (nt, 0, 1, 1);
2970 if (! rtx_equal_p (target, valreg))
2971 emit_group_store (target, valreg, rettype,
2972 int_size_in_bytes (rettype));
2974 /* We can not support sibling calls for this case. */
2975 sibcall_failure = 1;
      else if (target
	       && GET_MODE (target) == TYPE_MODE (rettype)
	       && GET_MODE (target) == GET_MODE (valreg))
2981 bool may_overlap = false;
2983 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2984 reg to a plain register. */
2985 if (!REG_P (target) || HARD_REGISTER_P (target))
2986 valreg = avoid_likely_spilled_reg (valreg);
2988 /* If TARGET is a MEM in the argument area, and we have
2989 saved part of the argument area, then we can't store
2990 directly into TARGET as it may get overwritten when we
2991 restore the argument save area below. Don't work too
2992 hard though and simply force TARGET to a register if it
2993 is a MEM; the optimizer is quite likely to sort it out. */
2994 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2995 for (i = 0; i < num_actuals; i++)
2996 if (args[i].save_area)
3003 target = copy_to_reg (valreg);
3006 /* TARGET and VALREG cannot be equal at this point
3007 because the latter would not have
3008 REG_FUNCTION_VALUE_P true, while the former would if
3009 it were referring to the same register.
3011 If they refer to the same register, this move will be
3012 a no-op, except when function inlining is being
3014 emit_move_insn (target, valreg);
3016 /* If we are setting a MEM, this code must be executed.
3017 Since it is emitted after the call insn, sibcall
3018 optimization cannot be performed in that case. */
3020 sibcall_failure = 1;
      else if (TYPE_MODE (rettype) == BLKmode)
	{
	  rtx val = valreg;
	  if (GET_MODE (val) != BLKmode)
3027 val = avoid_likely_spilled_reg (val);
3028 target = copy_blkmode_from_reg (target, val, rettype);
3030 /* We can not support sibling calls for this case. */
3031 sibcall_failure = 1;
3034 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3036 /* If we promoted this return value, make the proper SUBREG.
3037 TARGET might be const0_rtx here, so be careful. */
      if (REG_P (target)
	  && TYPE_MODE (rettype) != BLKmode
3040 && GET_MODE (target) != TYPE_MODE (rettype))
3042 tree type = rettype;
3043 int unsignedp = TYPE_UNSIGNED (type);
3045 enum machine_mode pmode;
3047 /* Ensure we promote as expected, and get the new unsignedness. */
3048 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3050 gcc_assert (GET_MODE (target) == pmode);
3052 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3053 && (GET_MODE_SIZE (GET_MODE (target))
3054 > GET_MODE_SIZE (TYPE_MODE (type))))
3056 offset = GET_MODE_SIZE (GET_MODE (target))
3057 - GET_MODE_SIZE (TYPE_MODE (type));
3058 if (! BYTES_BIG_ENDIAN)
3059 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3060 else if (! WORDS_BIG_ENDIAN)
3061 offset %= UNITS_PER_WORD;
3064 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3065 SUBREG_PROMOTED_VAR_P (target) = 1;
3066 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
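
	  /* For example (illustration only): on a target whose
	     promote_function_mode widens QImode return values to SImode,
	     TARGET is rewritten here as (subreg:QI (reg:SI ...) <offset>)
	     with the promoted-register flags set, so later readers know the
	     upper bits of the inner register already hold the extension.  */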
3069 /* If size of args is variable or this was a constructor call for a stack
3070 argument, restore saved stack-pointer value. */
3072 if (old_stack_level)
3074 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3075 stack_pointer_delta = old_stack_pointer_delta;
3076 pending_stack_adjust = old_pending_adj;
3077 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3078 stack_arg_under_construction = old_stack_arg_under_construction;
3079 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3080 stack_usage_map = initial_stack_usage_map;
3081 sibcall_failure = 1;
3083 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3085 #ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif
3091 /* If we saved any argument areas, restore them. */
3092 for (i = 0; i < num_actuals; i++)
3093 if (args[i].save_area)
3095 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3097 = gen_rtx_MEM (save_mode,
3098 memory_address (save_mode,
3099 XEXP (args[i].stack_slot, 0)));
3101 if (save_mode != BLKmode)
3102 emit_move_insn (stack_area, args[i].save_area);
3104 emit_block_move (stack_area, args[i].save_area,
3105 GEN_INT (args[i].locate.size.constant),
3106 BLOCK_OP_CALL_PARM);
3109 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3110 stack_usage_map = initial_stack_usage_map;
3113 /* If this was alloca, record the new stack level for nonlocal gotos.
3114 Check for the handler slots since we might not have a save area
3115 for non-local gotos. */
3117 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3118 update_nonlocal_goto_save_area ();
3120 /* Free up storage we no longer need. */
3121 for (i = 0; i < num_actuals; ++i)
3122 if (args[i].aligned_regs)
3123 free (args[i].aligned_regs);
3125 insns = get_insns ();
3130 tail_call_insns = insns;
3132 /* Restore the pending stack adjustment now that we have
3133 finished generating the sibling call sequence. */
3135 pending_stack_adjust = save_pending_stack_adjust;
3136 stack_pointer_delta = save_stack_pointer_delta;
3138 /* Prepare arg structure for next iteration. */
3139 for (i = 0; i < num_actuals; i++)
3142 args[i].aligned_regs = 0;
3146 sbitmap_free (stored_args_map);
3150 normal_call_insns = insns;
3152 /* Verify that we've deallocated all the stack we used. */
3153 gcc_assert ((flags & ECF_NORETURN)
3154 || (old_stack_allocated
3155 == stack_pointer_delta - pending_stack_adjust));
3158 /* If something prevents making this a sibling call,
3159 zero out the sequence. */
3160 if (sibcall_failure)
3161 tail_call_insns = NULL_RTX;
3166 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3167 arguments too, as argument area is now clobbered by the call. */
3168 if (tail_call_insns)
3170 emit_insn (tail_call_insns);
3171 crtl->tail_call_emit = true;
3174 emit_insn (normal_call_insns);
3176 currently_expanding_call--;
3178 if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return target;
}
3184 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3185 this function's incoming arguments.
3187 At the start of RTL generation we know the only REG_EQUIV notes
3188 in the rtl chain are those for incoming arguments, so we can look
3189 for REG_EQUIV notes between the start of the function and the
3190 NOTE_INSN_FUNCTION_BEG.
3192 This is (slight) overkill. We could keep track of the highest
3193 argument we clobber and be more selective in removing notes, but it
3194 does not seem to be worth the effort. */
void
fixup_tail_calls (void)
{
  rtx insn;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
	remove_note (insn, note);
      gcc_assert (!find_reg_note (insn, REG_EQUIV, 0));
    }
}
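
/* For example (illustration only): given the parameter-type list
   (complex double, int), split_complex_types below returns a copy rewritten
   as (double, double, int) on targets whose split_complex_arg hook accepts
   COMPLEX_TYPE; the original list itself is left unmodified.  */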
3219 /* Traverse a list of TYPES and expand all complex types into their
   components.  */

static tree
split_complex_types (tree types)
{
  tree p;
3226 /* Before allocating memory, check for the common case of no complex. */
3227 for (p = types; p; p = TREE_CHAIN (p))
3229 tree type = TREE_VALUE (p);
3230 if (TREE_CODE (type) == COMPLEX_TYPE
3231 && targetm.calls.split_complex_arg (type))
3237 types = copy_list (types);
3239 for (p = types; p; p = TREE_CHAIN (p))
3241 tree complex_type = TREE_VALUE (p);
3243 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3244 && targetm.calls.split_complex_arg (complex_type))
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
3249 TREE_VALUE (p) = TREE_TYPE (complex_type);
3250 next = TREE_CHAIN (p);
3252 /* Add another component type for the imaginary part. */
3253 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3254 TREE_CHAIN (p) = imag;
3255 TREE_CHAIN (imag) = next;
	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
3265 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3266 The RETVAL parameter specifies whether return value needs to be saved, other
3267 parameters are documented in the emit_library_call function below. */
static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3271 enum libcall_type fn_type,
3272 enum machine_mode outmode, int nargs, va_list p)
3274 /* Total size in bytes of all the stack-parms scanned so far. */
3275 struct args_size args_size;
3276 /* Size of arguments before any adjustments (such as rounding). */
3277 struct args_size original_args_size;
3280 /* Todo, choose the correct decl type of orgfun. Sadly this information
3281 isn't present here, so we default to native calling abi here. */
3282 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3283 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3287 CUMULATIVE_ARGS args_so_far;
3291 enum machine_mode mode;
3294 struct locate_and_pad_arg_data locate;
3298 int old_inhibit_defer_pop = inhibit_defer_pop;
3299 rtx call_fusage = 0;
3302 int pcc_struct_value = 0;
3303 int struct_value_size = 0;
3305 int reg_parm_stack_space = 0;
3308 tree tfom; /* type_for_mode (outmode, 0) */
3310 #ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif
3317 /* Size of the stack reserved for parameter registers. */
3318 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3319 char *initial_stack_usage_map = stack_usage_map;
3320 char *stack_usage_map_buf = NULL;
3322 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3324 #ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif
3328 /* By default, library functions can not throw. */
3329 flags = ECF_NOTHROW;
3342 flags |= ECF_NORETURN;
3345 flags = ECF_NORETURN;
3347 case LCT_RETURNS_TWICE:
3348 flags = ECF_RETURNS_TWICE;
3353 /* Ensure current function's preferred stack boundary is at least
3355 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3356 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3358 /* If this kind of value comes back in memory,
3359 decide where in memory it should come back. */
3360 if (outmode != VOIDmode)
3362 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3363 if (aggregate_value_p (tfom, 0))
3365 #ifdef PCC_STATIC_STRUCT_RETURN
3367 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3368 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3369 pcc_struct_value = 1;
3371 value = gen_reg_rtx (outmode);
3372 #else /* not PCC_STATIC_STRUCT_RETURN */
3373 struct_value_size = GET_MODE_SIZE (outmode);
3374 if (value != 0 && MEM_P (value))
3377 mem_value = assign_temp (tfom, 0, 1, 1);
3379 /* This call returns a big structure. */
3380 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3384 tfom = void_type_node;
3386 /* ??? Unfinished: must pass the memory address as an argument. */
3388 /* Copy all the libcall-arguments out of the varargs data
3389 and into a vector ARGVEC.
3391 Compute how to pass each argument. We only support a very small subset
3392 of the full argument passing conventions to limit complexity here since
3393 library functions shouldn't have many args. */
3395 argvec = XALLOCAVEC (struct arg, nargs + 1);
3396 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
#endif
3404 args_size.constant = 0;
3411 /* If there's a structure value address to be passed,
3412 either pass it in the special place, or pass it as an extra argument. */
3413 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3415 rtx addr = XEXP (mem_value, 0);
3419 /* Make sure it is a reasonable operand for a move or push insn. */
3420 if (!REG_P (addr) && !MEM_P (addr)
3421 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3422 addr = force_operand (addr, NULL_RTX);
3424 argvec[count].value = addr;
3425 argvec[count].mode = Pmode;
3426 argvec[count].partial = 0;
3428 argvec[count].reg = targetm.calls.function_arg (&args_so_far,
3429 Pmode, NULL_TREE, true);
3430 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3431 NULL_TREE, 1) == 0);
3433 locate_and_pad_parm (Pmode, NULL_TREE,
3434 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3437 argvec[count].reg != 0,
3439 0, NULL_TREE, &args_size, &argvec[count].locate);
3441 if (argvec[count].reg == 0 || argvec[count].partial != 0
3442 || reg_parm_stack_space > 0)
3443 args_size.constant += argvec[count].locate.size.constant;
3445 targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
3450 for (; count < nargs; count++)
3452 rtx val = va_arg (p, rtx);
3453 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3455 /* We cannot convert the arg value to the mode the library wants here;
3456 must do it earlier where we know the signedness of the arg. */
3457 gcc_assert (mode != BLKmode
3458 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3460 /* Make sure it is a reasonable operand for a move or push insn. */
3461 if (!REG_P (val) && !MEM_P (val)
3462 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3463 val = force_operand (val, NULL_RTX);
3465 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3469 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3471 /* If this was a CONST function, it is now PURE since it now
3473 if (flags & ECF_CONST)
3475 flags &= ~ECF_CONST;
3479 if (MEM_P (val) && !must_copy)
3483 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3485 emit_move_insn (slot, val);
3488 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3489 gen_rtx_USE (VOIDmode, slot),
3492 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3493 gen_rtx_CLOBBER (VOIDmode,
3498 val = force_operand (XEXP (slot, 0), NULL_RTX);
3501 argvec[count].value = val;
3502 argvec[count].mode = mode;
3504 argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
3507 argvec[count].partial
3508 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3510 locate_and_pad_parm (mode, NULL_TREE,
3511 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3514 argvec[count].reg != 0,
3516 argvec[count].partial,
3517 NULL_TREE, &args_size, &argvec[count].locate);
3519 gcc_assert (!argvec[count].locate.size.var);
3521 if (argvec[count].reg == 0 || argvec[count].partial != 0
3522 || reg_parm_stack_space > 0)
3523 args_size.constant += argvec[count].locate.size.constant;
3525 targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
3528 /* If this machine requires an external definition for library
3529 functions, write one out. */
3530 assemble_external_libcall (fun);
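
  /* Illustrative note (hedged): this worker is normally reached through the
     public wrappers emit_library_call and emit_library_call_value, e.g.
     roughly

       emit_library_call_value (libfunc, target, LCT_NORMAL, SImode,
				2, op0, SImode, op1, SImode);

     where each argument rtx is followed by its machine mode, matching the
     va_list walk above; libfunc, target, op0 and op1 are placeholders.  */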
3532 original_args_size = args_size;
3533 args_size.constant = (((args_size.constant
3534 + stack_pointer_delta
3538 - stack_pointer_delta);
3540 args_size.constant = MAX (args_size.constant,
3541 reg_parm_stack_space);
3543 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3544 args_size.constant -= reg_parm_stack_space;
3546 if (args_size.constant > crtl->outgoing_args_size)
3547 crtl->outgoing_args_size = args_size.constant;
3549 if (ACCUMULATE_OUTGOING_ARGS)
3551 /* Since the stack pointer will never be pushed, it is possible for
3552 the evaluation of a parm to clobber something we have already
3553 written to the stack. Since most function calls on RISC machines
3554 do not use the stack, this is uncommon, but must work correctly.
3556 Therefore, we save any area of the stack that was already written
3557 and that we are using. Here we set up to do this by making a new
3558 stack usage map from the old one.
3560 Another approach might be to try to reorder the argument
3561 evaluations to avoid this conflicting stack usage. */
3563 needed = args_size.constant;
3565 /* Since we will be writing into the entire argument area, the
3566 map must be allocated for its entire size, not just the part that
3567 is the responsibility of the caller. */
3568 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3569 needed += reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed + 1);
#else
  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
#endif
3578 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3579 stack_usage_map = stack_usage_map_buf;
3581 if (initial_highest_arg_in_use)
3582 memcpy (stack_usage_map, initial_stack_usage_map,
3583 initial_highest_arg_in_use);
3585 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3586 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3587 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3590 /* We must be careful to use virtual regs before they're instantiated,
3591 and real regs afterwards. Loop optimization, for example, can create
3592 new libcalls after we've instantiated the virtual regs, and if we
3593 use virtuals anyway, they won't match the rtl patterns. */
3595 if (virtuals_instantiated)
3596 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3598 argblock = virtual_outgoing_args_rtx;
3603 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3606 /* If we push args individually in reverse order, perform stack alignment
3607 before the first push (the last arg). */
3608 if (argblock == 0 && PUSH_ARGS_REVERSED)
3609 anti_adjust_stack (GEN_INT (args_size.constant
3610 - original_args_size.constant));
3612 if (PUSH_ARGS_REVERSED)
3623 #ifdef REG_PARM_STACK_SPACE
3624 if (ACCUMULATE_OUTGOING_ARGS)
3626 /* The argument list is the property of the called routine and it
3627 may clobber it. If the fixed area has been used for previous
3628 parameters, we must save and restore it. */
3629 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3630 &low_to_save, &high_to_save);
3634 /* Push the args that need to be pushed. */
3636 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3637 are to be pushed. */
3638 for (count = 0; count < nargs; count++, argnum += inc)
3640 enum machine_mode mode = argvec[argnum].mode;
3641 rtx val = argvec[argnum].value;
3642 rtx reg = argvec[argnum].reg;
3643 int partial = argvec[argnum].partial;
3644 unsigned int parm_align = argvec[argnum].locate.boundary;
3645 int lower_bound = 0, upper_bound = 0, i;
3647 if (! (reg != 0 && partial == 0))
3649 if (ACCUMULATE_OUTGOING_ARGS)
3651 /* If this is being stored into a pre-allocated, fixed-size,
3652 stack area, save any previous data at that location. */
3654 #ifdef ARGS_GROW_DOWNWARD
3655 /* stack_slot is negative, but we want to index stack_usage_map
3656 with positive values. */
3657 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
	      lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
	      lower_bound = argvec[argnum].locate.slot_offset.constant;
	      upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif
	      i = lower_bound;
3665 /* Don't worry about things in the fixed argument area;
3666 it has already been saved. */
3667 if (i < reg_parm_stack_space)
3668 i = reg_parm_stack_space;
3669 while (i < upper_bound && stack_usage_map[i] == 0)
3672 if (i < upper_bound)
3674 /* We need to make a save area. */
3676 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3677 enum machine_mode save_mode
3678 = mode_for_size (size, MODE_INT, 1);
3680 = plus_constant (argblock,
3681 argvec[argnum].locate.offset.constant);
3683 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3685 if (save_mode == BLKmode)
3687 argvec[argnum].save_area
3688 = assign_stack_temp (BLKmode,
3689 argvec[argnum].locate.size.constant,
3692 emit_block_move (validize_mem (argvec[argnum].save_area),
3694 GEN_INT (argvec[argnum].locate.size.constant),
3695 BLOCK_OP_CALL_PARM);
3699 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3701 emit_move_insn (argvec[argnum].save_area, stack_area);
3706 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3707 partial, reg, 0, argblock,
3708 GEN_INT (argvec[argnum].locate.offset.constant),
3709 reg_parm_stack_space,
3710 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3712 /* Now mark the segment we just used. */
3713 if (ACCUMULATE_OUTGOING_ARGS)
3714 for (i = lower_bound; i < upper_bound; i++)
3715 stack_usage_map[i] = 1;
3719 if ((flags & ECF_CONST)
3720 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
3724 /* Indicate argument access so that alias.c knows that these
3727 use = plus_constant (argblock,
3728 argvec[argnum].locate.offset.constant);
3730 /* When arguments are pushed, trying to tell alias.c where
3731 exactly this argument is won't work, because the
3732 auto-increment causes confusion. So we merely indicate
3733 that we access something with a known mode somewhere on
3735 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3736 gen_rtx_SCRATCH (Pmode));
3737 use = gen_rtx_MEM (argvec[argnum].mode, use);
3738 use = gen_rtx_USE (VOIDmode, use);
3739 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3744 /* If we pushed args in forward order, perform stack alignment
3745 after pushing the last arg. */
3746 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3747 anti_adjust_stack (GEN_INT (args_size.constant
3748 - original_args_size.constant));
3750 if (PUSH_ARGS_REVERSED)
3755 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3757 /* Now load any reg parms into their regs. */
3759 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3760 are to be pushed. */
3761 for (count = 0; count < nargs; count++, argnum += inc)
3763 enum machine_mode mode = argvec[argnum].mode;
3764 rtx val = argvec[argnum].value;
3765 rtx reg = argvec[argnum].reg;
3766 int partial = argvec[argnum].partial;
3768 /* Handle calls that pass values in multiple non-contiguous
3769 locations. The PA64 has examples of this for library calls. */
3770 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3771 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3772 else if (reg != 0 && partial == 0)
3773 emit_move_insn (reg, val);
3778 /* Any regs containing parms remain in use through the call. */
3779 for (count = 0; count < nargs; count++)
3781 rtx reg = argvec[count].reg;
3782 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3783 use_group_regs (&call_fusage, reg);
3786 int partial = argvec[count].partial;
3790 gcc_assert (partial % UNITS_PER_WORD == 0);
3791 nregs = partial / UNITS_PER_WORD;
3792 use_regs (&call_fusage, REGNO (reg), nregs);
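          /* Added commentary: PARTIAL counts the bytes of this argument that
             were passed in registers; e.g. with PARTIAL == 8 and
             UNITS_PER_WORD == 4, two consecutive hard registers starting at
             REGNO (reg) are marked as used by the call.  */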
3795 use_reg (&call_fusage, reg);
3799 /* Pass the function the address in which to return a structure value. */
3800 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3802 emit_move_insn (struct_value,
3804 force_operand (XEXP (mem_value, 0),
3806 if (REG_P (struct_value))
3807 use_reg (&call_fusage, struct_value);
3810 /* Don't allow popping to be deferred, since then
3811 cse'ing of library calls could delete a call and leave the pop. */
3813 valreg = (mem_value == 0 && outmode != VOIDmode
3814 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3816 /* Stack must be properly aligned now. */
3817 gcc_assert (!(stack_pointer_delta
3818 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
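  /* Added commentary: with PREFERRED_STACK_BOUNDARY == 128 bits, for example,
     the mask is 128/8 - 1 == 15, so this asserts that the net stack
     adjustment so far is a multiple of 16 bytes.  */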
3820 before_call = get_last_insn ();
3822 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3823 will set inhibit_defer_pop to that value. */
3824 /* The return type is needed to decide how many bytes the function pops.
3825 Signedness plays no role in that, so for simplicity, we pretend it's
3826 always signed. We also assume that the list of arguments passed has
3827 no impact, so we pretend it is unknown. */
3829 emit_call_1 (fun, NULL,
3830 get_identifier (XSTR (orgfun, 0)),
3831 build_function_type (tfom, NULL_TREE),
3832 original_args_size.constant, args_size.constant,
3834 targetm.calls.function_arg (&args_so_far,
3835 VOIDmode, void_type_node, true),
3837 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3839 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3840 that it should complain if nonvolatile values are live. For
3841 functions that cannot return, inform flow that control does not fall through.  */
3844 if (flags & ECF_NORETURN)
3846 /* The barrier note must be emitted
3847 immediately after the CALL_INSN. Some ports emit more than
3848 just a CALL_INSN above, so we must search for it here. */
3850 rtx last = get_last_insn ();
3851 while (!CALL_P (last))
3853 last = PREV_INSN (last);
3854 /* There was no CALL_INSN? */
3855 gcc_assert (last != before_call);
3858 emit_barrier_after (last);
3861 /* Now restore inhibit_defer_pop to its actual original value. */
3866 /* Copy the value to the right place. */
3867 if (outmode != VOIDmode && retval)
3873 if (value != mem_value)
3874 emit_move_insn (value, mem_value);
3876 else if (GET_CODE (valreg) == PARALLEL)
3879 value = gen_reg_rtx (outmode);
3880 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3884 /* Convert to the proper mode if a promotion has been active. */
3885 if (GET_MODE (valreg) != outmode)
3887 int unsignedp = TYPE_UNSIGNED (tfom);
3889 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
3890 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
3891 == GET_MODE (valreg));
3892 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3896 emit_move_insn (value, valreg);
3902 if (ACCUMULATE_OUTGOING_ARGS)
3904 #ifdef REG_PARM_STACK_SPACE
3906 restore_fixed_argument_area (save_area, argblock,
3907 high_to_save, low_to_save);
3910 /* If we saved any argument areas, restore them. */
3911 for (count = 0; count < nargs; count++)
3912 if (argvec[count].save_area)
3914 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3915 rtx adr = plus_constant (argblock,
3916 argvec[count].locate.offset.constant);
3917 rtx stack_area = gen_rtx_MEM (save_mode,
3918 memory_address (save_mode, adr));
3920 if (save_mode == BLKmode)
3921 emit_block_move (stack_area,
3922 validize_mem (argvec[count].save_area),
3923 GEN_INT (argvec[count].locate.size.constant),
3924 BLOCK_OP_CALL_PARM);
3926 emit_move_insn (stack_area, argvec[count].save_area);
3929 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3930 stack_usage_map = initial_stack_usage_map;
3933 if (stack_usage_map_buf)
3934 free (stack_usage_map_buf);
3940 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3942 for a value of mode OUTMODE,
3943 with NARGS different arguments, passed as alternating rtx values
3944 and machine_modes to convert them to.
3946 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
3947 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
3948 other types of library calls. */
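/* Usage sketch (added commentary; the symbol name, operands and modes below
   are assumptions for illustration, not taken from this file).  Callers pass
   NARGS (value, mode) pairs after the fixed parameters:

       emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_helper"),
			  LCT_NORMAL, VOIDmode, 2,
			  op0, SImode,
			  op1, SImode);

   This emits a call to __example_helper taking two SImode operands and
   returning nothing (OUTMODE == VOIDmode).  */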
3951 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3952 enum machine_mode outmode, int nargs, ...)
3956 va_start (p, nargs);
3957 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3961 /* Like emit_library_call except that an extra argument, VALUE,
3962 comes second and says where to store the result.
3963 (If VALUE is zero, this function chooses a convenient way
3964 to return the value.)
3966 This function returns an rtx for where the value is to be found.
3967 If VALUE is nonzero, VALUE is returned. */
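/* Usage sketch (added commentary; the helper name, operands and modes are
   assumptions for illustration):

       rtx res = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__example_mul"),
					  NULL_RTX, LCT_CONST, DImode, 2,
					  op0, DImode, op1, DImode);

   Passing NULL_RTX for VALUE lets this function pick where the DImode result
   lives; the chosen location is returned in RES.  */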
3970 emit_library_call_value (rtx orgfun, rtx value,
3971 enum libcall_type fn_type,
3972 enum machine_mode outmode, int nargs, ...)
3977 va_start (p, nargs);
3978 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
3985 /* Store a single argument for a function call
3986 into the register or memory area where it must be passed.
3987 *ARG describes the argument value and where to pass it.
3989 ARGBLOCK is the address of the stack-block for all the arguments,
3990 or 0 on a machine where arguments are pushed individually.
3992 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3993 so we must be careful about how the stack is used.
3995 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3996 argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is in effect to indicate
3997 that we need not worry about saving and restoring the stack.
3999 FNDECL is the declaration of the function we are calling.
4001 Return nonzero if this arg should cause sibcall failure, zero otherwise.  */
4005 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4006 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4008 tree pval = arg->tree_value;
4012 int i, lower_bound = 0, upper_bound = 0;
4013 int sibcall_failure = 0;
4015 if (TREE_CODE (pval) == ERROR_MARK)
4018 /* Push a new temporary level for any temporaries we make for
4022 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4024 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4025 save any previous data at that location. */
4026 if (argblock && ! variable_size && arg->stack)
4028 #ifdef ARGS_GROW_DOWNWARD
4029 /* stack_slot is negative, but we want to index stack_usage_map
4030 with positive values. */
4031 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4032 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4036 lower_bound = upper_bound - arg->locate.size.constant;
4038 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4039 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4043 upper_bound = lower_bound + arg->locate.size.constant;
4047 /* Don't worry about things in the fixed argument area;
4048 it has already been saved. */
4049 if (i < reg_parm_stack_space)
4050 i = reg_parm_stack_space;
4051 while (i < upper_bound && stack_usage_map[i] == 0)
4054 if (i < upper_bound)
4056 /* We need to make a save area. */
4057 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4058 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4059 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4060 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4062 if (save_mode == BLKmode)
4064 tree ot = TREE_TYPE (arg->tree_value);
4065 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4066 | TYPE_QUAL_CONST));
4068 arg->save_area = assign_temp (nt, 0, 1, 1);
4069 preserve_temp_slots (arg->save_area);
4070 emit_block_move (validize_mem (arg->save_area), stack_area,
4071 GEN_INT (arg->locate.size.constant),
4072 BLOCK_OP_CALL_PARM);
4076 arg->save_area = gen_reg_rtx (save_mode);
4077 emit_move_insn (arg->save_area, stack_area);
4083 /* If this isn't going to be placed on both the stack and in registers,
4084 set up the register and number of words. */
4085 if (! arg->pass_on_stack)
4087 if (flags & ECF_SIBCALL)
4088 reg = arg->tail_call_reg;
4091 partial = arg->partial;
4094 /* Being passed entirely in a register.  We shouldn't be called in this case.  */
4096 gcc_assert (reg == 0 || partial != 0);
4098 /* If this arg needs special alignment, don't load the registers here.  */
4100 if (arg->n_aligned_regs != 0)
4103 /* If this is being passed partially in a register, we can't evaluate
4104 it directly into its stack slot. Otherwise, we can. */
4105 if (arg->value == 0)
4107 /* stack_arg_under_construction is nonzero if a function argument is
4108 being evaluated directly into the outgoing argument list and
4109 expand_call must take special action to preserve the argument list
4110 if it is called recursively.
4112 For scalar function arguments stack_usage_map is sufficient to
4113 determine which stack slots must be saved and restored. Scalar
4114 arguments in general have pass_on_stack == 0.
4116 If this argument is initialized by a function which takes the
4117 address of the argument (a C++ constructor or a C function
4118 returning a BLKmode structure), then stack_usage_map is
4119 insufficient and expand_call must push the stack around the
4120 function call. Such arguments have pass_on_stack == 1.
4122 Note that it is always safe to set stack_arg_under_construction,
4123 but this generates suboptimal code if set when not needed. */
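	 /* Added illustration (hypothetical C source, not from this file):
	    for a call such as

		extern struct big make_big (void);
		use_big (make_big ());

	    the BLKmode value returned by make_big may be constructed directly
	    in the outgoing argument slot through its address -- the
	    pass_on_stack == 1 situation described above.  */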
4125 if (arg->pass_on_stack)
4126 stack_arg_under_construction++;
4128 arg->value = expand_expr (pval,
4130 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4131 ? NULL_RTX : arg->stack,
4132 VOIDmode, EXPAND_STACK_PARM);
4134 /* If we are promoting the object, or if for any other reason the mode
4135 doesn't agree, convert the value.  */
4137 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4138 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4139 arg->value, arg->unsignedp);
4141 if (arg->pass_on_stack)
4142 stack_arg_under_construction--;
4145 /* Check for overlap with already clobbered argument area. */
4146 if ((flags & ECF_SIBCALL)
4147 && MEM_P (arg->value)
4148 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4149 arg->locate.size.constant))
4150 sibcall_failure = 1;
4152 /* Don't allow anything left on stack from computation
4153 of argument to alloca. */
4154 if (flags & ECF_MAY_BE_ALLOCA)
4155 do_pending_stack_adjust ();
4157 if (arg->value == arg->stack)
4158 /* If the value is already in the stack slot, we are done. */
4160 else if (arg->mode != BLKmode)
4163 unsigned int parm_align;
4165 /* Argument is a scalar, not entirely passed in registers.
4166 (If part is passed in registers, arg->partial says how much
4167 and emit_push_insn will take care of putting it there.)
4169 Push it, and if its size is less than the
4170 amount of space allocated to it,
4171 also bump stack pointer by the additional space.
4172 Note that in C the default argument promotions
4173 will prevent such mismatches. */
4175 size = GET_MODE_SIZE (arg->mode);
4176 /* Compute how much space the push instruction will push.
4177 On many machines, pushing a byte will advance the stack
4178 pointer by a halfword. */
4179 #ifdef PUSH_ROUNDING
4180 size = PUSH_ROUNDING (size);
4184 /* Compute how much space the argument should get:
4185 round up to a multiple of the alignment for arguments. */
4186 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4187 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4188 / (PARM_BOUNDARY / BITS_PER_UNIT))
4189 * (PARM_BOUNDARY / BITS_PER_UNIT));
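      /* Worked example (added commentary, assumed numbers): for a 1-byte
	 argument on a target whose PUSH_ROUNDING rounds pushes up to 2 bytes,
	 with PARM_BOUNDARY == 32 bits, SIZE becomes 2 and USED becomes
	 ((2 + 3) / 4) * 4 == 4, so emit_push_insn below is asked to account
	 for USED - SIZE == 2 extra bytes of padding.  */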
4191 /* Compute the alignment of the pushed argument. */
4192 parm_align = arg->locate.boundary;
4193 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4195 int pad = used - size;
4198 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4199 parm_align = MIN (parm_align, pad_align);
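	      /* Added commentary: PAD & -PAD isolates the lowest set bit of
		 PAD, i.e. the largest power of two that divides it.  With
		 USED == 8 and SIZE == 2, PAD is 6 and PAD & -PAD is 2, so the
		 data is only guaranteed 2-byte (16-bit) alignment within its
		 slot and PARM_ALIGN is capped accordingly.  */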
4203 /* This isn't already where we want it on the stack, so put it there.
4204 This can either be done with push or copy insns. */
4205 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4206 parm_align, partial, reg, used - size, argblock,
4207 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4208 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4210 /* Unless this is a partially-in-register argument, the argument is now in the stack.  */
4213 arg->value = arg->stack;
4217 /* BLKmode, at least partly to be pushed. */
4219 unsigned int parm_align;
4223 /* Pushing a nonscalar.
4224 If part is passed in registers, PARTIAL says how much
4225 and emit_push_insn will take care of putting it there. */
4227 /* Round its size up to a multiple
4228 of the allocation unit for arguments. */
4230 if (arg->locate.size.var != 0)
4233 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4237 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4238 for BLKmode is careful to avoid it. */
4239 excess = (arg->locate.size.constant
4240 - int_size_in_bytes (TREE_TYPE (pval))
4242 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4243 NULL_RTX, TYPE_MODE (sizetype),
4247 parm_align = arg->locate.boundary;
4249 /* When an argument is padded down, the block is aligned to
4250 PARM_BOUNDARY, but the actual argument isn't. */
4251 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4253 if (arg->locate.size.var)
4254 parm_align = BITS_PER_UNIT;
4257 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4258 parm_align = MIN (parm_align, excess_align);
4262 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4264 /* emit_push_insn might not work properly if arg->value and
4265 argblock + arg->locate.offset areas overlap. */
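	  /* Worked example (added commentary, assumed numbers): if the
	     incoming value sits at offset 16 from the incoming argument
	     pointer and occupies 8 bytes, while the outgoing slot starts at
	     offset 20, the byte ranges [16,24) and [20,28) overlap; copying
	     with emit_push_insn could then clobber the source before it has
	     been fully read, so the checks below set sibcall_failure.  */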
4269 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4270 || (GET_CODE (XEXP (x, 0)) == PLUS
4271 && XEXP (XEXP (x, 0), 0) ==
4272 crtl->args.internal_arg_pointer
4273 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4275 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4276 i = INTVAL (XEXP (XEXP (x, 0), 1));
4278 /* expand_call should ensure this. */
4279 gcc_assert (!arg->locate.offset.var
4280 && arg->locate.size.var == 0
4281 && CONST_INT_P (size_rtx));
4283 if (arg->locate.offset.constant > i)
4285 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4286 sibcall_failure = 1;
4288 else if (arg->locate.offset.constant < i)
4290 /* Use arg->locate.size.constant instead of size_rtx
4291 because we only care about the part of the argument on the stack.  */
4293 if (i < (arg->locate.offset.constant
4294 + arg->locate.size.constant))
4295 sibcall_failure = 1;
4299 /* Even though they appear to be at the same location,
4300 if part of the outgoing argument is in registers,
4301 they aren't really at the same location. Check for
4302 this by making sure that the incoming size is the
4303 same as the outgoing size. */
4304 if (arg->locate.size.constant != INTVAL (size_rtx))
4305 sibcall_failure = 1;
4310 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4311 parm_align, partial, reg, excess, argblock,
4312 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4313 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4315 /* Unless this is a partially-in-register argument, the argument is now in the stack.
4318 ??? Unlike the case above, in which we want the actual
4319 address of the data, so that we can load it directly into a
4320 register, here we want the address of the stack slot, so that
4321 it's properly aligned for word-by-word copying or something
4322 like that. It's not clear that this is always correct. */
4324 arg->value = arg->stack_slot;
4327 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4329 tree type = TREE_TYPE (arg->tree_value);
4331 = emit_group_load_into_temps (arg->reg, arg->value, type,
4332 int_size_in_bytes (type));
4335 /* Mark all slots this store used. */
4336 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4337 && argblock && ! variable_size && arg->stack)
4338 for (i = lower_bound; i < upper_bound; i++)
4339 stack_usage_map[i] = 1;
4341 /* Once we have pushed something, pops can't safely
4342 be deferred during the rest of the arguments. */
4345 /* Free any temporary slots made in processing this argument. Show
4346 that we might have taken the address of something and pushed that as an operand.  */
4348 preserve_temp_slots (NULL_RTX);
4352 return sibcall_failure;
4355 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4358 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4364 /* If the type has variable size... */
4365 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4368 /* If the type is marked as addressable (it is required
4369 to be constructed into the stack)... */
4370 if (TREE_ADDRESSABLE (type))
4376 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4377 takes trailing padding of a structure into account. */
4378 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4381 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4386 /* If the type has variable size... */
4387 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4390 /* If the type is marked as addressable (it is required
4391 to be constructed into the stack)... */
4392 if (TREE_ADDRESSABLE (type))
4395 /* If the padding and mode of the type is such that a copy into
4396 a register would put it into the wrong part of the register. */
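  /* Added illustration (assumed numbers): a 6-byte BLKmode structure with
     PARM_BOUNDARY == 32 is not a multiple of 4 bytes; if the target pads it
     upward on a big-endian machine (or downward on a little-endian one),
     copying it into a register would leave the data at the wrong end of the
     register, so such a type has to be passed on the stack.  */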
4398 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4399 && (FUNCTION_ARG_PADDING (mode, type)
4400 == (BYTES_BIG_ENDIAN ? upward : downward)))