1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
24 #include "coretypes.h"
39 #include "langhooks.h"
44 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
45 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
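/* Illustrative arithmetic (editor's example, not from the original file):
   on a target where PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT
   is 8, STACK_BYTES evaluates to 128 / 8 == 16 bytes. */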
47 /* Data structure and subroutines used within expand_call. */
51 /* Tree node for this argument. */
53 /* Mode for value; TYPE_MODE unless promoted. */
54 enum machine_mode mode;
55 /* Current RTL value for argument, or 0 if it isn't precomputed. */
57 /* Initially-computed RTL value for argument; only for const functions. */
59 /* Register to pass this argument in, 0 if passed on stack, or a
60 PARALLEL if the arg is to be copied into multiple non-contiguous
61 registers. */
63 /* Register to pass this argument in when generating tail call sequence.
64 This is not the same register as for normal calls on machines with
65 register windows. */
67 /* If REG was promoted from the actual mode of the argument expression,
68 indicates whether the promotion is sign- or zero-extended. */
70 /* Number of registers to use when only part of the arg goes in registers;
71 0 means either the whole arg is in registers or it is not passed in registers at all. */
73 /* Nonzero if argument must be passed on stack.
74 Note that some arguments may be passed on the stack
75 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
76 pass_on_stack identifies arguments that *cannot* go in registers. */
78 /* Some fields packaged up for locate_and_pad_parm. */
79 struct locate_and_pad_arg_data locate;
80 /* Location on the stack at which parameter should be stored. The store
81 has already been done if STACK == VALUE. */
83 /* Location on the stack of the start of this argument slot. This can
84 differ from STACK if this arg pads downward. This location is known
85 to be aligned to FUNCTION_ARG_BOUNDARY. */
87 /* Place that this stack area has been saved, if needed. */
89 /* If an argument's alignment does not permit direct copying into registers,
90 copy in smaller-sized pieces into pseudos. These are stored in a
91 block pointed to by this field. The next field says how many
92 word-sized pseudos we made. */
97 /* A vector of one char per byte of stack space. A byte is nonzero if
98 the corresponding stack location has been used.
99 This vector is used to prevent a function call within an argument from
100 clobbering any stack already set up. */
101 static char *stack_usage_map;
103 /* Size of STACK_USAGE_MAP. */
104 static int highest_outgoing_arg_in_use;
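/* A minimal sketch (editor's illustration; the helper name and shape are
   hypothetical, not part of this file) of how a byte-per-stack-byte map
   like STACK_USAGE_MAP is consulted and updated: first check whether any
   byte of a region is already in use, then mark the whole region used. */
static int
mark_stack_region_used_example (char *map, int map_size, int lower, int upper)
{
  int i, was_in_use = 0;
  for (i = lower; i < upper && i < map_size; i++)
    {
      if (map[i] != 0)
        was_in_use = 1;   /* An earlier argument already occupies this byte. */
      map[i] = 1;         /* Mark the byte as used from now on. */
    }
  return was_in_use;
}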
106 /* A bitmap of virtual-incoming stack space. A bit is set if the
107 corresponding stack location's tail call argument has already been
108 stored into the stack. This bitmap is used to prevent sibling call
109 optimization if the function tries to use its parent's incoming argument
110 slots when they have already been overwritten with tail call arguments. */
111 static sbitmap stored_args_map;
113 /* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118 int stack_arg_under_construction;
120 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
121 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
122 CUMULATIVE_ARGS *);
123 static void precompute_register_parameters (int, struct arg_data *, int *);
124 static int store_one_arg (struct arg_data *, rtx, int, int, int);
125 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
126 static int finalize_must_preallocate (int, int, struct arg_data *,
127 struct args_size *);
128 static void precompute_arguments (int, int, struct arg_data *);
129 static int compute_argument_block_size (int, struct args_size *, int);
130 static void initialize_argument_information (int, struct arg_data *,
131 struct args_size *, int, tree,
132 tree, CUMULATIVE_ARGS *, int,
133 rtx *, int *, int *, int *,
134 bool *, bool);
135 static void compute_argument_addresses (struct arg_data *, rtx, int);
136 static rtx rtx_for_function_call (tree, tree);
137 static void load_register_parameters (struct arg_data *, int, rtx *, int,
138 int, int *);
139 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
140 enum machine_mode, int, va_list);
141 static int special_function_p (tree, int);
142 static int check_sibcall_argument_overlap_1 (rtx);
143 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
145 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
146 int);
147 static tree fix_unsafe_tree (tree);
148 static bool shift_returned_value (tree, rtx *);
150 #ifdef REG_PARM_STACK_SPACE
151 static rtx save_fixed_argument_area (int, rtx, int *, int *);
152 static void restore_fixed_argument_area (rtx, rtx, int, int);
155 /* Force FUNEXP into a form suitable for the address of a CALL,
156 and return that as an rtx. Also load the static chain register
157 if FNDECL is a nested function.
159 CALL_FUSAGE points to a variable holding the prospective
160 CALL_INSN_FUNCTION_USAGE information. */
163 prepare_call_address (rtx funexp, rtx static_chain_value,
164 rtx *call_fusage, int reg_parm_seen, int sibcallp)
166 funexp = protect_from_queue (funexp, 0);
168 /* Make a valid memory address and copy constants through pseudo-regs,
169 but not for a constant address if -fno-function-cse. */
170 if (GET_CODE (funexp) != SYMBOL_REF)
171 /* If we are using registers for parameters, force the
172 function address into a register now. */
173 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
174 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
175 : memory_address (FUNCTION_MODE, funexp));
178 #ifndef NO_FUNCTION_CSE
179 if (optimize && ! flag_no_function_cse)
180 funexp = force_reg (Pmode, funexp);
184 if (static_chain_value != 0)
186 static_chain_value = convert_memory_address (Pmode, static_chain_value);
187 emit_move_insn (static_chain_rtx, static_chain_value);
189 if (REG_P (static_chain_rtx))
190 use_reg (call_fusage, static_chain_rtx);
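/* Hypothetical usage sketch (editor's note, not from the excerpt): a caller
   such as expand_call below would typically do
     funexp = prepare_call_address (funexp, static_chain_value,
                                    &call_fusage, reg_parm_seen, 0);
   before handing FUNEXP to emit_call_1. */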
196 /* Generate instructions to call function FUNEXP,
197 and optionally pop the results.
198 The CALL_INSN is the first insn generated.
200 FNDECL is the declaration node of the function. This is given to the
201 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
203 FUNTYPE is the data type of the function. This is given to the macro
204 RETURN_POPS_ARGS to determine whether this function pops its own args.
205 We used to allow an identifier for library functions, but that doesn't
206 work when the return type is an aggregate type and the calling convention
207 says that the pointer to this aggregate is to be popped by the callee.
209 STACK_SIZE is the number of bytes of arguments on the stack,
210 ROUNDED_STACK_SIZE is that number rounded up to
211 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
212 both to put into the call insn and to generate explicit popping
215 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
216 It is zero if this call doesn't want a structure value.
218 NEXT_ARG_REG is the rtx that results from executing
219 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
220 just after all the args have had their registers assigned.
221 This could be whatever you like, but normally it is the first
222 arg-register beyond those used for args in this call,
223 or 0 if all the arg-registers are used in this call.
224 It is passed on to `gen_call' so you can put this info in the call insn.
226 VALREG is a hard register in which a value is returned,
227 or 0 if the call does not return a value.
229 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
230 the args to this call were processed.
231 We restore `inhibit_defer_pop' to that value.
233 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
234 denote registers used by the called function. */
237 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
238 tree funtype ATTRIBUTE_UNUSED,
239 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
240 HOST_WIDE_INT rounded_stack_size,
241 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
242 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
243 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
244 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
246 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
248 int already_popped = 0;
249 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
250 #if defined (HAVE_call) && defined (HAVE_call_value)
251 rtx struct_value_size_rtx;
252 struct_value_size_rtx = GEN_INT (struct_value_size);
255 #ifdef CALL_POPS_ARGS
256 n_popped += CALL_POPS_ARGS (* args_so_far);
259 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
260 and we don't want to load it into a register as an optimization,
261 because prepare_call_address already did it if it should be done. */
262 if (GET_CODE (funexp) != SYMBOL_REF)
263 funexp = memory_address (FUNCTION_MODE, funexp);
265 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
266 if ((ecf_flags & ECF_SIBCALL)
267 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
268 && (n_popped > 0 || stack_size == 0))
270 rtx n_pop = GEN_INT (n_popped);
273 /* If this subroutine pops its own args, record that in the call insn
274 if possible, for the sake of frame pointer elimination. */
277 pat = GEN_SIBCALL_VALUE_POP (valreg,
278 gen_rtx_MEM (FUNCTION_MODE, funexp),
279 rounded_stack_size_rtx, next_arg_reg,
282 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
283 rounded_stack_size_rtx, next_arg_reg, n_pop);
285 emit_call_insn (pat);
291 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
292 /* If the target has "call" or "call_value" insns, then prefer them
293 if no arguments are actually popped. If the target does not have
294 "call" or "call_value" insns, then we must use the popping versions
295 even if the call has no arguments to pop. */
296 #if defined (HAVE_call) && defined (HAVE_call_value)
297 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
298 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
300 if (HAVE_call_pop && HAVE_call_value_pop)
303 rtx n_pop = GEN_INT (n_popped);
306 /* If this subroutine pops its own args, record that in the call insn
307 if possible, for the sake of frame pointer elimination. */
310 pat = GEN_CALL_VALUE_POP (valreg,
311 gen_rtx_MEM (FUNCTION_MODE, funexp),
312 rounded_stack_size_rtx, next_arg_reg, n_pop);
314 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
315 rounded_stack_size_rtx, next_arg_reg, n_pop);
317 emit_call_insn (pat);
323 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
324 if ((ecf_flags & ECF_SIBCALL)
325 && HAVE_sibcall && HAVE_sibcall_value)
328 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
329 gen_rtx_MEM (FUNCTION_MODE, funexp),
330 rounded_stack_size_rtx,
331 next_arg_reg, NULL_RTX));
333 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
334 rounded_stack_size_rtx, next_arg_reg,
335 struct_value_size_rtx));
340 #if defined (HAVE_call) && defined (HAVE_call_value)
341 if (HAVE_call && HAVE_call_value)
344 emit_call_insn (GEN_CALL_VALUE (valreg,
345 gen_rtx_MEM (FUNCTION_MODE, funexp),
346 rounded_stack_size_rtx, next_arg_reg,
349 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
350 rounded_stack_size_rtx, next_arg_reg,
351 struct_value_size_rtx));
357 /* Find the call we just emitted. */
358 call_insn = last_call_insn ();
360 /* Mark memory as used for "pure" function call. */
361 if (ecf_flags & ECF_PURE)
365 gen_rtx_USE (VOIDmode,
366 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
369 /* Put the register usage information there. */
370 add_function_usage_to (call_insn, call_fusage);
372 /* If this is a const call, then set the insn's unchanging bit. */
373 if (ecf_flags & (ECF_CONST | ECF_PURE))
374 CONST_OR_PURE_CALL_P (call_insn) = 1;
376 /* If this call can't throw, attach a REG_EH_REGION reg note to that
377 effect. */
378 if (ecf_flags & ECF_NOTHROW)
379 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
380 REG_NOTES (call_insn));
383 int rn = lookup_stmt_eh_region (fntree);
385 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
386 throw, which we already took care of. */
388 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
389 REG_NOTES (call_insn));
390 note_current_region_may_contain_throw ();
393 if (ecf_flags & ECF_NORETURN)
394 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
395 REG_NOTES (call_insn));
396 if (ecf_flags & ECF_ALWAYS_RETURN)
397 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
398 REG_NOTES (call_insn));
400 if (ecf_flags & ECF_RETURNS_TWICE)
402 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
403 REG_NOTES (call_insn));
404 current_function_calls_setjmp = 1;
407 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
409 /* Restore this now, so that we do defer pops for this call's args
410 if the context of the call as a whole permits. */
411 inhibit_defer_pop = old_inhibit_defer_pop;
416 CALL_INSN_FUNCTION_USAGE (call_insn)
417 = gen_rtx_EXPR_LIST (VOIDmode,
418 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
419 CALL_INSN_FUNCTION_USAGE (call_insn));
420 rounded_stack_size -= n_popped;
421 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
422 stack_pointer_delta -= n_popped;
425 if (!ACCUMULATE_OUTGOING_ARGS)
427 /* If returning from the subroutine does not automatically pop the args,
428 we need an instruction to pop them sooner or later.
429 Perhaps do it now; perhaps just record how much space to pop later.
431 If returning from the subroutine does pop the args, indicate that the
432 stack pointer will be changed. */
434 if (rounded_stack_size != 0)
436 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
437 /* Just pretend we did the pop. */
438 stack_pointer_delta -= rounded_stack_size;
439 else if (flag_defer_pop && inhibit_defer_pop == 0
440 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
441 pending_stack_adjust += rounded_stack_size;
443 adjust_stack (rounded_stack_size_rtx);
446 /* When we accumulate outgoing args, we must avoid any stack manipulations.
447 Restore the stack pointer to its original value now. Usually
448 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
449 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
450 popping variants of functions exist as well.
452 ??? We may optimize similar to defer_pop above, but it is
453 probably not worthwhile.
455 ??? It will be worthwhile to enable combine_stack_adjustments even for
456 such machines. */
458 anti_adjust_stack (GEN_INT (n_popped));
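/* Worked example (editor's illustration, not from the source): suppose a
   call pushes a 20-byte argument block rounded up to 32 bytes. If the
   callee pops those 32 bytes itself (n_popped == 32), rounded_stack_size
   drops to 0 above and no explicit pop is emitted; if n_popped == 0, the
   32 bytes are either folded into pending_stack_adjust or popped with
   adjust_stack, depending on flag_defer_pop. */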
461 /* Determine if the function identified by NAME and FNDECL is one with
462 special properties we wish to know about.
464 For example, if the function might return more than one time (setjmp), then
465 set RETURNS_TWICE to a nonzero value.
467 Similarly set LONGJMP if the function is in the longjmp family.
469 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
470 space from the stack such as alloca. */
473 special_function_p (tree fndecl, int flags)
475 if (fndecl && DECL_NAME (fndecl)
476 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
477 /* Exclude functions not at the file scope, or not `extern',
478 since they are not the magic functions we would otherwise
479 think they are.
480 FIXME: this should be handled with attributes, not with this
481 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
482 because you can declare fork() inside a function if you
483 wish. */
484 && (DECL_CONTEXT (fndecl) == NULL_TREE
485 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
486 && TREE_PUBLIC (fndecl))
488 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
489 const char *tname = name;
491 /* We assume that alloca will always be called by name. It
492 makes no sense to pass it as a pointer-to-function to
493 anything that does not understand its behavior. */
494 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
496 && ! strcmp (name, "alloca"))
497 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
499 && ! strcmp (name, "__builtin_alloca"))))
500 flags |= ECF_MAY_BE_ALLOCA;
502 /* Disregard prefix _, __ or __x. */
505 if (name[1] == '_' && name[2] == 'x')
507 else if (name[1] == '_')
516 && (! strcmp (tname, "setjmp")
517 || ! strcmp (tname, "setjmp_syscall")))
519 && ! strcmp (tname, "sigsetjmp"))
521 && ! strcmp (tname, "savectx")))
522 flags |= ECF_RETURNS_TWICE;
525 && ! strcmp (tname, "siglongjmp"))
526 flags |= ECF_LONGJMP;
528 else if ((tname[0] == 'q' && tname[1] == 's'
529 && ! strcmp (tname, "qsetjmp"))
530 || (tname[0] == 'v' && tname[1] == 'f'
531 && ! strcmp (tname, "vfork")))
532 flags |= ECF_RETURNS_TWICE;
534 else if (tname[0] == 'l' && tname[1] == 'o'
535 && ! strcmp (tname, "longjmp"))
536 flags |= ECF_LONGJMP;
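/* Stand-alone sketch (editor's illustration; the helper is hypothetical)
   of the prefix stripping performed above, under which "__xsetjmp",
   "__setjmp" and "_setjmp" all compare equal to "setjmp". */
static const char *
strip_magic_prefix_example (const char *name)
{
  if (name[0] == '_')
    {
      if (name[1] == '_' && name[2] == 'x')
        name += 3;   /* Skip a __x prefix. */
      else if (name[1] == '_')
        name += 2;   /* Skip a __ prefix. */
      else
        name += 1;   /* Skip a _ prefix. */
    }
  return name;
}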
542 /* Return nonzero when FNDECL represents a setjmp-family function, i.e. one that may return more than once. */
545 setjmp_call_p (tree fndecl)
547 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
550 /* Return true when EXP contains a call to alloca. */
552 alloca_call_p (tree exp)
554 if (TREE_CODE (exp) == CALL_EXPR
555 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
556 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
558 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
559 0) & ECF_MAY_BE_ALLOCA))
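/* The tree shape matched above (editor's illustration):
     CALL_EXPR
       operand 0: ADDR_EXPR
         operand 0: FUNCTION_DECL   <- handed to special_function_p
   i.e. only direct calls through a named function are recognized. */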
564 /* Detect flags (function attributes) from the function decl or type node. */
567 flags_from_decl_or_type (tree exp)
574 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
575 type = TREE_TYPE (exp);
579 if (i->pure_function)
580 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
581 if (i->const_function)
582 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
585 /* The function exp may have the `malloc' attribute. */
586 if (DECL_IS_MALLOC (exp))
589 /* The function exp may have the `pure' attribute. */
590 if (DECL_IS_PURE (exp))
591 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
593 if (TREE_NOTHROW (exp))
594 flags |= ECF_NOTHROW;
596 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
597 flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
599 flags = special_function_p (exp, flags);
601 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
604 if (TREE_THIS_VOLATILE (exp))
605 flags |= ECF_NORETURN;
607 /* Mark if the function returns with the stack pointer depressed. We
608 cannot consider it pure or constant in that case. */
609 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
611 flags |= ECF_SP_DEPRESSED;
612 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
618 /* Detect flags from a CALL_EXPR. */
621 call_expr_flags (tree t)
624 tree decl = get_callee_fndecl (t);
627 flags = flags_from_decl_or_type (decl);
630 t = TREE_TYPE (TREE_OPERAND (t, 0));
631 if (t && TREE_CODE (t) == POINTER_TYPE)
632 flags = flags_from_decl_or_type (TREE_TYPE (t));
640 /* Precompute all register parameters as described by ARGS, storing values
641 into fields within the ARGS array.
643 NUM_ACTUALS indicates the total number of elements in the ARGS array.
645 Set REG_PARM_SEEN if we encounter a register parameter. */
648 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
654 for (i = 0; i < num_actuals; i++)
655 if (args[i].reg != 0 && ! args[i].pass_on_stack)
659 if (args[i].value == 0)
662 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
664 preserve_temp_slots (args[i].value);
667 /* ANSI doesn't require a sequence point here,
668 but PCC has one, so this will avoid some problems. */
672 /* If the value is a non-legitimate constant, force it into a
673 pseudo now. TLS symbols sometimes need a call to resolve. */
674 if (CONSTANT_P (args[i].value)
675 && !LEGITIMATE_CONSTANT_P (args[i].value))
676 args[i].value = force_reg (args[i].mode, args[i].value);
678 /* If we are to promote the function arg to a wider mode,
679 do it now. */
681 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
683 = convert_modes (args[i].mode,
684 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
685 args[i].value, args[i].unsignedp);
687 /* If the value is expensive, and we are inside an appropriately
688 short loop, put the value into a pseudo and then put the pseudo
689 into the hard reg.
691 For small register classes, also do this if this call uses
692 register parameters. This is to avoid reload conflicts while
693 loading the parameters registers. */
695 if ((! (REG_P (args[i].value)
696 || (GET_CODE (args[i].value) == SUBREG
697 && REG_P (SUBREG_REG (args[i].value)))))
698 && args[i].mode != BLKmode
699 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
700 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
701 || preserve_subexpressions_p ()))
702 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
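/* Example of the heuristic above (editor's note): by rtx_cost a plain
   (reg) or small constant costs at most one insn and is left alone,
   while something like (mult (reg) (reg)) costs more and is therefore
   copied into a pseudo first, keeping the expensive computation out of
   the parameter-loading sequence. */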
706 #ifdef REG_PARM_STACK_SPACE
708 /* The argument list is the property of the called routine and it
709 may clobber it. If the fixed area has been used for previous
710 parameters, we must save and restore it. */
713 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
718 /* Compute the boundary of the area that needs to be saved, if any. */
719 high = reg_parm_stack_space;
720 #ifdef ARGS_GROW_DOWNWARD
723 if (high > highest_outgoing_arg_in_use)
724 high = highest_outgoing_arg_in_use;
726 for (low = 0; low < high; low++)
727 if (stack_usage_map[low] != 0)
730 enum machine_mode save_mode;
735 while (stack_usage_map[--high] == 0)
736 ;
738 *low_to_save = low;
739 *high_to_save = high;
741 num_to_save = high - low + 1;
742 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
744 /* If we don't have the required alignment, must do this
745 in BLKmode. */
746 if ((low & (MIN (GET_MODE_SIZE (save_mode),
747 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
750 #ifdef ARGS_GROW_DOWNWARD
755 stack_area = gen_rtx_MEM (save_mode,
756 memory_address (save_mode,
757 plus_constant (argblock,
760 set_mem_align (stack_area, PARM_BOUNDARY);
761 if (save_mode == BLKmode)
763 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
764 emit_block_move (validize_mem (save_area), stack_area,
765 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
769 save_area = gen_reg_rtx (save_mode);
770 emit_move_insn (save_area, stack_area);
780 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
782 enum machine_mode save_mode = GET_MODE (save_area);
786 #ifdef ARGS_GROW_DOWNWARD
787 delta = -high_to_save;
791 stack_area = gen_rtx_MEM (save_mode,
792 memory_address (save_mode,
793 plus_constant (argblock, delta)));
794 set_mem_align (stack_area, PARM_BOUNDARY);
796 if (save_mode != BLKmode)
797 emit_move_insn (stack_area, save_area);
799 emit_block_move (stack_area, validize_mem (save_area),
800 GEN_INT (high_to_save - low_to_save + 1),
803 #endif /* REG_PARM_STACK_SPACE */
805 /* If any elements in ARGS refer to parameters that are to be passed in
806 registers, but not in memory, and whose alignment does not permit a
807 direct copy into registers, copy the values into a group of pseudos
808 which we will later copy into the appropriate hard registers.
810 Pseudos for each unaligned argument will be stored into the array
811 args[argnum].aligned_regs. The caller is responsible for deallocating
812 the aligned_regs array if it is nonzero. */
815 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
819 for (i = 0; i < num_actuals; i++)
820 if (args[i].reg != 0 && ! args[i].pass_on_stack
821 && args[i].mode == BLKmode
822 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
823 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
825 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
826 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
827 int endian_correction = 0;
829 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
830 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
832 /* Structures smaller than a word are normally aligned to the
833 least significant byte. On a BYTES_BIG_ENDIAN machine,
834 this means we must skip the empty high order bytes when
835 calculating the bit offset. */
836 if (bytes < UNITS_PER_WORD
837 #ifdef BLOCK_REG_PADDING
838 && (BLOCK_REG_PADDING (args[i].mode,
839 TREE_TYPE (args[i].tree_value), 1)
845 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
847 for (j = 0; j < args[i].n_aligned_regs; j++)
849 rtx reg = gen_reg_rtx (word_mode);
850 rtx word = operand_subword_force (args[i].value, j, BLKmode);
851 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
853 args[i].aligned_regs[j] = reg;
854 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
855 word_mode, word_mode);
857 /* There is no need to restrict this code to loading items
858 in TYPE_ALIGN sized hunks. The bitfield instructions can
859 load up entire word sized registers efficiently.
861 ??? This may not be needed anymore.
862 We used to emit a clobber here, but that doesn't let later
863 passes optimize the instructions we emit. By storing 0 into
864 the register, later passes know that the first AND to zero out the
865 bitfield being set in the register is unnecessary. The store
866 of 0 will be deleted, as will at least the first AND. */
868 emit_move_insn (reg, const0_rtx);
870 bytes -= bitsize / BITS_PER_UNIT;
871 store_bit_field (reg, bitsize, endian_correction, word_mode,
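/* Worked example (editor's illustration): for a 3-byte argument on a
   32-bit BYTES_BIG_ENDIAN target, endian_correction = 32 - 3*8 = 8, so
   store_bit_field places the 24 significant bits at bit offset 8 within
   the word register, skipping the empty high-order bytes as the comment
   above describes. */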
877 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
880 NUM_ACTUALS is the total number of parameters.
882 N_NAMED_ARGS is the total number of named arguments.
884 FNDECL is the tree code for the target of this call (if known)
886 ARGS_SO_FAR holds state needed by the target to know where to place
887 the next argument.
889 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
890 for arguments which are passed in registers.
892 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
893 and may be modified by this routine.
895 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
896 flags which may be modified by this routine.
898 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
899 argument that requires allocation of stack space.
901 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
902 the thunked-to function. */
905 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
906 struct arg_data *args,
907 struct args_size *args_size,
908 int n_named_args ATTRIBUTE_UNUSED,
909 tree actparms, tree fndecl,
910 CUMULATIVE_ARGS *args_so_far,
911 int reg_parm_stack_space,
912 rtx *old_stack_level, int *old_pending_adj,
913 int *must_preallocate, int *ecf_flags,
914 bool *may_tailcall, bool call_from_thunk_p)
916 /* 1 if scanning parms front to back, -1 if scanning back to front. */
919 /* Count arg position in order args appear. */
925 args_size->constant = 0;
928 /* In this loop, we consider args in the order they are written.
929 We fill up ARGS from the front or from the back if necessary
930 so that in any case the first arg to be pushed ends up at the front. */
932 if (PUSH_ARGS_REVERSED)
934 i = num_actuals - 1, inc = -1;
935 /* In this case, must reverse order of args
936 so that we compute and push the last arg first. */
943 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
944 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
946 tree type = TREE_TYPE (TREE_VALUE (p));
948 enum machine_mode mode;
950 args[i].tree_value = TREE_VALUE (p);
952 /* Replace erroneous argument with constant zero. */
953 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
954 args[i].tree_value = integer_zero_node, type = integer_type_node;
956 /* If TYPE is a transparent union, pass things the way we would
957 pass the first field of the union. We have already verified that
958 the modes are the same. */
959 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
960 type = TREE_TYPE (TYPE_FIELDS (type));
962 /* Decide where to pass this arg.
964 args[i].reg is nonzero if all or part is passed in registers.
966 args[i].partial is nonzero if part but not all is passed in registers,
967 and the exact value says how many words are passed in registers.
969 args[i].pass_on_stack is nonzero if the argument must at least be
970 computed on the stack. It may then be loaded back into registers
971 if args[i].reg is nonzero.
973 These decisions are driven by the FUNCTION_... macros and must agree
974 with those made by function.c. */
976 /* See if this argument should be passed by invisible reference. */
977 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
978 || TREE_ADDRESSABLE (type)
979 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
980 type, argpos < n_named_args)
983 /* If we're compiling a thunk, pass through invisible
984 references instead of making a copy. */
985 if (call_from_thunk_p
986 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
987 type, argpos < n_named_args)
988 /* If it's in a register, we must make a copy of it too. */
989 /* ??? Is this a sufficient test? Is there a better one? */
990 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
991 && REG_P (DECL_RTL (args[i].tree_value)))
992 && ! TREE_ADDRESSABLE (type))
995 /* C++ uses a TARGET_EXPR to indicate that we want to make a
996 new object from the argument. If we are passing by
997 invisible reference, the callee will do that for us, so we
998 can strip off the TARGET_EXPR. This is not always safe,
999 but it is safe in the only case where this is a useful
1000 optimization; namely, when the argument is a plain object.
1001 In that case, the frontend is just asking the backend to
1002 make a bitwise copy of the argument. */
1004 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1005 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1006 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1007 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1009 /* We can't use sibcalls if a callee-copied argument is stored
1010 in the current function's frame. */
1011 if (!call_from_thunk_p
1012 && (!DECL_P (args[i].tree_value)
1013 || !TREE_STATIC (args[i].tree_value)))
1014 *may_tailcall = false;
1016 args[i].tree_value = build1 (ADDR_EXPR,
1017 build_pointer_type (type),
1018 args[i].tree_value);
1019 type = build_pointer_type (type);
1021 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1023 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1024 We implement this by passing the address of the temporary
1025 rather than expanding it into another allocated slot. */
1026 args[i].tree_value = build1 (ADDR_EXPR,
1027 build_pointer_type (type),
1028 args[i].tree_value);
1029 type = build_pointer_type (type);
1030 *may_tailcall = false;
1034 /* We make a copy of the object and pass the address to the
1035 function being called. */
1038 if (!COMPLETE_TYPE_P (type)
1039 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1040 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1041 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1042 STACK_CHECK_MAX_VAR_SIZE))))
1044 /* This is a variable-sized object. Make space on the stack
1045 for it. */
1046 rtx size_rtx = expr_size (TREE_VALUE (p));
1048 if (*old_stack_level == 0)
1050 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1051 *old_pending_adj = pending_stack_adjust;
1052 pending_stack_adjust = 0;
1055 copy = gen_rtx_MEM (BLKmode,
1056 allocate_dynamic_stack_space
1057 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1058 set_mem_attributes (copy, type, 1);
1061 copy = assign_temp (type, 0, 1, 0);
1063 store_expr (args[i].tree_value, copy, 0);
1064 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1066 args[i].tree_value = build1 (ADDR_EXPR,
1067 build_pointer_type (type),
1068 make_tree (type, copy));
1069 type = build_pointer_type (type);
1070 *may_tailcall = false;
1074 mode = TYPE_MODE (type);
1075 unsignedp = TYPE_UNSIGNED (type);
1077 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1078 mode = promote_mode (type, mode, &unsignedp, 1);
1080 args[i].unsignedp = unsignedp;
1081 args[i].mode = mode;
1083 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1084 argpos < n_named_args);
1085 #ifdef FUNCTION_INCOMING_ARG
1086 /* If this is a sibling call and the machine has register windows, the
1087 register window has to be unwound before calling the routine, so
1088 arguments have to go into the incoming registers. */
1089 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1090 argpos < n_named_args);
1092 args[i].tail_call_reg = args[i].reg;
1097 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1098 argpos < n_named_args);
1100 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1102 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1103 it means that we are to pass this arg in the register(s) designated
1104 by the PARALLEL, but also to pass it in the stack. */
1105 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1106 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1107 args[i].pass_on_stack = 1;
1109 /* If this is an addressable type, we must preallocate the stack
1110 since we must evaluate the object into its final location.
1112 If this is to be passed in both registers and the stack, it is simpler
1113 to preallocate. */
1114 if (TREE_ADDRESSABLE (type)
1115 || (args[i].pass_on_stack && args[i].reg != 0))
1116 *must_preallocate = 1;
1118 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1119 we cannot consider this function call constant. */
1120 if (TREE_ADDRESSABLE (type))
1121 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1123 /* Compute the stack-size of this argument. */
1124 if (args[i].reg == 0 || args[i].partial != 0
1125 || reg_parm_stack_space > 0
1126 || args[i].pass_on_stack)
1127 locate_and_pad_parm (mode, type,
1128 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1133 args[i].pass_on_stack ? 0 : args[i].partial,
1134 fndecl, args_size, &args[i].locate);
1135 #ifdef BLOCK_REG_PADDING
1137 /* The argument is passed entirely in registers. See at which
1138 end it should be padded. */
1139 args[i].locate.where_pad =
1140 BLOCK_REG_PADDING (mode, type,
1141 int_size_in_bytes (type) <= UNITS_PER_WORD);
1144 /* Update ARGS_SIZE, the total stack space for args so far. */
1146 args_size->constant += args[i].locate.size.constant;
1147 if (args[i].locate.size.var)
1148 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1150 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1151 have been used, etc. */
1153 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1154 argpos < n_named_args);
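/* Illustrative note (editor's example): with PUSH_ARGS_REVERSED and a
   call f (a, b, c), the loop above walks the actuals in written order
   but fills the array backwards, so args[2] = a, args[1] = b and
   args[0] = c; args[0], the first element pushed, is therefore the
   last argument in source order. */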
1158 /* Update ARGS_SIZE to contain the total size for the argument block.
1159 Return the original constant component of the argument block's size.
1161 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1162 for arguments passed in registers. */
1165 compute_argument_block_size (int reg_parm_stack_space,
1166 struct args_size *args_size,
1167 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1169 int unadjusted_args_size = args_size->constant;
1171 /* For accumulate outgoing args mode we don't need to align, since the frame
1172 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1173 backends from generating misaligned frame sizes. */
1174 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1175 preferred_stack_boundary = STACK_BOUNDARY;
1177 /* Compute the actual size of the argument block required. The variable
1178 and constant sizes must be combined, the size may have to be rounded,
1179 and there may be a minimum required size. */
1183 args_size->var = ARGS_SIZE_TREE (*args_size);
1184 args_size->constant = 0;
1186 preferred_stack_boundary /= BITS_PER_UNIT;
1187 if (preferred_stack_boundary > 1)
1189 /* We don't handle this case yet. To handle it correctly we have
1190 to add the delta, round and subtract the delta.
1191 Currently no machine description requires this support. */
1192 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1193 abort ();
1194 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1197 if (reg_parm_stack_space > 0)
1200 = size_binop (MAX_EXPR, args_size->var,
1201 ssize_int (reg_parm_stack_space));
1203 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1204 /* The area corresponding to register parameters is not to count in
1205 the size of the block we need. So make the adjustment. */
1207 = size_binop (MINUS_EXPR, args_size->var,
1208 ssize_int (reg_parm_stack_space));
1214 preferred_stack_boundary /= BITS_PER_UNIT;
1215 if (preferred_stack_boundary < 1)
1216 preferred_stack_boundary = 1;
1217 args_size->constant = (((args_size->constant
1218 + stack_pointer_delta
1219 + preferred_stack_boundary - 1)
1220 / preferred_stack_boundary
1221 * preferred_stack_boundary)
1222 - stack_pointer_delta);
1224 args_size->constant = MAX (args_size->constant,
1225 reg_parm_stack_space);
1227 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1228 args_size->constant -= reg_parm_stack_space;
1231 return unadjusted_args_size;
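/* A stand-alone restatement (editor's sketch; the helper is hypothetical)
   of the rounding arithmetic above. */
static int
round_arg_block_example (int size, int delta, int boundary)
{
  /* Round SIZE + DELTA up to a multiple of BOUNDARY, then subtract DELTA
     again so that only the argument block itself is enlarged. */
  return (size + delta + boundary - 1) / boundary * boundary - delta;
}
/* E.g. round_arg_block_example (20, 4, 16) == 28: 28 + 4 is 32, a
   multiple of 16, so the block ends exactly on the boundary. */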
1234 /* Precompute parameters as needed for a function call.
1236 FLAGS is mask of ECF_* constants.
1238 NUM_ACTUALS is the number of arguments.
1240 ARGS is an array containing information for each argument; this
1241 routine fills in the INITIAL_VALUE and VALUE fields for each
1242 precomputed argument. */
1245 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1249 /* If this is a libcall, then precompute all arguments so that we do not
1250 get extraneous instructions emitted as part of the libcall sequence. */
1251 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1254 for (i = 0; i < num_actuals; i++)
1256 enum machine_mode mode;
1258 /* If this is an addressable type, we cannot pre-evaluate it. */
1259 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1260 abort ();
1262 args[i].value
1263 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1265 /* ANSI doesn't require a sequence point here,
1266 but PCC has one, so this will avoid some problems. */
1269 args[i].initial_value = args[i].value
1270 = protect_from_queue (args[i].value, 0);
1272 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1273 if (mode != args[i].mode)
1276 = convert_modes (args[i].mode, mode,
1277 args[i].value, args[i].unsignedp);
1278 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1279 /* CSE will replace this only if it contains args[i].value
1280 pseudo, so convert it down to the declared mode using
1281 a SUBREG. */
1282 if (REG_P (args[i].value)
1283 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1285 args[i].initial_value
1286 = gen_lowpart_SUBREG (mode, args[i].value);
1287 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1288 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1296 /* Given the current state of MUST_PREALLOCATE and information about
1297 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1298 compute and return the final value for MUST_PREALLOCATE. */
1301 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1303 /* See if we have or want to preallocate stack space.
1305 If we would have to push a partially-in-regs parm
1306 before other stack parms, preallocate stack space instead.
1308 If the size of some parm is not a multiple of the required stack
1309 alignment, we must preallocate.
1311 If the total size of arguments that would otherwise create a copy in
1312 a temporary (such as a CALL) is more than half the total argument list
1313 size, preallocation is faster.
1315 Another reason to preallocate is if we have a machine (like the m88k)
1316 where stack alignment is required to be maintained between every
1317 pair of insns, not just when the call is made. However, we assume here
1318 that such machines either do not have push insns (and hence preallocation
1319 would occur anyway) or the problem is taken care of with
1322 if (! must_preallocate)
1324 int partial_seen = 0;
1325 int copy_to_evaluate_size = 0;
1328 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1330 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1331 partial_seen = 1;
1332 else if (partial_seen && args[i].reg == 0)
1333 must_preallocate = 1;
1335 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1336 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1337 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1338 || TREE_CODE (args[i].tree_value) == COND_EXPR
1339 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1340 copy_to_evaluate_size
1341 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1344 if (copy_to_evaluate_size * 2 >= args_size->constant
1345 && args_size->constant > 0)
1346 must_preallocate = 1;
1348 return must_preallocate;
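/* Illustrative note (editor's example): if BLKmode arguments that would
   need a temporary copy (e.g. CALL_EXPR results) account for at least
   half of a 100-byte argument block, then copy_to_evaluate_size * 2 >= 100
   holds above and preallocation is chosen over pushing. */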
1351 /* If we preallocated stack space, compute the address of each argument
1352 and store it into the ARGS array.
1354 We need not ensure it is a valid memory address here; it will be
1355 validized when it is used.
1357 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1360 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1364 rtx arg_reg = argblock;
1365 int i, arg_offset = 0;
1367 if (GET_CODE (argblock) == PLUS)
1368 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1370 for (i = 0; i < num_actuals; i++)
1372 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1373 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1376 /* Skip this parm if it will not be passed on the stack. */
1377 if (! args[i].pass_on_stack && args[i].reg != 0)
1378 continue;
1380 if (GET_CODE (offset) == CONST_INT)
1381 addr = plus_constant (arg_reg, INTVAL (offset));
1383 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1385 addr = plus_constant (addr, arg_offset);
1386 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1387 set_mem_align (args[i].stack, PARM_BOUNDARY);
1388 set_mem_attributes (args[i].stack,
1389 TREE_TYPE (args[i].tree_value), 1);
1391 if (GET_CODE (slot_offset) == CONST_INT)
1392 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1394 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1396 addr = plus_constant (addr, arg_offset);
1397 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1398 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1399 set_mem_attributes (args[i].stack_slot,
1400 TREE_TYPE (args[i].tree_value), 1);
1402 /* Function incoming arguments may overlap with sibling call
1403 outgoing arguments and we cannot allow reordering of reads
1404 from function arguments with stores to outgoing arguments
1405 of sibling calls. */
1406 set_mem_alias_set (args[i].stack, 0);
1407 set_mem_alias_set (args[i].stack_slot, 0);
1412 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1413 in a call instruction.
1415 FNDECL is the tree node for the target function. For an indirect call
1416 FNDECL will be NULL_TREE.
1418 ADDR is the operand 0 of CALL_EXPR for this call. */
1421 rtx_for_function_call (tree fndecl, tree addr)
1425 /* Get the function to call, in the form of RTL. */
1428 /* If this is the first use of the function, see if we need to
1429 make an external definition for it. */
1430 if (! TREE_USED (fndecl))
1432 assemble_external (fndecl);
1433 TREE_USED (fndecl) = 1;
1436 /* Get a SYMBOL_REF rtx for the function address. */
1437 funexp = XEXP (DECL_RTL (fndecl), 0);
1440 /* Generate an rtx (probably a pseudo-register) for the address. */
1443 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1444 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1450 /* Do the register loads required for any wholly-register parms or any
1451 parms which are passed both on the stack and in a register. Their
1452 expressions were already evaluated.
1454 Mark all register-parms as living through the call, putting these USE
1455 insns in the CALL_INSN_FUNCTION_USAGE field.
1457 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1458 checking, setting *SIBCALL_FAILURE if appropriate. */
1461 load_register_parameters (struct arg_data *args, int num_actuals,
1462 rtx *call_fusage, int flags, int is_sibcall,
1463 int *sibcall_failure)
1467 for (i = 0; i < num_actuals; i++)
1469 rtx reg = ((flags & ECF_SIBCALL)
1470 ? args[i].tail_call_reg : args[i].reg);
1473 int partial = args[i].partial;
1476 rtx before_arg = get_last_insn ();
1477 /* Set to non-negative if we must move a word at a time, even if just
1478 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1479 we just use a normal move insn. This value can be zero if the
1480 argument is a zero size structure with no fields. */
1484 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1486 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1487 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1490 size = GET_MODE_SIZE (args[i].mode);
1492 /* Handle calls that pass values in multiple non-contiguous
1493 locations. The Irix 6 ABI has examples of this. */
1495 if (GET_CODE (reg) == PARALLEL)
1497 tree type = TREE_TYPE (args[i].tree_value);
1498 emit_group_load (reg, args[i].value, type,
1499 int_size_in_bytes (type));
1502 /* If simple case, just do move. If normal partial, store_one_arg
1503 has already loaded the register for us. In all other cases,
1504 load the register(s) from memory. */
1506 else if (nregs == -1)
1508 emit_move_insn (reg, args[i].value);
1509 #ifdef BLOCK_REG_PADDING
1510 /* Handle the case where we have a value that needs shifting
1511 up to the msb, e.g. a QImode value when we're padding
1512 upward on a BYTES_BIG_ENDIAN machine. */
1513 if (size < UNITS_PER_WORD
1514 && (args[i].locate.where_pad
1515 == (BYTES_BIG_ENDIAN ? upward : downward)))
1518 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1520 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1521 report the whole reg as used. Strictly speaking, the
1522 call only uses SIZE bytes at the msb end, but it doesn't
1523 seem worth generating rtl to say that. */
1524 reg = gen_rtx_REG (word_mode, REGNO (reg));
1525 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1526 build_int_2 (shift, 0), reg, 1);
1528 emit_move_insn (reg, x);
1533 /* If we have pre-computed the values to put in the registers in
1534 the case of non-aligned structures, copy them in now. */
1536 else if (args[i].n_aligned_regs != 0)
1537 for (j = 0; j < args[i].n_aligned_regs; j++)
1538 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1539 args[i].aligned_regs[j]);
1541 else if (partial == 0 || args[i].pass_on_stack)
1543 rtx mem = validize_mem (args[i].value);
1545 /* Handle a BLKmode that needs shifting. */
1546 if (nregs == 1 && size < UNITS_PER_WORD
1547 #ifdef BLOCK_REG_PADDING
1548 && args[i].locate.where_pad == downward
1554 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1555 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1556 rtx x = gen_reg_rtx (word_mode);
1557 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1558 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1561 emit_move_insn (x, tem);
1562 x = expand_shift (dir, word_mode, x,
1563 build_int_2 (shift, 0), ri, 1);
1565 emit_move_insn (ri, x);
1568 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1571 /* When a parameter is a block, and perhaps in other cases, it is
1572 possible that it did a load from an argument slot that was
1573 already clobbered. */
1574 if (is_sibcall
1575 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1576 *sibcall_failure = 1;
1578 /* Handle calls that pass values in multiple non-contiguous
1579 locations. The Irix 6 ABI has examples of this. */
1580 if (GET_CODE (reg) == PARALLEL)
1581 use_group_regs (call_fusage, reg);
1582 else if (nregs == -1)
1583 use_reg (call_fusage, reg);
1585 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1590 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1591 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1592 bytes, then we would need to push some additional bytes to pad the
1593 arguments. So, we compute an adjustment to the stack pointer for an
1594 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1595 bytes. Then, when the arguments are pushed the stack will be perfectly
1596 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1597 be popped after the call. Returns the adjustment. */
1600 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1601 struct args_size *args_size,
1602 int preferred_unit_stack_boundary)
1604 /* The number of bytes to pop so that the stack will be
1605 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1606 HOST_WIDE_INT adjustment;
1607 /* The alignment of the stack after the arguments are pushed, if we
1608 just pushed the arguments without adjusting the stack here. */
1609 HOST_WIDE_INT unadjusted_alignment;
1611 unadjusted_alignment
1612 = ((stack_pointer_delta + unadjusted_args_size)
1613 % preferred_unit_stack_boundary);
1615 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1616 as possible -- leaving just enough left to cancel out the
1617 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1618 PENDING_STACK_ADJUST is non-negative, and congruent to
1619 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1621 /* Begin by trying to pop all the bytes. */
1622 unadjusted_alignment
1623 = (unadjusted_alignment
1624 - (pending_stack_adjust % preferred_unit_stack_boundary));
1625 adjustment = pending_stack_adjust;
1626 /* Push enough additional bytes that the stack will be aligned
1627 after the arguments are pushed. */
1628 if (preferred_unit_stack_boundary > 1)
1630 if (unadjusted_alignment > 0)
1631 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1633 adjustment += unadjusted_alignment;
1636 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1637 bytes after the call. The right number is the entire
1638 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1639 by the arguments in the first place. */
1641 = pending_stack_adjust - adjustment + unadjusted_args_size;
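/* Worked example (editor's illustration): with pending_stack_adjust == 64,
   unadjusted_args_size == 20, stack_pointer_delta == 0 and a 16-byte
   boundary: unadjusted_alignment = (0 + 20) % 16 = 4, and since
   64 % 16 == 0 it stays 4 after the subtraction above; adjustment then
   becomes 64 - (16 - 4) = 52, so popping 52 bytes now leaves the stack
   4 bytes short of alignment, and pushing the 20 bytes of arguments
   brings it back to a 16-byte boundary. ARGS_SIZE->CONSTANT ends up as
   64 - 52 + 20 = 32 bytes to pop after the call. */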
1646 /* Scan expression X to check whether it dereferences any argument slots
1647 we already clobbered by tail call arguments (as noted in the
1648 stored_args_map bitmap). Return nonzero if X dereferences such an
1649 argument slot, zero otherwise. */
1653 check_sibcall_argument_overlap_1 (rtx x)
1663 code = GET_CODE (x);
1667 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1669 else if (GET_CODE (XEXP (x, 0)) == PLUS
1670 && XEXP (XEXP (x, 0), 0) ==
1671 current_function_internal_arg_pointer
1672 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1673 i = INTVAL (XEXP (XEXP (x, 0), 1));
1677 #ifdef ARGS_GROW_DOWNWARD
1678 i = -i - GET_MODE_SIZE (GET_MODE (x));
1681 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1682 if (i + k < stored_args_map->n_bits
1683 && TEST_BIT (stored_args_map, i + k))
1684 return 1;
1689 /* Scan all subexpressions. */
1690 fmt = GET_RTX_FORMAT (code);
1691 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1695 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1696 return 1;
1698 else if (*fmt == 'E')
1700 for (j = 0; j < XVECLEN (x, i); j++)
1701 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1702 return 1;
1708 /* Scan the sequence after INSN to check whether it dereferences any
1709 argument slots we already clobbered by tail call arguments (as noted
1710 in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP, add the stack
1711 slots for ARG to the stored_args_map bitmap afterwards (when ARG is a
1712 register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the
1713 sequence after INSN dereferences such argument slots, zero otherwise. */
1716 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1720 if (insn == NULL_RTX)
1721 insn = get_insns ();
1723 insn = NEXT_INSN (insn);
1725 for (; insn; insn = NEXT_INSN (insn))
1726 if (INSN_P (insn)
1727 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1728 break;
1730 if (mark_stored_args_map)
1732 #ifdef ARGS_GROW_DOWNWARD
1733 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1735 low = arg->locate.slot_offset.constant;
1738 for (high = low + arg->locate.size.constant; low < high; low++)
1739 SET_BIT (stored_args_map, low);
1741 return insn != NULL_RTX;
1745 fix_unsafe_tree (tree t)
1747 switch (unsafe_for_reeval (t))
1752 case 1: /* Mildly unsafe. */
1753 t = unsave_expr (t);
1756 case 2: /* Wildly unsafe. */
1758 tree var = build_decl (VAR_DECL, NULL_TREE,
1761 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
1773 /* If function value *VALUE was returned at the most significant end of a
1774 register, shift it towards the least significant end and convert it to
1775 TYPE's mode. Return true and update *VALUE if some action was needed.
1777 TYPE is the type of the function's return value, which is known not
1778 to have mode BLKmode. */
1781 shift_returned_value (tree type, rtx *value)
1783 if (targetm.calls.return_in_msb (type))
1785 HOST_WIDE_INT shift;
1787 shift = (GET_MODE_BITSIZE (GET_MODE (*value))
1788 - BITS_PER_UNIT * int_size_in_bytes (type));
1791 /* Shift the value into the low part of the register. */
1792 *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
1793 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
1795 /* Truncate it to the type's mode, or its integer equivalent.
1796 This is subject to TRULY_NOOP_TRUNCATION. */
1797 *value = convert_to_mode (int_mode_for_mode (TYPE_MODE (type)),
1800 /* Now convert it to the final form. */
1801 *value = gen_lowpart (TYPE_MODE (type), *value);
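/* Worked example (editor's illustration): a 1-byte return value held at
   the most significant end of a 32-bit register gives
   shift = 32 - 8 * 1 = 24; the logical right shift above moves it into
   the low byte before the truncation and lowpart conversion. */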
1808 /* Remove all REG_EQUIV notes found in the insn chain. */
1811 purge_reg_equiv_notes (void)
1815 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1819 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1822 /* Remove the note and keep looking at the notes for
1823 this insn. */
1824 remove_note (insn, note);
1832 /* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
1835 purge_mem_unchanging_flag (rtx x)
1844 code = GET_CODE (x);
1848 if (RTX_UNCHANGING_P (x)
1849 && (XEXP (x, 0) == current_function_internal_arg_pointer
1850 || (GET_CODE (XEXP (x, 0)) == PLUS
1851 && XEXP (XEXP (x, 0), 0) ==
1852 current_function_internal_arg_pointer
1853 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
1854 RTX_UNCHANGING_P (x) = 0;
1858 /* Scan all subexpressions. */
1859 fmt = GET_RTX_FORMAT (code);
1860 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1863 purge_mem_unchanging_flag (XEXP (x, i));
1864 else if (*fmt == 'E')
1865 for (j = 0; j < XVECLEN (x, i); j++)
1866 purge_mem_unchanging_flag (XVECEXP (x, i, j));
1871 /* Generate all the code for a function call
1872 and return an rtx for its value.
1873 Store the value in TARGET (specified as an rtx) if convenient.
1874 If the value is stored in TARGET then TARGET is returned.
1875 If IGNORE is nonzero, then we ignore the value of the function call. */
1878 expand_call (tree exp, rtx target, int ignore)
1880 /* Nonzero if we are currently expanding a call. */
1881 static int currently_expanding_call = 0;
1883 /* List of actual parameters. */
1884 tree actparms = TREE_OPERAND (exp, 1);
1885 /* RTX for the function to be called. */
1887 /* Sequence of insns to perform a normal "call". */
1888 rtx normal_call_insns = NULL_RTX;
1889 /* Sequence of insns to perform a tail "call". */
1890 rtx tail_call_insns = NULL_RTX;
1891 /* Data type of the function. */
1893 tree type_arg_types;
1894 /* Declaration of the function being called,
1895 or 0 if the function is computed (not known by name). */
1897 /* The type of the function being called. */
1899 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1902 /* Register in which non-BLKmode value will be returned,
1903 or 0 if no value or if value is BLKmode. */
1905 /* Address where we should return a BLKmode value;
1906 0 if value not BLKmode. */
1907 rtx structure_value_addr = 0;
1908 /* Nonzero if that address is being passed by treating it as
1909 an extra, implicit first parameter. Otherwise,
1910 it is passed by being copied directly into struct_value_rtx. */
1911 int structure_value_addr_parm = 0;
1912 /* Size of aggregate value wanted, or zero if none wanted
1913 or if we are using the non-reentrant PCC calling convention
1914 or expecting the value in registers. */
1915 HOST_WIDE_INT struct_value_size = 0;
1916 /* Nonzero if called function returns an aggregate in memory PCC style,
1917 by returning the address of where to find it. */
1918 int pcc_struct_value = 0;
1919 rtx struct_value = 0;
  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
1927 /* Vector of information about each argument.
1928 Arguments are numbered in the order they will be pushed,
1929 not the order they are written. */
1930 struct arg_data *args;
1932 /* Total size in bytes of all the stack-parms scanned so far. */
1933 struct args_size args_size;
1934 struct args_size adjusted_args_size;
1935 /* Size of arguments before any adjustments (such as rounding). */
1936 int unadjusted_args_size;
1937 /* Data on reg parms scanned so far. */
1938 CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */
1943 /* Nonzero if we must avoid push-insns in the args for this call.
1944 If stack space is allocated for register parameters, but not by the
1945 caller, then it is preallocated in the fixed part of the stack frame.
1946 So the entire argument block must then be preallocated (i.e., we
1947 ignore PUSH_ROUNDING in that case). */
1949 int must_preallocate = !PUSH_ARGS;
1951 /* Size of the stack reserved for parameter registers. */
1952 int reg_parm_stack_space = 0;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;
  /* Mask of ECF_ flags.  */
  int flags = 0;
1960 #ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
1963 int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif
1967 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1968 char *initial_stack_usage_map = stack_usage_map;
1970 int old_stack_allocated;
1972 /* State variables to track stack modifications. */
1973 rtx old_stack_level = 0;
1974 int old_stack_arg_under_construction = 0;
1975 int old_pending_adj = 0;
1976 int old_inhibit_defer_pop = inhibit_defer_pop;
1978 /* Some stack pointer alterations we make are performed via
1979 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1980 which we then also need to save/restore along the way. */
1981 int old_stack_pointer_delta = 0;
  rtx call_fusage;
  tree p = TREE_OPERAND (exp, 0);
  tree addr = TREE_OPERAND (exp, 0);
  int pass;
  int i;
1987 /* The alignment of the stack, in bits. */
1988 HOST_WIDE_INT preferred_stack_boundary;
1989 /* The alignment of the stack, in bytes. */
1990 HOST_WIDE_INT preferred_unit_stack_boundary;
1991 /* The static chain value to use for this call. */
1992 rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
1994 if (TREE_NOTHROW (exp))
1995 flags |= ECF_NOTHROW;
1997 /* See if we can find a DECL-node for the actual function, and get the
1998 function attributes (flags) from the function decl or type node. */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (p));
      flags |= flags_from_decl_or_type (fntype);
    }
2011 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2013 /* Warn if this value is an aggregate type,
2014 regardless of which calling convention we are using for it. */
2015 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2016 warning ("function call has aggregate value");
2018 /* If the result of a pure or const function call is ignored (or void),
2019 and none of its arguments are volatile, we can avoid expanding the
2020 call and just evaluate the arguments for side-effects. */
2021 if ((flags & (ECF_CONST | ECF_PURE))
2022 && (ignore || target == const0_rtx
2023 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;

      for (arg = actparms; arg; arg = TREE_CHAIN (arg))
	volatilep |= TREE_THIS_VOLATILE (TREE_VALUE (arg)) != 0;

      if (! volatilep)
	{
	  for (arg = actparms; arg; arg = TREE_CHAIN (arg))
	    expand_expr (TREE_VALUE (arg), const0_rtx,
			 VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
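  /* Illustrative example (not from the original sources): given
     "(void) pure_fn (x++);" where pure_fn is pure, no call is emitted
     at all; only the side effect "x++" is expanded, via the
     expand_expr calls above.  */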
2044 #ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  if (reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;
#endif
2053 /* Set up a place to return a structure. */
2055 /* Cater to broken compilers. */
  if (aggregate_value_p (exp, fndecl))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2061 #ifdef PCC_STATIC_STRUCT_RETURN
2063 pcc_struct_value = 1;
2065 #else /* not PCC_STATIC_STRUCT_RETURN */
2067 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2069 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2071 /* The structure value address arg is already in actparms.
2072 Pull it out. It might be nice to just leave it there, but
2073 we need to set structure_value_addr. */
2074 tree return_arg = TREE_VALUE (actparms);
2075 actparms = TREE_CHAIN (actparms);
2076 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2077 VOIDmode, EXPAND_NORMAL);
      else if (target && MEM_P (target))
	structure_value_addr = XEXP (target, 0);
      else
	{
	  /* For variable-sized objects, we must be called with a target
	     specified.  If we were to allocate space on the stack here,
	     we would have no way of knowing when to free it.  */
	  rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);

	  mark_temp_addr_taken (d);
	  structure_value_addr = XEXP (d, 0);
	  target = 0;
	}
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }
2096 /* Figure out the amount to which the stack should be aligned. */
2097 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      if (i && i->preferred_incoming_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }
2105 /* Operand 0 is a pointer-to-function; get the type of the function. */
2106 funtype = TREE_TYPE (addr);
  if (! POINTER_TYPE_P (funtype))
    abort ();
  funtype = TREE_TYPE (funtype);
  /* Munge the tree to split complex arguments into their imaginary
     and real parts.  */
  if (targetm.calls.split_complex_arg)
    {
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
      actparms = split_complex_values (actparms);
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);
2121 if (flags & ECF_MAY_BE_ALLOCA)
2122 current_function_calls_alloca = 1;
2124 /* If struct_value_rtx is 0, it means pass the address
2125 as if it were an extra parameter. */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (convert_memory_address
				      (Pmode, structure_value_addr))
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }
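  /* Illustrative note (not in the original sources): after the code
     above, a call such as "s = f (a)" where F returns its aggregate in
     memory is expanded roughly as "f (&s_tmp, a)", the return slot
     address having been prepended to ACTPARMS; targets that provide a
     dedicated struct_value register load it there instead.  */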
  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
    num_actuals++;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;
  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2173 /* Now possibly adjust the number of named args.
2174 Normally, don't include the last named arg if anonymous args follow.
2175 We do include the last named arg if
2176 targetm.calls.strict_argument_naming() returns nonzero.
2177 (If no anonymous args follow, the result of list_length is actually
2178 one too large. This is harmless.)
2180 If targetm.calls.pretend_outgoing_varargs_named() returns
2181 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2182 this machine will be able to place unnamed args that were passed
2183 in registers into the stack. So treat all args as named. This
     allows the insns emitted for a specific argument list to be
2185 independent of the function declaration.
2187 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2188 we do not have any reliable way to pass unnamed args in
2189 registers, so we must force them into memory. */
  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (&args_so_far))
    ;
  else if (type_arg_types != 0
	   && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
2202 /* Make a vector to hold all the information about each arg. */
2203 args = alloca (num_actuals * sizeof (struct arg_data));
2204 memset (args, 0, num_actuals * sizeof (struct arg_data));
2206 /* Build up entries in the ARGS array, compute the size of the
2207 arguments into ARGS_SIZE, etc. */
2208 initialize_argument_information (num_actuals, args, &args_size,
2209 n_named_args, actparms, fndecl,
2210 &args_so_far, reg_parm_stack_space,
2211 &old_stack_level, &old_pending_adj,
2212 &must_preallocate, &flags,
2213 &try_tail_call, CALL_FROM_THUNK_P (exp));
2217 /* If this function requires a variable-sized argument list, don't
2218 try to make a cse'able block for this call. We may be able to
2219 do this eventually, but it is too complicated to keep track of
2220 what insns go in the cse'able block and which don't. */
  if (args_size.var)
    {
      flags &= ~ECF_LIBCALL_BLOCK;
      must_preallocate = 1;
    }

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);
2231 /* If the structure value address will reference the stack pointer, we
2232 must stabilize it. We don't need to do this if we know that we are
2233 not going to adjust the stack pointer in processing this call. */
  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);
  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.
2248 If rtx_equal_function_value_matters is false, that means we've
2249 finished with regular parsing. Which means that some of the
2250 machinery we use to generate tail-calls is no longer in place.
2251 This is most often true of sjlj-exceptions, which we couldn't
2252 tail-call to anyway.
2254 If current_nesting_level () == 0, we're being called after
2255 the function body has been expanded. This can happen when
2256 setting up trampolines in expand_function_end. */
2257 if (currently_expanding_call++ != 0
2258 || !flag_optimize_sibling_calls
2259 || !rtx_equal_function_value_matters
2260 || current_nesting_level () == 0
      || lookup_stmt_eh_region (exp) >= 0)
    try_tail_call = 0;
  /* Rest of the reasons for tail call optimization to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
2273 /* Doing sibling call optimization needs some work, since
2274 structure_value_addr can be allocated on the stack.
2275 It does not seem worth the effort since few optimizable
2276 sibling calls will return a structure. */
2277 || structure_value_addr != NULL_RTX
      /* Check whether the target is able to optimize the call
	 into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
	 optimized.  */
2283 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2284 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2285 /* If the called function is nested in the current one, it might access
2286 some of the caller's arguments, but could clobber them beforehand if
2287 the argument areas are shared. */
2288 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2289 /* If this function requires more stack slots than the current
2290 function, we cannot change it into a sibling call. */
2291 || args_size.constant > current_function_args_size
2292 /* If the callee pops its own arguments, then it must pop exactly
2293 the same number of arguments as the current function. */
2294 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2295 != RETURN_POPS_ARGS (current_function_decl,
2296 TREE_TYPE (current_function_decl),
2297 current_function_args_size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;
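  /* Illustrative note: even a call in tail position such as
     "return g (x);" is kept as a normal call here if, for example, G
     returns an aggregate in memory, pops a different number of bytes
     than the caller, or needs more outgoing argument space than the
     caller has (all tested above).  */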
  if (try_tail_call)
    {
      int end, inc;

      actparms = NULL_TREE;
2305 /* Ok, we're going to give the tail call the old college try.
2306 This means we're going to evaluate the function arguments
2307 up to three times. There are two degrees of badness we can
2308 encounter, those that can be unsaved and those that can't.
2309 (See unsafe_for_reeval commentary for details.)
2311 Generate a new argument list. Pass safe arguments through
2312 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2313 For hard badness, evaluate them now and put their resulting
2314 rtx in a temporary VAR_DECL.
2316 initialize_argument_information has ordered the array for the
2317 order to be pushed, and we must remember this when reconstructing
2318 the original argument order. */
      if (PUSH_ARGS_REVERSED)
	inc = 1, i = 0, end = num_actuals;
      else
	inc = -1, i = num_actuals - 1, end = -1;

      for (; i != end; i += inc)
2335 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
      /* Do the same for the function address if it is an expression.  */
      if (!fndecl)
	addr = fix_unsafe_tree (addr);
    }
2343 /* Ensure current function's preferred stack boundary is at least
     what we need.  We don't have to increase alignment for recursive
     functions.  */
2346 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2347 && fndecl != current_function_decl)
2348 cfun->preferred_stack_boundary = preferred_stack_boundary;
2349 if (fndecl == current_function_decl)
2350 cfun->recursive_call_emit = true;
2352 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
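  /* Worked example of the conversion above (illustrative only): with a
     preferred_stack_boundary of 128 bits and BITS_PER_UNIT == 8, the
     unit boundary is 128 / 8 = 16 bytes, the familiar 16-byte outgoing
     stack alignment.  */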
2354 /* We want to make two insn chains; one for a sibling call, the other
2355 for a normal call. We will select one of the two chains after
2356 initial RTL generation is complete. */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail recursion
	 call sequence.  */
      int save_pending_stack_adjust = 0;
2365 int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg;
      if (pass == 0)
	{
	  /* Emit any queued insns now; otherwise they would end up in
	     only one of the alternates.  */
	  emit_queue ();

	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust = pending_stack_adjust;
	  save_stack_pointer_delta = stack_pointer_delta;
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;
      /* Other state variables that we must reinitialize each time
	 through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

	 From this point on, if the sibling call fails, we want to set
	 sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* We know at this point that there are not currently any
	 pending cleanups.  If, however, in the process of evaluating
	 the arguments we were to create some, we'll need to be
	 able to get rid of them.  */
      if (pass == 0)
	expand_start_target_temps ();
2405 /* Don't let pending stack adjusts add up to too much.
2406 Also, do all pending adjustments now if there is any chance
2407 this might be a call to alloca or if we are expanding a sibling
2408 call sequence or if we are calling a function that is to return
2409 with stack pointer depressed. */
      if (pending_stack_adjust >= 32
	  || (pending_stack_adjust > 0
	      && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
	  || pass == 0)
	do_pending_stack_adjust ();
2416 /* When calling a const function, we must pop the stack args right away,
2417 so that the pop is deleted or moved with the call. */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
	NO_DEFER_POP;
2421 /* Precompute any arguments as needed. */
2423 precompute_arguments (flags, num_actuals, args);
2425 /* Now we are about to start emitting insns that can be deleted
2426 if a libcall is deleted. */
      if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
	start_sequence ();
2430 adjusted_args_size = args_size;
2431 /* Compute the actual size of the argument block required. The variable
2432 and constant sizes must be combined, the size may have to be rounded,
2433 and there may be a minimum required size. When generating a sibcall
	 pattern, do not round up, since we'll be re-using whatever space our
	 caller provided.  */
      unadjusted_args_size
	= compute_argument_block_size (reg_parm_stack_space,
				       &adjusted_args_size,
				       (pass == 0 ? 0
					: preferred_stack_boundary));
2442 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2444 /* The argument block when performing a sibling call is the
2445 incoming argument block. */
      if (pass == 0)
	{
	  argblock = virtual_incoming_args_rtx;
	  argblock
#ifdef STACK_GROWS_DOWNWARD
	    = plus_constant (argblock, current_function_pretend_args_size);
#else
	    = plus_constant (argblock, -current_function_pretend_args_size);
#endif
	  stored_args_map = sbitmap_alloc (args_size.constant);
	  sbitmap_zero (stored_args_map);
	}
2459 /* If we have no actual push instructions, or shouldn't use them,
2460 make space for all args right now. */
      else if (adjusted_args_size.var != 0)
	{
	  if (old_stack_level == 0)
	    {
2465 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2466 old_stack_pointer_delta = stack_pointer_delta;
2467 old_pending_adj = pending_stack_adjust;
2468 pending_stack_adjust = 0;
2469 /* stack_arg_under_construction says whether a stack arg is
2470 being constructed at the old stack level. Pushing the stack
2471 gets a clean outgoing argument block. */
2472 old_stack_arg_under_construction = stack_arg_under_construction;
	      stack_arg_under_construction = 0;
	    }
	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
	}
      else
	{
	  /* Note that we must go through the motions of allocating an argument
	     block even if the size is zero because we may be storing args
	     in the area reserved for register arguments, which may be part of
	     the stack frame.  */

	  int needed = adjusted_args_size.constant;
2486 /* Store the maximum argument space used. It will be pushed by
	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	     checking).  */
2490 if (needed > current_function_outgoing_args_size)
2491 current_function_outgoing_args_size = needed;
	  if (must_preallocate)
	    {
	      if (ACCUMULATE_OUTGOING_ARGS)
		{
2497 /* Since the stack pointer will never be pushed, it is
2498 possible for the evaluation of a parm to clobber
2499 something we have already written to the stack.
2500 Since most function calls on RISC machines do not use
2501 the stack, this is uncommon, but must work correctly.
2503 Therefore, we save any area of the stack that was already
2504 written and that we are using. Here we set up to do this
2505 by making a new stack usage map from the old one. The
2506 actual save will be done by store_one_arg.
2508 Another approach might be to try to reorder the argument
2509 evaluations to avoid this conflicting stack usage. */
2511 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2512 /* Since we will be writing into the entire argument area,
2513 the map must be allocated for its entire size, not just
2514 the part that is the responsibility of the caller. */
		  needed += reg_parm_stack_space;
#endif
#ifdef ARGS_GROW_DOWNWARD
		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
						     needed + 1);
#else
		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
						     needed);
#endif
2525 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2527 if (initial_highest_arg_in_use)
2528 memcpy (stack_usage_map, initial_stack_usage_map,
2529 initial_highest_arg_in_use);
2531 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2532 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2533 (highest_outgoing_arg_in_use
2534 - initial_highest_arg_in_use));
2537 /* The address of the outgoing argument list must not be
2538 copied to a register here, because argblock would be left
2539 pointing to the wrong place after the call to
2540 allocate_dynamic_stack_space below. */
2542 argblock = virtual_outgoing_args_rtx;
		}
	      else
		{
		  if (inhibit_defer_pop == 0)
		    {
		      /* Try to reuse some or all of the pending_stack_adjust
			 to get this space.  */
		      needed
			= (combine_pending_stack_adjustment_and_call
			   (unadjusted_args_size,
			    &adjusted_args_size,
			    preferred_unit_stack_boundary));
2556 /* combine_pending_stack_adjustment_and_call computes
2557 an adjustment before the arguments are allocated.
2558 Account for them and see whether or not the stack
2559 needs to go up or down. */
		      needed = unadjusted_args_size - needed;

		      if (needed < 0)
			{
			  /* We're releasing stack space.  */
			  /* ??? We can avoid any adjustment at all if we're
			     already aligned.  FIXME.  */
			  pending_stack_adjust = -needed;
			  do_pending_stack_adjust ();
			  needed = 0;
			}
		      else
			/* We need to allocate space.  We'll do that in
			   push_block below.  */
			pending_stack_adjust = 0;
		    }
2577 /* Special case this because overhead of `push_block' in
2578 this case is non-trivial. */
		  if (needed == 0)
		    argblock = virtual_outgoing_args_rtx;
		  else
		    {
		      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
		      argblock = plus_constant (argblock, needed);
#endif
		    }
2589 /* We only really need to call `copy_to_reg' in the case
2590 where push insns are going to be used to pass ARGBLOCK
2591 to a function call in ARGS. In that case, the stack
2592 pointer changes value from the allocation point to the
2593 call point, and hence the value of
2594 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2595 as well always do it. */
		  argblock = copy_to_reg (argblock);
		}
	    }
	}
      if (ACCUMULATE_OUTGOING_ARGS)
	{
2603 /* The save/restore code in store_one_arg handles all
2604 cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
	      rtx push_size = GEN_INT (reg_parm_stack_space
				       + adjusted_args_size.constant);
#else
	      rtx push_size = GEN_INT (adjusted_args_size.constant);
#endif
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level,
				   NULL_RTX);
		  old_stack_pointer_delta = stack_pointer_delta;
2620 old_pending_adj = pending_stack_adjust;
2621 pending_stack_adjust = 0;
2622 /* stack_arg_under_construction says whether a stack
2623 arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
2626 old_stack_arg_under_construction
2627 = stack_arg_under_construction;
2628 stack_arg_under_construction = 0;
2629 /* Make a new map for the new argument list. */
2630 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2631 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2632 highest_outgoing_arg_in_use = 0;
		}

	      allocate_dynamic_stack_space (push_size, NULL_RTX,
					    BITS_PER_UNIT);
	    }
2638 /* If argument evaluation might modify the stack pointer,
2639 copy the address of the argument list to a register. */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}
2648 compute_argument_addresses (args, argblock, num_actuals);
2650 /* If we push args individually in reverse order, perform stack alignment
2651 before the first push (the last arg). */
2652 if (PUSH_ARGS_REVERSED && argblock == 0
2653 && adjusted_args_size.constant != unadjusted_args_size)
2655 /* When the stack adjustment is pending, we get better code
2656 by combining the adjustments. */
2657 if (pending_stack_adjust
2658 && ! (flags & ECF_LIBCALL_BLOCK)
2659 && ! inhibit_defer_pop)
2661 pending_stack_adjust
2662 = (combine_pending_stack_adjustment_and_call
2663 (unadjusted_args_size,
2664 &adjusted_args_size,
2665 preferred_unit_stack_boundary));
2666 do_pending_stack_adjust ();
2668 else if (argblock == 0)
2669 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2670 - unadjusted_args_size));
2672 /* Now that the stack is properly aligned, pops can't safely
2673 be deferred during the evaluation of the arguments. */
2676 funexp = rtx_for_function_call (fndecl, addr);
2678 /* Figure out the register where the value, if any, will come back. */
2680 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2681 && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
					  fndecl, (pass == 0));
	  else
	    valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
	}
2690 /* Precompute all register parameters. It isn't safe to compute anything
2691 once we have started filling any specific hard regs. */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2694 if (TREE_OPERAND (exp, 2))
2695 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2696 NULL_RTX, VOIDmode, 0);
      else
	static_chain_value = 0;
2700 #ifdef REG_PARM_STACK_SPACE
2701 /* Save the fixed argument area if it's part of the caller's frame and
2702 is clobbered by argument setup for this call. */
2703 if (ACCUMULATE_OUTGOING_ARGS && pass)
2704 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif
2708 /* Now store (and compute if necessary) all non-register parms.
2709 These come before register parms, since they can require block-moves,
2710 which could clobber the registers used for register parms.
2711 Parms which have partial registers are not stored here,
2712 but we do preallocate space here if they want that. */
2714 for (i = 0; i < num_actuals; i++)
	if (args[i].reg == 0 || args[i].pass_on_stack)
	  {
	    rtx before_arg = get_last_insn ();

	    if (store_one_arg (&args[i], argblock, flags,
			       adjusted_args_size.var != 0,
			       reg_parm_stack_space)
		|| (pass == 0
		    && check_sibcall_argument_overlap (before_arg,
						       &args[i], 1)))
	      sibcall_failure = 1;

	    if (flags & ECF_CONST
		&& args[i].stack
		&& args[i].value == args[i].stack)
	      call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_USE (VOIDmode,
							    args[i].stack),
					       call_fusage);
	  }
2736 /* If we have a parm that is passed in registers but not in memory
2737 and whose alignment does not permit a direct copy into registers,
	 make a group of pseudos that correspond to each register that we
	 will later fill.  */
      if (STRICT_ALIGNMENT)
2741 store_unaligned_arguments_into_pseudos (args, num_actuals);
2743 /* Now store any partially-in-registers parm.
2744 This is the last place a block-move can happen. */
2746 for (i = 0; i < num_actuals; i++)
	if (args[i].partial != 0 && ! args[i].pass_on_stack)
	  {
	    rtx before_arg = get_last_insn ();

	    if (store_one_arg (&args[i], argblock, flags,
			       adjusted_args_size.var != 0,
			       reg_parm_stack_space)
		|| (pass == 0
		    && check_sibcall_argument_overlap (before_arg,
						       &args[i], 1)))
	      sibcall_failure = 1;
	  }
2760 /* If we pushed args in forward order, perform stack alignment
2761 after pushing the last arg. */
2762 if (!PUSH_ARGS_REVERSED && argblock == 0)
2763 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2764 - unadjusted_args_size));
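      /* Worked example (illustrative only): with 20 bytes of argument
	 data and a 16-byte preferred boundary, adjusted_args_size.constant
	 is rounded up to 32, so the anti_adjust_stack call above grows
	 the stack by the extra 32 - 20 = 12 bytes of padding.  */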
2766 /* If register arguments require space on the stack and stack space
2767 was not preallocated, allocate stack space here for arguments
2768 passed in registers. */
2769 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2770 if (!ACCUMULATE_OUTGOING_ARGS
2771 && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif
      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_operand (structure_value_addr,
					 NULL_RTX));

	  if (REG_P (struct_value))
	    use_reg (&call_fusage, struct_value);
	}
2790 funexp = prepare_call_address (funexp, static_chain_value,
2791 &call_fusage, reg_parm_seen, pass == 0);
2793 load_register_parameters (args, num_actuals, &call_fusage, flags,
2794 pass == 0, &sibcall_failure);
      /* Perform postincrements before actually calling the function.  */
      emit_queue ();
2799 /* Save a pointer to the last insn before the call, so that we can
2800 later safely search backwards to find the CALL_INSN. */
2801 before_call = get_last_insn ();
2803 /* Set up next argument register. For sibling calls on machines
2804 with register windows this should be the incoming register. */
#ifdef FUNCTION_INCOMING_ARG
      if (pass == 0)
	next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
					      void_type_node, 1);
      else
#endif
	next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
				     void_type_node, 1);
      /* All arguments and registers used for the call must be set up by
	 now!  */

      /* Stack must be properly aligned now.  */
      if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
	abort ();
2821 /* Generate the actual call instruction. */
2822 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2823 adjusted_args_size.constant, struct_value_size,
2824 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2825 flags, & args_so_far);
2827 /* If call is cse'able, make appropriate pair of reg-notes around it.
2828 Test valreg so we don't crash; may safely ignore `const'
2829 if return type is void. Disable for PARALLEL return values, because
2830 we have no way to move such values into a pseudo register. */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
	{
	  rtx insn;
	  bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;

	  insns = get_insns ();

	  /* Expansion of block moves possibly introduced a loop that may
	     not appear inside libcall block.  */
	  for (insn = insns; insn; insn = NEXT_INSN (insn))
	    if (JUMP_P (insn))
	      failed = true;
	  if (failed)
	    {
	      end_sequence ();
	      emit_insn (insns);
	    }
	  else
	    {
	      rtx note = 0;
	      rtx temp = gen_reg_rtx (GET_MODE (valreg));
2855 /* Mark the return value as a pointer if needed. */
2856 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2857 mark_reg_pointer (temp,
2858 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
	      end_sequence ();
	      if (flag_unsafe_math_optimizations
		  && fndecl
		  && DECL_BUILT_IN (fndecl)
2864 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2865 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
		      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
		note = gen_rtx_fmt_e (SQRT,
				      GET_MODE (temp),
				      args[0].initial_value);
	      else
		{
2872 /* Construct an "equal form" for the value which
2873 mentions all the arguments in order as well as
2874 the function name. */
2875 for (i = 0; i < num_actuals; i++)
2876 note = gen_rtx_EXPR_LIST (VOIDmode,
2877 args[i].initial_value, note);
2878 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2880 if (flags & ECF_PURE)
2881 note = gen_rtx_EXPR_LIST (VOIDmode,
2882 gen_rtx_USE (VOIDmode,
2883 gen_rtx_MEM (BLKmode,
						  gen_rtx_SCRATCH (VOIDmode))),
					      note);
		}

	      emit_libcall_block (insns, temp, valreg, note);

	      valreg = temp;
	    }
	}
      else if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx last;
2897 /* The return value from a malloc-like function is a pointer. */
2898 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2899 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2901 emit_move_insn (temp, valreg);
	  /* The return value from a malloc-like function can not alias
	     anything else.  */
	  last = get_last_insn ();
	  REG_NOTES (last) =
	    gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2909 /* Write out the sequence. */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);

	  valreg = temp;
	}
2916 /* For calls to `setjmp', etc., inform flow.c it should complain
2917 if nonvolatile values are live. For functions that cannot return,
2918 inform flow that control does not fall through. */
      if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
	{
2922 /* The barrier must be emitted
2923 immediately after the CALL_INSN. Some ports emit more
2924 than just a CALL_INSN above, so we must search for it here. */
	  rtx last = get_last_insn ();
	  while (!CALL_P (last))
	    {
	      last = PREV_INSN (last);
	      /* There was no CALL_INSN?  */
	      if (last == before_call)
		abort ();
	    }

	  emit_barrier_after (last);
2937 /* Stack adjustments after a noreturn call are dead code.
2938 However when NO_DEFER_POP is in effect, we must preserve
2939 stack_pointer_delta. */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
	}
2947 if (flags & ECF_LONGJMP)
2948 current_function_calls_longjmp = 1;
2950 /* If value type not void, return an rtx for the value. */
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
	  || ignore)
	target = const0_rtx;
2955 else if (structure_value_addr)
	  if (target == 0 || !MEM_P (target))
	    {
	      target
		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
			       memory_address (TYPE_MODE (TREE_TYPE (exp)),
					       structure_value_addr));
	      set_mem_attributes (target, exp, 1);
	    }
	}
2966 else if (pcc_struct_value)
2968 /* This is the special C++ case where we need to
2969 know what the true target was. We take care to
2970 never use this value more than once in one expression. */
2971 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2972 copy_to_reg (valreg));
2973 set_mem_attributes (target, exp, 1);
2975 /* Handle calls that return values in multiple non-contiguous locations.
2976 The Irix 6 ABI has examples of this. */
2977 else if (GET_CODE (valreg) == PARALLEL)
	  if (target == 0)
	    {
	      /* This will only be assigned once, so it can be readonly.  */
	      tree nt = build_qualified_type (TREE_TYPE (exp),
					      (TYPE_QUALS (TREE_TYPE (exp))
					       | TYPE_QUAL_CONST));

	      target = assign_temp (nt, 0, 1, 1);
	      preserve_temp_slots (target);
	    }
2990 if (! rtx_equal_p (target, valreg))
2991 emit_group_store (target, valreg, TREE_TYPE (exp),
2992 int_size_in_bytes (TREE_TYPE (exp)));
2994 /* We can not support sibling calls for this case. */
2995 sibcall_failure = 1;
      else if (target
	       && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2999 && GET_MODE (target) == GET_MODE (valreg))
3001 /* TARGET and VALREG cannot be equal at this point because the
3002 latter would not have REG_FUNCTION_VALUE_P true, while the
3003 former would if it were referring to the same register.
3005 If they refer to the same register, this move will be a no-op,
3006 except when function inlining is being done. */
3007 emit_move_insn (target, valreg);
3009 /* If we are setting a MEM, this code must be executed. Since it is
3010 emitted after the call insn, sibcall optimization cannot be
3011 performed in that case. */
	  if (MEM_P (target))
	    sibcall_failure = 1;
	}
      else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
	{
	  target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else
	{
	  if (shift_returned_value (TREE_TYPE (exp), &valreg))
	    sibcall_failure = 1;

	  target = copy_to_reg (valreg);
	}
      if (targetm.calls.promote_function_return (funtype))
	{
	  /* If we promoted this return value, make the proper SUBREG.
	     TARGET might be const0_rtx here, so be careful.  */
	  if (REG_P (target)
	      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
	      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	    {
3038 tree type = TREE_TYPE (exp);
	      int unsignedp = TYPE_UNSIGNED (type);
	      int offset = 0;
3042 /* If we don't promote as expected, something is wrong. */
	      if (GET_MODE (target)
		  != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
		abort ();
	      if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
		  && (GET_MODE_SIZE (GET_MODE (target))
		      > GET_MODE_SIZE (TYPE_MODE (type))))
		{
		  offset = GET_MODE_SIZE (GET_MODE (target))
			   - GET_MODE_SIZE (TYPE_MODE (type));
		  if (! BYTES_BIG_ENDIAN)
		    offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
		  else if (! WORDS_BIG_ENDIAN)
		    offset %= UNITS_PER_WORD;
		}
3058 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3059 SUBREG_PROMOTED_VAR_P (target) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
	    }
	}
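      /* Worked example (illustrative only): a promoted SImode value
	 held in a DImode register on a 64-bit target with both
	 WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN set lives in the high
	 part of the register, so offset = 8 - 4 = 4 and the SUBREG
	 built above is (subreg:SI (reg:DI n) 4).  */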
3064 /* If size of args is variable or this was a constructor call for a stack
3065 argument, restore saved stack-pointer value. */
      if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
	{
3069 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3070 stack_pointer_delta = old_stack_pointer_delta;
3071 pending_stack_adjust = old_pending_adj;
3072 stack_arg_under_construction = old_stack_arg_under_construction;
3073 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3074 stack_usage_map = initial_stack_usage_map;
	  sibcall_failure = 1;
	}
3077 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3079 #ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif
3085 /* If we saved any argument areas, restore them. */
3086 for (i = 0; i < num_actuals; i++)
3087 if (args[i].save_area)
	      {
		enum machine_mode save_mode = GET_MODE (args[i].save_area);
		rtx stack_area
		  = gen_rtx_MEM (save_mode,
				 memory_address (save_mode,
						 XEXP (args[i].stack_slot, 0)));

		if (save_mode != BLKmode)
		  emit_move_insn (stack_area, args[i].save_area);
		else
		  emit_block_move (stack_area, args[i].save_area,
				   GEN_INT (args[i].locate.size.constant),
				   BLOCK_OP_CALL_PARM);
	      }
3103 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3104 stack_usage_map = initial_stack_usage_map;
3107 /* If this was alloca, record the new stack level for nonlocal gotos.
3108 Check for the handler slots since we might not have a save area
3109 for non-local gotos. */
3111 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3112 update_nonlocal_goto_save_area ();
3114 /* Free up storage we no longer need. */
3115 for (i = 0; i < num_actuals; ++i)
3116 if (args[i].aligned_regs)
3117 free (args[i].aligned_regs);
3121 /* Undo the fake expand_start_target_temps we did earlier. If
	     there had been any cleanups created, we've already set
	     sibcall_failure.  */
	  expand_end_target_temps ();
3127 /* If this function is returning into a memory location marked as
3128 readonly, it means it is initializing that location. We normally treat
3129 functions as not clobbering such locations, so we need to specify that
3130 this one does. We do this by adding the appropriate CLOBBER to the
3131 CALL_INSN function usage list. This cannot be done by emitting a
3132 standalone CLOBBER after the call because the latter would be ignored
3133 by at least the delay slot scheduling pass. We do this now instead of
3134 adding to call_fusage before the call to emit_call_1 because TARGET
3135 may be modified in the meantime. */
3136 if (structure_value_addr != 0 && target != 0
3137 && MEM_P (target) && RTX_UNCHANGING_P (target))
	add_function_usage_to
	  (last_call_insn (),
	   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
			      NULL_RTX));
      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
	{
	  tail_call_insns = insns;
3150 /* Restore the pending stack adjustment now that we have
3151 finished generating the sibling call sequence. */
3153 pending_stack_adjust = save_pending_stack_adjust;
3154 stack_pointer_delta = save_stack_pointer_delta;
3156 /* Prepare arg structure for next iteration. */
	  for (i = 0; i < num_actuals; i++)
	    {
	      args[i].value = 0;
	      args[i].aligned_regs = 0;
	      args[i].stack = 0;
	    }

	  sbitmap_free (stored_args_map);
	}
      else
	{
3168 normal_call_insns = insns;
3170 /* Verify that we've deallocated all the stack we used. */
	  if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
	      && old_stack_allocated != (stack_pointer_delta
					 - pending_stack_adjust))
	    abort ();
	}
3177 /* If something prevents making this a sibling call,
3178 zero out the sequence. */
      if (sibcall_failure)
	tail_call_insns = NULL_RTX;
    }
3185 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3186 arguments too, as argument area is now clobbered by the call. */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      cfun->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);
3195 currently_expanding_call--;
3197 /* If this function returns with the stack pointer depressed, ensure
3198 this block saves and restores the stack pointer, show it was
3199 changed, and adjust for any outgoing arg space. */
  if (flags & ECF_SP_DEPRESSED)
    {
      clear_pending_stack_adjust ();
      emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
      emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
      save_stack_pointer ();
    }

  return target;
}
3210 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3211 this function's incoming arguments.
3213 At the start of RTL generation we know the only REG_EQUIV notes
3214 in the rtl chain are those for incoming arguments, so we can safely
3215 flush any REG_EQUIV note.
3217 This is (slight) overkill. We could keep track of the highest
3218 argument we clobber and be more selective in removing notes, but it
3219 does not seem to be worth the effort. */
void
fixup_tail_calls (void)
{
  rtx insn;
  tree arg;

  purge_reg_equiv_notes ();
3228 /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
3229 flag of some incoming arguments MEM RTLs, because it can write into
3230 those slots. We clear all those bits now.
3232 This is (slight) overkill, we could keep track of which arguments
3233 we actually write into. */
3234 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      purge_mem_unchanging_flag (PATTERN (insn));
3240 /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
3241 arguments passed in registers. */
  for (arg = DECL_ARGUMENTS (current_function_decl);
       arg;
       arg = TREE_CHAIN (arg))
    {
      if (REG_P (DECL_RTL (arg)))
	RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
    }
}
/* Traverse an argument list in VALUES and expand all complex
   arguments into their components.  */

static tree
split_complex_values (tree values)
{
  tree p;
3258 /* Before allocating memory, check for the common case of no complex. */
3259 for (p = values; p; p = TREE_CHAIN (p))
3261 tree type = TREE_TYPE (TREE_VALUE (p));
3262 if (type && TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }

  return values;

 found:
  values = copy_list (values);
3271 for (p = values; p; p = TREE_CHAIN (p))
      tree complex_value = TREE_VALUE (p);
      tree complex_type;

      complex_type = TREE_TYPE (complex_value);
      if (TREE_CODE (complex_type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (complex_type))
	{
	  tree subtype;
	  tree real, imag, next;
3286 subtype = TREE_TYPE (complex_type);
3287 complex_value = save_expr (complex_value);
3288 real = build1 (REALPART_EXPR, subtype, complex_value);
3289 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3291 TREE_VALUE (p) = real;
3292 next = TREE_CHAIN (p);
3293 imag = build_tree_list (NULL_TREE, imag);
3294 TREE_CHAIN (p) = imag;
3295 TREE_CHAIN (imag) = next;
	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return values;
}
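/* Illustrative example (not in the original sources): on a target
   whose split_complex_arg hook is true, an argument Z of type
   "complex double" becomes the two "double" entries REALPART_EXPR (Z)
   and IMAGPART_EXPR (Z) in the actual-parameter list, and
   split_complex_types below rewrites the corresponding type list the
   same way.  */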
/* Traverse a list of TYPES and expand all complex types into their
   components.  */

static tree
split_complex_types (tree types)
{
  tree p;
3312 /* Before allocating memory, check for the common case of no complex. */
3313 for (p = types; p; p = TREE_CHAIN (p))
3315 tree type = TREE_VALUE (p);
3316 if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }

  return types;

 found:
  types = copy_list (types);
3325 for (p = types; p; p = TREE_CHAIN (p))
3327 tree complex_type = TREE_VALUE (p);
3329 if (TREE_CODE (complex_type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (complex_type))
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
3335 TREE_VALUE (p) = TREE_TYPE (complex_type);
3336 next = TREE_CHAIN (p);
3338 /* Add another component type for the imaginary part. */
3339 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3340 TREE_CHAIN (p) = imag;
3341 TREE_CHAIN (imag) = next;
	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return types;
}
3351 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3352 The RETVAL parameter specifies whether return value needs to be saved, other
3353 parameters are documented in the emit_library_call function below. */
static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3357 enum libcall_type fn_type,
			   enum machine_mode outmode, int nargs, va_list p)
{
3360 /* Total size in bytes of all the stack-parms scanned so far. */
3361 struct args_size args_size;
3362 /* Size of arguments before any adjustments (such as rounding). */
3363 struct args_size original_args_size;
  int argnum;
  rtx fun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;			/* type_for_mode (outmode, 0) */
3392 #ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
3395 int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif
3399 /* Size of the stack reserved for parameter registers. */
3400 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3401 char *initial_stack_usage_map = stack_usage_map;
3403 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3405 #ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif
3409 /* By default, library functions can not throw. */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_CONST_MAKE_BLOCK:
      flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
      break;
    case LCT_PURE_MAKE_BLOCK:
      flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_ALWAYS_RETURN:
      flags = ECF_ALWAYS_RETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;
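  /* Illustrative example of how callers reach this code (hypothetical
     call, not from this file): a DImode multiply expanded through a
     libcall might do

       res = emit_library_call_value (libfunc, NULL_RTX,
				      LCT_CONST_MAKE_BLOCK, DImode,
				      2, op0, DImode, op1, DImode);

     which arrives here with flags = ECF_NOTHROW | ECF_CONST
     | ECF_LIBCALL_BLOCK.  */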
  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3446 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3448 /* If this kind of value comes back in memory,
3449 decide where in memory it should come back. */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && MEM_P (value))
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 0, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
	}
    }
  else
    tfom = void_type_node;
3476 /* ??? Unfinished: must pass the memory address as an argument. */
3478 /* Copy all the libcall-arguments out of the varargs data
3479 and into a vector ARGVEC.
3481 Compute how to pass each argument. We only support a very small subset
3482 of the full argument passing conventions to limit complexity here since
3483 library functions shouldn't have many args. */
3485 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3486 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3488 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
#endif
  args_size.constant = 0;
  args_size.var = 0;

  count = 0;
3499 /* Now we are about to start emitting insns that can be deleted
3500 if a libcall is deleted. */
  if (flags & ECF_LIBCALL_BLOCK)
    start_sequence ();
3506 /* If there's a structure value address to be passed,
3507 either pass it in the special place, or pass it as an extra argument. */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);
      nargs++;
3513 /* Make sure it is a reasonable operand for a move or push insn. */
3514 if (!REG_P (addr) && !MEM_P (addr)
3515 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3516 addr = force_operand (addr, NULL_RTX);
3518 argvec[count].value = addr;
3519 argvec[count].mode = Pmode;
3520 argvec[count].partial = 0;
3522 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
	abort ();
      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   0, NULL_TREE, &args_size, &argvec[count].locate);
3534 if (argvec[count].reg == 0 || argvec[count].partial != 0
3535 || reg_parm_stack_space > 0)
3536 args_size.constant += argvec[count].locate.size.constant;
      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }
  for (; count < nargs; count++)
    {
3545 rtx val = va_arg (p, rtx);
3546 enum machine_mode mode = va_arg (p, enum machine_mode);
3548 /* We cannot convert the arg value to the mode the library wants here;
3549 must do it earlier where we know the signedness of the arg. */
      if (mode == BLKmode
	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
	abort ();
3554 /* There's no need to call protect_from_queue, because
3555 either emit_move_insn or emit_push_insn will do that. */
3557 /* Make sure it is a reasonable operand for a move or push insn. */
3558 if (!REG_P (val) && !MEM_P (val)
3559 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3560 val = force_operand (val, NULL_RTX);
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
	{
	  rtx slot;
	  int must_copy = ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
							NULL_TREE, 1);
3568 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3569 functions, so we have to pretend this isn't such a function. */
	  if (flags & ECF_LIBCALL_BLOCK)
	    {
	      rtx insns = get_insns ();
	      end_sequence ();
	      emit_insn (insns);
	    }
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	  /* If this was a CONST function, it is now PURE since
	     it now reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }
	  if (MEM_P (val) && ! must_copy)
	    slot = val;
	  else if (must_copy)
	    {
	      slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
				  0, 1, 1);
	      emit_move_insn (slot, val);
	    }
	  else
	    {
	      tree type = lang_hooks.types.type_for_mode (mode, 0);

	      slot
		= gen_rtx_MEM (mode,
			       expand_expr (build1 (ADDR_EXPR,
						    build_pointer_type (type),
						    make_tree (type, val)),
					    NULL_RTX, VOIDmode, 0));
	    }
	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  mode = Pmode;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}
3619 argvec[count].value = val;
3620 argvec[count].mode = mode;
3622 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3624 argvec[count].partial
3625 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   argvec[count].partial,
			   NULL_TREE, &args_size, &argvec[count].locate);
      if (argvec[count].locate.size.var)
	abort ();
3639 if (argvec[count].reg == 0 || argvec[count].partial != 0
3640 || reg_parm_stack_space > 0)
3641 args_size.constant += argvec[count].locate.size.constant;
3643 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3646 /* If this machine requires an external definition for library
3647 functions, write one out. */
3648 assemble_external_libcall (fun);
  original_args_size = args_size;
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif
3665 if (args_size.constant > current_function_outgoing_args_size)
3666 current_function_outgoing_args_size = args_size.constant;
3668 if (ACCUMULATE_OUTGOING_ARGS)
3670 /* Since the stack pointer will never be pushed, it is possible for
3671 the evaluation of a parm to clobber something we have already
3672 written to the stack. Since most function calls on RISC machines
3673 do not use the stack, this is uncommon, but must work correctly.
3675 Therefore, we save any area of the stack that was already written
3676 and that we are using. Here we set up to do this by making a new
3677 stack usage map from the old one.
3679 Another approach might be to try to reorder the argument
3680 evaluations to avoid this conflicting stack usage. */
3682 needed = args_size.constant;
3684 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3685 /* Since we will be writing into the entire argument area, the
3686 map must be allocated for its entire size, not just the part that
3687 is the responsibility of the caller. */
3688 needed += reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed);
#endif
      stack_usage_map = alloca (highest_outgoing_arg_in_use);
3700 if (initial_highest_arg_in_use)
3701 memcpy (stack_usage_map, initial_stack_usage_map,
3702 initial_highest_arg_in_use);
3704 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3705 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3706 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3709 /* We must be careful to use virtual regs before they're instantiated,
3710 and real regs afterwards. Loop optimization, for example, can create
3711 new libcalls after we've instantiated the virtual regs, and if we
3712 use virtuals anyway, they won't match the rtl patterns. */
      if (virtuals_instantiated)
	argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }
3725 /* If we push args individually in reverse order, perform stack alignment
3726 before the first push (the last arg). */
3727 if (argblock == 0 && PUSH_ARGS_REVERSED)
3728 anti_adjust_stack (GEN_INT (args_size.constant
3729 - original_args_size.constant));
  if (PUSH_ARGS_REVERSED)
    inc = -1, argnum = nargs - 1;
  else
    inc = 1, argnum = 0;
3742 #ifdef REG_PARM_STACK_SPACE
3743 if (ACCUMULATE_OUTGOING_ARGS)
3745 /* The argument list is the property of the called routine and it
3746 may clobber it. If the fixed area has been used for previous
3747 parameters, we must save and restore it. */
3748 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					  &low_to_save, &high_to_save);
#endif
3753 /* Push the args that need to be pushed. */
3755 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3756 are to be pushed. */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
3759 enum machine_mode mode = argvec[argnum].mode;
3760 rtx val = argvec[argnum].value;
3761 rtx reg = argvec[argnum].reg;
3762 int partial = argvec[argnum].partial;
3763 int lower_bound = 0, upper_bound = 0, i;
      if (! (reg != 0 && partial == 0))
	{
	  if (ACCUMULATE_OUTGOING_ARGS)
	    {
3769 /* If this is being stored into a pre-allocated, fixed-size,
3770 stack area, save any previous data at that location. */
3772 #ifdef ARGS_GROW_DOWNWARD
3773 /* stack_slot is negative, but we want to index stack_usage_map
3774 with positive values. */
3775 upper_bound = -argvec[argnum].locate.offset.constant + 1;
	      lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
	      lower_bound = argvec[argnum].locate.offset.constant;
	      upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif
	      i = lower_bound;
	      /* Don't worry about things in the fixed argument area;
		 it has already been saved.  */
	      if (i < reg_parm_stack_space)
		i = reg_parm_stack_space;
	      while (i < upper_bound && stack_usage_map[i] == 0)
		i++;

	      if (i < upper_bound)
		{
		  /* We need to make a save area.  */
		  unsigned int size
		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
		  enum machine_mode save_mode
		    = mode_for_size (size, MODE_INT, 1);
		  rtx adr
		    = plus_constant (argblock,
				     argvec[argnum].locate.offset.constant);
		  rtx stack_area
		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
		  if (save_mode == BLKmode)
		    {
		      argvec[argnum].save_area
			= assign_stack_temp (BLKmode,
					     argvec[argnum].locate.size.constant,
					     0);
		      emit_block_move (validize_mem (argvec[argnum].save_area),
				       stack_area,
				       GEN_INT (argvec[argnum].locate.size.constant),
				       BLOCK_OP_CALL_PARM);
		    }
		  else
		    {
		      argvec[argnum].save_area = gen_reg_rtx (save_mode);

		      emit_move_insn (argvec[argnum].save_area, stack_area);
		    }
		}
	    }
3824 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3825 partial, reg, 0, argblock,
3826 GEN_INT (argvec[argnum].locate.offset.constant),
3827 reg_parm_stack_space,
3828 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3830 /* Now mark the segment we just used. */
3831 if (ACCUMULATE_OUTGOING_ARGS)
3832 for (i = lower_bound; i < upper_bound; i++)
	      stack_usage_map[i] = 1;
	}

      NO_DEFER_POP;
    }
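  /* Illustrative note: stack_usage_map is a simple byte map of the
     outgoing area.  If this argument occupies bytes
     [lower_bound, upper_bound) and any of them was already marked by
     an earlier argument, the code above first copies the old contents
     into argvec[argnum].save_area so the caller's data survives the
     push.  */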
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }
  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        use_reg (&call_fusage, reg);
    }
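  /* The use_reg/use_group_regs calls above chain (use (reg ...)) rtxs onto
     CALL_FUSAGE, which emit_call_1 attaches to the call as
     CALL_INSN_FUNCTION_USAGE.  For a two-register libcall the chain looks
     roughly like
        (expr_list (use (reg:SI 4)) (expr_list (use (reg:SI 5)) nil))
     (register numbers purely illustrative), keeping the argument registers
     live up to the call.  */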
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }
  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
    abort ();
  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if (flags & (ECF_NORETURN | ECF_LONGJMP))
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */
      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          if (last == before_call)
            abort ();
        }

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
  /* If the call is cse'able, make the appropriate pair of reg-notes around
     it.  Test valreg so we don't crash; we may safely ignore `const'
     if the return type is void.  Disable for PARALLEL return values,
     because we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
        {
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
        }
      else
        {
          rtx note = 0;
          rtx temp;
          int i;

          if (GET_CODE (valreg) == PARALLEL)
            {
              temp = gen_reg_rtx (outmode);
              emit_group_store (temp, valreg, NULL_TREE,
                                GET_MODE_SIZE (outmode));
              valreg = temp;
            }

          temp = gen_reg_rtx (GET_MODE (valreg));

          /* Construct an "equal form" for the value, mentioning all the
             arguments in order as well as the function name.  */
          for (i = 0; i < nargs; i++)
            note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
          note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

          insns = get_insns ();
          end_sequence ();

          if (flags & ECF_PURE)
            note = gen_rtx_EXPR_LIST (VOIDmode,
                                      gen_rtx_USE (VOIDmode,
                                                   gen_rtx_MEM (BLKmode,
                                                                gen_rtx_SCRATCH (VOIDmode))),
                                      note);

          emit_libcall_block (insns, temp, valreg, note);

          valreg = temp;
        }
    }
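  /* A sketch of the note built above: for a two-argument libcall the
     chain ends up roughly as
        (expr_list FUN (expr_list ARG1 (expr_list ARG0 nil)))
     so CSE can recognize two const libcalls with the same function and
     arguments as computing the same value.  */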
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else if (value != 0)
        emit_move_insn (value, valreg);
      else
        value = valreg;
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or another LCT_ value for other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
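/* An illustrative caller, modeled on how the expander elsewhere in the
   compiler clears memory through memset (the argument rtxs here are
   hypothetical):

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        object, Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        size, TYPE_MODE (sizetype));

   Each value rtx is followed by the machine mode it should be converted
   to, as the comment above describes.  */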
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
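/* An illustrative caller, modeled on the libcall fallback used by the
   binary optabs (LIBFUNC, MODE and the operand rtxs are hypothetical):

     rtx target = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                           mode, 2, op0, mode, op1, mode);

   Passing NULL_RTX for VALUE lets the function choose where the result
   lives, typically hard_libcall_value (mode).  */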
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack
         area, save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif
          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   expr_size (arg->tree_value),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.  */
    abort ();
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;
  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;
      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the mode
         doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }
  /* Don't allow anything left on the stack from computation
     of an argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump the stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
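      /* Worked example (ignoring PUSH_ROUNDING): a 2-byte scalar with a
         32-bit PARM_BOUNDARY gives size == 2 and used == 4, so the
         emit_push_insn call below is told about used - size == 2 extra
         bytes to allocate beyond the value itself.  */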
      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because
             emit_push_insn for BLKmode is careful to avoid it.  */
          if (reg && GET_CODE (reg) == PARALLEL)
            {
              /* Use the size of the elt to compute excess.  */
              rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
              excess = (arg->locate.size.constant
                        - int_size_in_bytes (TREE_TYPE (pval))
                        + partial * GET_MODE_SIZE (GET_MODE (elt)));
            }
          else
            excess = (arg->locate.size.constant
                      - int_size_in_bytes (TREE_TYPE (pval))
                      + partial * UNITS_PER_WORD);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }
      /* Some types will require stricter alignment, which will be
         provided for elsewhere in argument layout.  */
      parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }
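      /* (excess & -excess) isolates the lowest set bit of EXCESS, so,
         for example, excess == 6 yields excess_align == 2 * BITS_PER_UNIT:
         the argument can only be assumed aligned to the largest power of
         two dividing the amount of downward padding.  */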
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
                abort ();

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
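      /* Worked example: an 8-byte incoming argument at offset 4 being
         pushed to outgoing offset 8 overlaps bytes [8, 12) of itself, so
         the tests above set sibcall_failure; pushed to offset 12 or
         beyond, the two areas are disjoint and the sibcall survives.  */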
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }
  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
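/* For example, a 3-byte BLKmode structure fails the test above whenever
   its padding direction matches the register's significant end (upward on
   a big-endian target, downward on a little-endian one): copying it into
   a word-sized register would shift the data away from where a stack copy
   would place it, so such a type must be passed in memory.  */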