1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "insn-flags.h"
/* Default definitions for target macros the machine description may
   leave undefined.  NOTE(review): the matching #endif lines are not
   visible in this excerpt -- confirm against the full file.  */
35 #ifndef ACCUMULATE_OUTGOING_ARGS
36 #define ACCUMULATE_OUTGOING_ARGS 0
39 /* Supply a default definition for PUSH_ARGS. */
42 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
/* By default, any function is an acceptable sibling-call target.  */
48 #if !defined FUNCTION_OK_FOR_SIBCALL
49 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
/* If the target gives no preferred stack boundary, fall back to the
   required STACK_BOUNDARY.  */
52 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
53 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
64 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
65 #define PUSH_ARGS_REVERSED PUSH_ARGS
70 #ifndef PUSH_ARGS_REVERSED
71 #define PUSH_ARGS_REVERSED 0
74 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
75 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
77 /* Data structure and subroutines used within expand_call. */
/* Per-argument bookkeeping record.  NOTE(review): the `struct arg_data {'
   opener and several member declarations are not visible in this
   excerpt; only the comments and some members appear below.  Confirm
   against the full file.  */
81 /* Tree node for this argument. */
83 /* Mode for value; TYPE_MODE unless promoted. */
84 enum machine_mode mode;
85 /* Current RTL value for argument, or 0 if it isn't precomputed. */
87 /* Initially-computed RTL value for argument; only for const functions. */
89 /* Register to pass this argument in, 0 if passed on stack, or a
90 PARALLEL if the arg is to be copied into multiple non-contiguous
93 /* If REG was promoted from the actual mode of the argument expression,
94 indicates whether the promotion is sign- or zero-extended. */
96 /* Number of registers to use. 0 means put the whole arg in registers.
97 Also 0 if not passed in registers. */
99 /* Non-zero if argument must be passed on stack.
100 Note that some arguments may be passed on the stack
101 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
102 pass_on_stack identifies arguments that *cannot* go in registers. */
104 /* Offset of this argument from beginning of stack-args. */
105 struct args_size offset;
106 /* Similar, but offset to the start of the stack slot. Different from
107 OFFSET if this arg pads downward. */
108 struct args_size slot_offset;
109 /* Size of this argument on the stack, rounded up for any padding it gets,
110 parts of the argument passed in registers do not count.
111 If REG_PARM_STACK_SPACE is defined, then register parms
112 are counted here as well. */
113 struct args_size size;
114 /* Location on the stack at which parameter should be stored. The store
115 has already been done if STACK == VALUE. */
117 /* Location on the stack of the start of this argument slot. This can
118 differ from STACK if this arg pads downward. This location is known
119 to be aligned to FUNCTION_ARG_BOUNDARY. */
121 /* Place that this stack area has been saved, if needed. */
123 /* If an argument's alignment does not permit direct copying into registers,
124 copy in smaller-sized pieces into pseudos. These are stored in a
125 block pointed to by this field. The next field says how many
126 word-sized pseudos we made. */
129 /* The amount that the stack pointer needs to be adjusted to
130 force alignment for the next argument. */
131 struct args_size alignment_pad;
134 /* A vector of one char per byte of stack space.  A byte is non-zero if
135 the corresponding stack location has been used.
136 This vector is used to prevent a function call within an argument from
137 clobbering any stack already set up. */
138 static char *stack_usage_map;
140 /* Size of STACK_USAGE_MAP. */
141 static int highest_outgoing_arg_in_use;
143 /* stack_arg_under_construction is nonzero when an argument may be
144 initialized with a constructor call (including a C function that
145 returns a BLKmode struct) and expand_call must take special action
146 to make sure the object being constructed does not overlap the
147 argument list for the constructor call. */
148 int stack_arg_under_construction;
/* Mutually recursive helpers that detect function calls within a tree;
   defined below.  */
150 static int calls_function PARAMS ((tree, int));
151 static int calls_function_1 PARAMS ((tree, int));
/* Flag bits describing properties of a call; combined into the
   ecf_flags argument of emit_call_1 and related routines.
   NOTE(review): the #define lines for ECF_CONST, ECF_MALLOC, ECF_PURE
   and part of the ECF_FORK_OR_EXEC comment are not visible in this
   excerpt -- confirm the values (1, 4, 512 by the power-of-two
   sequence) against the full file.  */
153 /* Nonzero if this is a call to a `const' function. */
155 /* Nonzero if this is a call to a `volatile' function. */
156 #define ECF_NORETURN 2
157 /* Nonzero if this is a call to malloc or a related function. */
159 /* Nonzero if it is plausible that this is a call to alloca. */
160 #define ECF_MAY_BE_ALLOCA 8
161 /* Nonzero if this is a call to a function that won't throw an exception. */
162 #define ECF_NOTHROW 16
163 /* Nonzero if this is a call to setjmp or a related function. */
164 #define ECF_RETURNS_TWICE 32
165 /* Nonzero if this is a call to `longjmp'. */
166 #define ECF_LONGJMP 64
167 /* Nonzero if this is a syscall that makes a new process in the image of
169 #define ECF_FORK_OR_EXEC 128
170 #define ECF_SIBCALL 256
171 /* Nonzero if this is a call to "pure" function (like const function,
172 but may read memory). */
/* Forward declarations of the local subroutines defined in this file.
   NOTE(review): several continuation lines of these prototypes are not
   visible in this excerpt.  */
175 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
176 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
177 rtx, int, rtx, int));
178 static void precompute_register_parameters PARAMS ((int,
181 static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
183 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
185 static int finalize_must_preallocate PARAMS ((int, int,
187 struct args_size *));
188 static void precompute_arguments PARAMS ((int, int, int,
190 struct args_size *));
191 static int compute_argument_block_size PARAMS ((int,
194 static void initialize_argument_information PARAMS ((int,
201 static void compute_argument_addresses PARAMS ((struct arg_data *,
203 static rtx rtx_for_function_call PARAMS ((tree, tree));
204 static void load_register_parameters PARAMS ((struct arg_data *,
206 static int libfunc_nothrow PARAMS ((rtx));
207 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
210 static int special_function_p PARAMS ((tree, int));
211 static int flags_from_decl_or_type PARAMS ((tree));
212 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
/* The fixed-argument-area save/restore pair only exists on targets that
   reserve stack space for register parameters.  */
215 #ifdef REG_PARM_STACK_SPACE
216 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
217 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
220 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
223 If WHICH is 0, return 1 if EXP contains a call to any function.
224 Actually, we only need return 1 if evaluating EXP would require pushing
225 arguments on the stack, but that is too difficult to compute, so we just
226 assume any function call might require the stack. */
/* Scratch list used by calls_function_1 so each SAVE_EXPR is walked
   only once; cleared before and after each top-level query.  */
228 static tree calls_function_save_exprs;
/* Entry point: wraps calls_function_1, resetting the SAVE_EXPR list
   around the recursive walk.  NOTE(review): the parameter declarations,
   braces and return statement are not visible in this excerpt.  */
231 calls_function (exp, which)
236 calls_function_save_exprs = 0;
237 val = calls_function_1 (exp, which);
238 calls_function_save_exprs = 0;
/* Recursive worker for calls_function.  Walks the tree EXP looking for
   calls (any call when WHICH is 0; alloca-like calls when WHICH is 1).
   NOTE(review): many lines of this function -- the switch skeleton,
   several case bodies and return statements -- are not visible in this
   excerpt; confirm control flow against the full file.  */
243 calls_function_1 (exp, which)
248 enum tree_code code = TREE_CODE (exp);
249 int type = TREE_CODE_CLASS (code);
250 int length = tree_code_length[(int) code];
252 /* If this code is language-specific, we don't know what it will do. */
253 if ((int) code >= NUM_TREE_CODES)
256 /* Only expressions and references can contain calls. */
257 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* CALL_EXPR whose callee is the address of a known FUNCTION_DECL:
   check whether that function is (or calls) alloca.  */
266 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
267 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
270 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
272 if ((DECL_BUILT_IN (fndecl)
273 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
274 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
275 || (DECL_SAVED_INSNS (fndecl)
276 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
280 /* Third operand is RTL. */
/* A SAVE_EXPR already expanded to RTL cannot cause further calls;
   otherwise remember it so shared SAVE_EXPRs are walked only once.  */
285 if (SAVE_EXPR_RTL (exp) != 0)
287 if (value_member (exp, calls_function_save_exprs))
289 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
290 calls_function_save_exprs);
291 return (TREE_OPERAND (exp, 0) != 0
292 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK: scan local declarations' initializers and all sub-blocks.  */
298 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
299 if (DECL_INITIAL (local) != 0
300 && calls_function_1 (DECL_INITIAL (local), which))
304 register tree subblock;
306 for (subblock = BLOCK_SUBBLOCKS (exp);
308 subblock = TREE_CHAIN (subblock))
309 if (calls_function_1 (subblock, which))
314 case METHOD_CALL_EXPR:
318 case WITH_CLEANUP_EXPR:
/* Default: recurse into each operand of the expression.  */
329 for (i = 0; i < length; i++)
330 if (TREE_OPERAND (exp, i) != 0
331 && calls_function_1 (TREE_OPERAND (exp, i), which))
337 /* Force FUNEXP into a form suitable for the address of a CALL,
338 and return that as an rtx. Also load the static chain register
339 if FNDECL is a nested function.
341 CALL_FUSAGE points to a variable holding the prospective
342 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): the function header line, parameter declarations and
   the final return are not visible in this excerpt.  */
345 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
351 rtx static_chain_value = 0;
353 funexp = protect_from_queue (funexp, 0);
356 /* Get possible static chain value for nested function in C. */
357 static_chain_value = lookup_static_chain (fndecl);
359 /* Make a valid memory address and copy constants thru pseudo-regs,
360 but not for a constant address if -fno-function-cse. */
361 if (GET_CODE (funexp) != SYMBOL_REF)
362 /* If we are using registers for parameters, force the
363 function address into a register now. */
364 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
365 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
366 : memory_address (FUNCTION_MODE, funexp));
/* With function CSE enabled, load even a SYMBOL_REF address into a
   register so common subexpression elimination can share it -- unless
   the call is recursive and the target forbids CSE for that case.  */
369 #ifndef NO_FUNCTION_CSE
370 if (optimize && ! flag_no_function_cse)
371 #ifdef NO_RECURSIVE_FUNCTION_CSE
372 if (fndecl != current_function_decl)
374 funexp = force_reg (Pmode, funexp);
/* Load the static chain register and record its use on the call.  */
378 if (static_chain_value != 0)
380 emit_move_insn (static_chain_rtx, static_chain_value);
382 if (GET_CODE (static_chain_rtx) == REG)
383 use_reg (call_fusage, static_chain_rtx);
389 /* Generate instructions to call function FUNEXP,
390 and optionally pop the results.
391 The CALL_INSN is the first insn generated.
393 FNDECL is the declaration node of the function. This is given to the
394 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
396 FUNTYPE is the data type of the function. This is given to the macro
397 RETURN_POPS_ARGS to determine whether this function pops its own args.
398 We used to allow an identifier for library functions, but that doesn't
399 work when the return type is an aggregate type and the calling convention
400 says that the pointer to this aggregate is to be popped by the callee.
402 STACK_SIZE is the number of bytes of arguments on the stack,
403 ROUNDED_STACK_SIZE is that number rounded up to
404 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
405 both to put into the call insn and to generate explicit popping
408 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
409 It is zero if this call doesn't want a structure value.
411 NEXT_ARG_REG is the rtx that results from executing
412 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
413 just after all the args have had their registers assigned.
414 This could be whatever you like, but normally it is the first
415 arg-register beyond those used for args in this call,
416 or 0 if all the arg-registers are used in this call.
417 It is passed on to `gen_call' so you can put this info in the call insn.
419 VALREG is a hard register in which a value is returned,
420 or 0 if the call does not return a value.
422 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
423 the args to this call were processed.
424 We restore `inhibit_defer_pop' to that value.
426 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
427 denote registers used by the called function. */
/* NOTE(review): numerous lines of this function -- some parameter
   declarations, braces, #else/#endif lines and abort calls -- are not
   visible in this excerpt; confirm against the full file.  */
430 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
431 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
432 call_fusage, ecf_flags)
434 tree fndecl ATTRIBUTE_UNUSED;
435 tree funtype ATTRIBUTE_UNUSED;
436 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
437 HOST_WIDE_INT rounded_stack_size;
438 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
441 int old_inhibit_defer_pop;
445 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
446 #if defined (HAVE_call) && defined (HAVE_call_value)
447 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
450 int already_popped = 0;
451 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
453 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
454 and we don't want to load it into a register as an optimization,
455 because prepare_call_address already did it if it should be done. */
456 if (GET_CODE (funexp) != SYMBOL_REF)
457 funexp = memory_address (FUNCTION_MODE, funexp);
/* Emit the call using the most specific pattern the target provides,
   trying in order: sibcall_pop, call_pop, sibcall, plain call.  */
459 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
460 if ((ecf_flags & ECF_SIBCALL)
461 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
462 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
465 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
468 /* If this subroutine pops its own args, record that in the call insn
469 if possible, for the sake of frame pointer elimination. */
472 pat = gen_sibcall_value_pop (valreg,
473 gen_rtx_MEM (FUNCTION_MODE, funexp),
474 rounded_stack_size_rtx, next_arg_reg,
477 pat = gen_sibcall_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
478 rounded_stack_size_rtx, next_arg_reg, n_pop);
480 emit_call_insn (pat);
486 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
487 /* If the target has "call" or "call_value" insns, then prefer them
488 if no arguments are actually popped. If the target does not have
489 "call" or "call_value" insns, then we must use the popping versions
490 even if the call has no arguments to pop. */
491 #if defined (HAVE_call) && defined (HAVE_call_value)
492 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
495 if (HAVE_call_pop && HAVE_call_value_pop)
498 rtx n_pop = GEN_INT (n_popped);
501 /* If this subroutine pops its own args, record that in the call insn
502 if possible, for the sake of frame pointer elimination. */
505 pat = gen_call_value_pop (valreg,
506 gen_rtx_MEM (FUNCTION_MODE, funexp),
507 rounded_stack_size_rtx, next_arg_reg, n_pop);
509 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
510 rounded_stack_size_rtx, next_arg_reg, n_pop);
512 emit_call_insn (pat);
518 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
519 if ((ecf_flags & ECF_SIBCALL)
520 && HAVE_sibcall && HAVE_sibcall_value)
523 emit_call_insn (gen_sibcall_value (valreg,
524 gen_rtx_MEM (FUNCTION_MODE, funexp),
525 rounded_stack_size_rtx,
526 next_arg_reg, NULL_RTX));
528 emit_call_insn (gen_sibcall (gen_rtx_MEM (FUNCTION_MODE, funexp),
529 rounded_stack_size_rtx, next_arg_reg,
530 struct_value_size_rtx));
535 #if defined (HAVE_call) && defined (HAVE_call_value)
536 if (HAVE_call && HAVE_call_value)
539 emit_call_insn (gen_call_value (valreg,
540 gen_rtx_MEM (FUNCTION_MODE, funexp),
541 rounded_stack_size_rtx, next_arg_reg,
544 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
545 rounded_stack_size_rtx, next_arg_reg,
546 struct_value_size_rtx));
552 /* Find the CALL insn we just emitted. */
553 for (call_insn = get_last_insn ();
554 call_insn && GET_CODE (call_insn) != CALL_INSN;
555 call_insn = PREV_INSN (call_insn))
561 /* Mark memory as used for "pure" function call. */
562 if (ecf_flags & ECF_PURE)
564 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
565 gen_rtx_USE (VOIDmode,
566 gen_rtx_MEM (BLKmode,
567 gen_rtx_SCRATCH (VOIDmode))), call_fusage);
570 /* Put the register usage information on the CALL. If there is already
571 some usage information, put ours at the end. */
572 if (CALL_INSN_FUNCTION_USAGE (call_insn))
576 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
577 link = XEXP (link, 1))
580 XEXP (link, 1) = call_fusage;
583 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
585 /* If this is a const call, then set the insn's unchanging bit. */
586 if (ecf_flags & (ECF_CONST | ECF_PURE))
587 CONST_CALL_P (call_insn) = 1;
589 /* If this call can't throw, attach a REG_EH_REGION reg note to that
591 if (ecf_flags & ECF_NOTHROW)
592 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
593 REG_NOTES (call_insn));
595 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
597 /* Restore this now, so that we do defer pops for this call's args
598 if the context of the call as a whole permits. */
599 inhibit_defer_pop = old_inhibit_defer_pop;
/* If the callee pops its own args, record the stack-pointer clobber on
   the call and account for the popped bytes.  */
604 CALL_INSN_FUNCTION_USAGE (call_insn)
605 = gen_rtx_EXPR_LIST (VOIDmode,
606 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
607 CALL_INSN_FUNCTION_USAGE (call_insn));
608 rounded_stack_size -= n_popped;
609 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
610 stack_pointer_delta -= n_popped;
613 if (!ACCUMULATE_OUTGOING_ARGS)
615 /* If returning from the subroutine does not automatically pop the args,
616 we need an instruction to pop them sooner or later.
617 Perhaps do it now; perhaps just record how much space to pop later.
619 If returning from the subroutine does pop the args, indicate that the
620 stack pointer will be changed. */
622 if (rounded_stack_size != 0)
624 if (flag_defer_pop && inhibit_defer_pop == 0
625 && !(ecf_flags & (ECF_CONST | ECF_PURE)))
626 pending_stack_adjust += rounded_stack_size;
628 adjust_stack (rounded_stack_size_rtx);
631 /* When we accumulate outgoing args, we must avoid any stack manipulations.
632 Restore the stack pointer to its original value now. Usually
633 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
634 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
635 popping variants of functions exist as well.
637 ??? We may optimize similar to defer_pop above, but it is
638 probably not worthwhile.
640 ??? It will be worthwhile to enable combine_stack_adjustments even for
643 anti_adjust_stack (GEN_INT (n_popped));
646 /* Determine if the function identified by NAME and FNDECL is one with
647 special properties we wish to know about.
649 For example, if the function might return more than one time (setjmp), then
650 set RETURNS_TWICE to a nonzero value.
652 Similarly set LONGJMP for if the function is in the longjmp family.
654 Set MALLOC for any of the standard memory allocation functions which
655 allocate from the heap.
657 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
658 space from the stack such as alloca. */
/* Returns FLAGS augmented with the ECF_* bits deduced from FNDECL's
   name.  NOTE(review): the parameter declarations, braces and some
   lines of the name-matching chain are not visible in this excerpt.  */
661 special_function_p (fndecl, flags)
665 if (! (flags & ECF_MALLOC)
666 && fndecl && DECL_NAME (fndecl)
667 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
668 /* Exclude functions not at the file scope, or not `extern',
669 since they are not the magic functions we would otherwise
671 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
673 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
676 /* We assume that alloca will always be called by name. It
677 makes no sense to pass it as a pointer-to-function to
678 anything that does not understand its behavior. */
679 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
681 && ! strcmp (name, "alloca"))
682 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
684 && ! strcmp (name, "__builtin_alloca"))))
685 flags |= ECF_MAY_BE_ALLOCA;
687 /* Disregard prefix _, __ or __x. */
690 if (name[1] == '_' && name[2] == 'x')
692 else if (name[1] == '_')
/* Match setjmp/longjmp families; the leading two-character checks are a
   cheap filter before the full strcmp.  */
701 && (! strcmp (tname, "setjmp")
702 || ! strcmp (tname, "setjmp_syscall")))
704 && ! strcmp (tname, "sigsetjmp"))
706 && ! strcmp (tname, "savectx")))
707 flags |= ECF_RETURNS_TWICE;
710 && ! strcmp (tname, "siglongjmp"))
711 flags |= ECF_LONGJMP;
713 else if ((tname[0] == 'q' && tname[1] == 's'
714 && ! strcmp (tname, "qsetjmp"))
715 || (tname[0] == 'v' && tname[1] == 'f'
716 && ! strcmp (tname, "vfork")))
717 flags |= ECF_RETURNS_TWICE;
719 else if (tname[0] == 'l' && tname[1] == 'o'
720 && ! strcmp (tname, "longjmp"))
721 flags |= ECF_LONGJMP;
723 else if ((tname[0] == 'f' && tname[1] == 'o'
724 && ! strcmp (tname, "fork"))
725 /* Linux specific: __clone. check NAME to insist on the
726 leading underscores, to avoid polluting the ISO / POSIX
728 || (name[0] == '_' && name[1] == '_'
729 && ! strcmp (tname, "clone"))
730 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
731 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
733 || ((tname[5] == 'p' || tname[5] == 'e')
734 && tname[6] == '\0'))))
735 flags |= ECF_FORK_OR_EXEC;
737 /* Do not add any more malloc-like functions to this list,
738 instead mark them as malloc functions using the malloc attribute.
739 Note, realloc is not suitable for attribute malloc since
740 it may return the same address across multiple calls.
741 C++ operator new is not suitable because it is not required
742 to return a unique pointer; indeed, the standard placement new
743 just returns its argument. */
744 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
745 && (! strcmp (tname, "malloc")
746 || ! strcmp (tname, "calloc")
747 || ! strcmp (tname, "strdup")))
753 /* Return nonzero when FNDECL represents a call to setjmp or a related
   function that may return twice (the code tests ECF_RETURNS_TWICE,
   not ECF_LONGJMP).  */
755 setjmp_call_p (fndecl)
758 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
761 /* Detect flags (function attributes) from the function type node. */
/* EXP may be either a FUNCTION_DECL or a type node; returns the ECF_*
   flag set it implies.  NOTE(review): the flag-variable declaration,
   several `flags |= ...' lines and the return are not visible in this
   excerpt.  */
763 flags_from_decl_or_type (exp)
767 /* ??? We can't set IS_MALLOC for function types? */
770 /* The function exp may have the `malloc' attribute. */
771 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
774 /* The function exp may have the `pure' attribute. */
775 if (DECL_P (exp) && DECL_IS_PURE (exp))
778 if (TREE_NOTHROW (exp))
779 flags |= ECF_NOTHROW;
/* `const' functions are readonly and not volatile; volatile alone
   marks a noreturn function.  */
782 if (TREE_READONLY (exp) && !TREE_THIS_VOLATILE (exp))
785 if (TREE_THIS_VOLATILE (exp))
786 flags |= ECF_NORETURN;
792 /* Precompute all register parameters as described by ARGS, storing values
793 into fields within the ARGS array.
795 NUM_ACTUALS indicates the total number elements in the ARGS array.
797 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): the parameter declarations, braces and parts of the
   loop body are not visible in this excerpt.  */
800 precompute_register_parameters (num_actuals, args, reg_parm_seen)
802 struct arg_data *args;
809 for (i = 0; i < num_actuals; i++)
810 if (args[i].reg != 0 && ! args[i].pass_on_stack)
/* Expand the argument now if it was not precomputed earlier.  */
814 if (args[i].value == 0)
817 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
819 preserve_temp_slots (args[i].value);
822 /* ANSI doesn't require a sequence point here,
823 but PCC has one, so this will avoid some problems. */
827 /* If we are to promote the function arg to a wider mode,
830 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
832 = convert_modes (args[i].mode,
833 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
834 args[i].value, args[i].unsignedp)
836 /* If the value is expensive, and we are inside an appropriately
837 short loop, put the value into a pseudo and then put the pseudo
840 For small register classes, also do this if this call uses
841 register parameters. This is to avoid reload conflicts while
842 loading the parameters registers. */
844 if ((! (GET_CODE (args[i].value) == REG
845 || (GET_CODE (args[i].value) == SUBREG
846 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
847 && args[i].mode != BLKmode
848 && rtx_cost (args[i].value, SET) > 2
849 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
850 || preserve_subexpressions_p ()))
851 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
855 #ifdef REG_PARM_STACK_SPACE
857 /* The argument list is the property of the called routine and it
858 may clobber it. If the fixed area has been used for previous
859 parameters, we must save and restore it. */
/* Returns an rtx holding the saved contents (register or stack temp),
   or NULL_RTX if nothing needed saving; *LOW_TO_SAVE / *HIGH_TO_SAVE
   receive the byte range saved.  NOTE(review): the return type line,
   braces and several statements are not visible in this excerpt.  */
861 save_fixed_argument_area (reg_parm_stack_space, argblock,
862 low_to_save, high_to_save)
863 int reg_parm_stack_space;
869 rtx save_area = NULL_RTX;
871 /* Compute the boundary of the area that needs to be saved, if any. */
872 #ifdef ARGS_GROW_DOWNWARD
873 for (i = 0; i < reg_parm_stack_space + 1; i++)
875 for (i = 0; i < reg_parm_stack_space; i++)
/* Bytes beyond the in-use map or unmarked in it need no saving.  */
878 if (i >= highest_outgoing_arg_in_use
879 || stack_usage_map[i] == 0)
882 if (*low_to_save == -1)
888 if (*low_to_save >= 0)
890 int num_to_save = *high_to_save - *low_to_save + 1;
891 enum machine_mode save_mode
892 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
895 /* If we don't have the required alignment, must do this in BLKmode. */
896 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
897 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
900 #ifdef ARGS_GROW_DOWNWARD
901 stack_area = gen_rtx_MEM (save_mode,
902 memory_address (save_mode,
903 plus_constant (argblock,
906 stack_area = gen_rtx_MEM (save_mode,
907 memory_address (save_mode,
908 plus_constant (argblock,
911 if (save_mode == BLKmode)
913 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
914 /* Cannot use emit_block_move here because it can be done by a
915 library call which in turn gets into this place again and deadly
916 infinite recursion happens. */
917 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
/* Otherwise the whole area fits in one register-mode move.  */
922 save_area = gen_reg_rtx (save_mode);
923 emit_move_insn (save_area, stack_area);
/* Inverse of save_fixed_argument_area: copy SAVE_AREA back into the
   fixed register-parameter area at ARGBLOCK.  NOTE(review): the return
   type line, parameter declarations and braces are not visible in this
   excerpt.  */
930 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
936 enum machine_mode save_mode = GET_MODE (save_area);
937 #ifdef ARGS_GROW_DOWNWARD
939 = gen_rtx_MEM (save_mode,
940 memory_address (save_mode,
941 plus_constant (argblock,
945 = gen_rtx_MEM (save_mode,
946 memory_address (save_mode,
947 plus_constant (argblock,
951 if (save_mode != BLKmode)
952 emit_move_insn (stack_area, save_area);
954 /* Cannot use emit_block_move here because it can be done by a library
955 call which in turn gets into this place again and deadly infinite
956 recursion happens. */
957 move_by_pieces (stack_area, validize_mem (save_area),
958 high_to_save - low_to_save + 1, PARM_BOUNDARY);
962 /* If any elements in ARGS refer to parameters that are to be passed in
963 registers, but not in memory, and whose alignment does not permit a
964 direct copy into registers. Copy the values into a group of pseudos
965 which we will later copy into the appropriate hard registers.
967 Pseudos for each unaligned argument will be stored into the array
968 args[argnum].aligned_regs. The caller is responsible for deallocating
969 the aligned_regs array if it is nonzero. */
/* NOTE(review): the return type line, parameter declarations, braces
   and some continuation lines are not visible in this excerpt.  */
972 store_unaligned_arguments_into_pseudos (args, num_actuals)
973 struct arg_data *args;
978 for (i = 0; i < num_actuals; i++)
979 if (args[i].reg != 0 && ! args[i].pass_on_stack
980 && args[i].mode == BLKmode
981 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
982 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
984 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
985 int big_endian_correction = 0;
987 args[i].n_aligned_regs
988 = args[i].partial ? args[i].partial
989 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
991 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
992 * args[i].n_aligned_regs);
994 /* Structures smaller than a word are aligned to the least
995 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
996 this means we must skip the empty high order bytes when
997 calculating the bit offset. */
998 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
999 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
1001 for (j = 0; j < args[i].n_aligned_regs; j++)
1003 rtx reg = gen_reg_rtx (word_mode);
1004 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1005 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1006 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1008 args[i].aligned_regs[j] = reg;
1010 /* There is no need to restrict this code to loading items
1011 in TYPE_ALIGN sized hunks. The bitfield instructions can
1012 load up entire word sized registers efficiently.
1014 ??? This may not be needed anymore.
1015 We used to emit a clobber here but that doesn't let later
1016 passes optimize the instructions we emit. By storing 0 into
1017 the register later passes know the first AND to zero out the
1018 bitfield being set in the register is unnecessary. The store
1019 of 0 will be deleted as will at least the first AND. */
1021 emit_move_insn (reg, const0_rtx);
1023 bytes -= bitsize / BITS_PER_UNIT;
1024 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1025 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1026 word_mode, word_mode, bitalign,
1028 bitalign, BITS_PER_WORD);
1033 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1036 NUM_ACTUALS is the total number of parameters.
1038 N_NAMED_ARGS is the total number of named arguments.
1040 FNDECL is the tree code for the target of this call (if known)
1042 ARGS_SO_FAR holds state needed by the target to know where to place
1045 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1046 for arguments which are passed in registers.
1048 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
1049 and may be modified by this routine.
1051 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1052 flags which may be modified by this routine. */
1055 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1056 actparms, fndecl, args_so_far,
1057 reg_parm_stack_space, old_stack_level,
1058 old_pending_adj, must_preallocate,
1060 int num_actuals ATTRIBUTE_UNUSED;
1061 struct arg_data *args;
1062 struct args_size *args_size;
1063 int n_named_args ATTRIBUTE_UNUSED;
1066 CUMULATIVE_ARGS *args_so_far;
1067 int reg_parm_stack_space;
1068 rtx *old_stack_level;
1069 int *old_pending_adj;
1070 int *must_preallocate;
1073 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1076 /* Count arg position in order args appear. */
1079 struct args_size alignment_pad;
1083 args_size->constant = 0;
1086 /* In this loop, we consider args in the order they are written.
1087 We fill up ARGS from the front or from the back if necessary
1088 so that in any case the first arg to be pushed ends up at the front. */
1090 if (PUSH_ARGS_REVERSED)
1092 i = num_actuals - 1, inc = -1;
1093 /* In this case, must reverse order of args
1094 so that we compute and push the last arg first. */
1101 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1102 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1104 tree type = TREE_TYPE (TREE_VALUE (p));
1106 enum machine_mode mode;
1108 args[i].tree_value = TREE_VALUE (p);
1110 /* Replace erroneous argument with constant zero. */
1111 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1112 args[i].tree_value = integer_zero_node, type = integer_type_node;
1114 /* If TYPE is a transparent union, pass things the way we would
1115 pass the first field of the union. We have already verified that
1116 the modes are the same. */
1117 if (TYPE_TRANSPARENT_UNION (type))
1118 type = TREE_TYPE (TYPE_FIELDS (type));
1120 /* Decide where to pass this arg.
1122 args[i].reg is nonzero if all or part is passed in registers.
1124 args[i].partial is nonzero if part but not all is passed in registers,
1125 and the exact value says how many words are passed in registers.
1127 args[i].pass_on_stack is nonzero if the argument must at least be
1128 computed on the stack. It may then be loaded back into registers
1129 if args[i].reg is nonzero.
1131 These decisions are driven by the FUNCTION_... macros and must agree
1132 with those made by function.c. */
1134 /* See if this argument should be passed by invisible reference. */
1135 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1136 && contains_placeholder_p (TYPE_SIZE (type)))
1137 || TREE_ADDRESSABLE (type)
1138 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1139 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1140 type, argpos < n_named_args)
1144 /* If we're compiling a thunk, pass through invisible
1145 references instead of making a copy. */
1146 if (current_function_is_thunk
1147 #ifdef FUNCTION_ARG_CALLEE_COPIES
1148 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1149 type, argpos < n_named_args)
1150 /* If it's in a register, we must make a copy of it too. */
1151 /* ??? Is this a sufficient test? Is there a better one? */
1152 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1153 && REG_P (DECL_RTL (args[i].tree_value)))
1154 && ! TREE_ADDRESSABLE (type))
1158 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1159 new object from the argument. If we are passing by
1160 invisible reference, the callee will do that for us, so we
1161 can strip off the TARGET_EXPR. This is not always safe,
1162 but it is safe in the only case where this is a useful
1163 optimization; namely, when the argument is a plain object.
1164 In that case, the frontend is just asking the backend to
1165 make a bitwise copy of the argument. */
1167 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1168 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1169 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1170 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1172 args[i].tree_value = build1 (ADDR_EXPR,
1173 build_pointer_type (type),
1174 args[i].tree_value);
1175 type = build_pointer_type (type);
1179 /* We make a copy of the object and pass the address to the
1180 function being called. */
1183 if (!COMPLETE_TYPE_P (type)
1184 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1185 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1186 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1187 STACK_CHECK_MAX_VAR_SIZE))))
1189 /* This is a variable-sized object. Make space on the stack
1191 rtx size_rtx = expr_size (TREE_VALUE (p));
1193 if (*old_stack_level == 0)
1195 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1196 *old_pending_adj = pending_stack_adjust;
1197 pending_stack_adjust = 0;
1200 copy = gen_rtx_MEM (BLKmode,
1201 allocate_dynamic_stack_space (size_rtx,
1203 TYPE_ALIGN (type)));
1207 int size = int_size_in_bytes (type);
1208 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1211 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1213 store_expr (args[i].tree_value, copy, 0);
1214 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1216 args[i].tree_value = build1 (ADDR_EXPR,
1217 build_pointer_type (type),
1218 make_tree (type, copy));
1219 type = build_pointer_type (type);
1223 mode = TYPE_MODE (type);
1224 unsignedp = TREE_UNSIGNED (type);
1226 #ifdef PROMOTE_FUNCTION_ARGS
1227 mode = promote_mode (type, mode, &unsignedp, 1);
1230 args[i].unsignedp = unsignedp;
1231 args[i].mode = mode;
1233 #ifdef FUNCTION_INCOMING_ARG
1234 /* If this is a sibling call and the machine has register windows, the
1235 register window has to be unwound before calling the routine, so
1236 arguments have to go into the incoming registers. */
1237 if (*ecf_flags & ECF_SIBCALL)
1238 args[i].reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1239 argpos < n_named_args);
1242 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1243 argpos < n_named_args);
1245 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1248 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1249 argpos < n_named_args);
1252 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1254 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1255 it means that we are to pass this arg in the register(s) designated
1256 by the PARALLEL, but also to pass it in the stack. */
1257 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1258 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1259 args[i].pass_on_stack = 1;
1261 /* If this is an addressable type, we must preallocate the stack
1262 since we must evaluate the object into its final location.
1264 If this is to be passed in both registers and the stack, it is simpler
1266 if (TREE_ADDRESSABLE (type)
1267 || (args[i].pass_on_stack && args[i].reg != 0))
1268 *must_preallocate = 1;
1270 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1271 we cannot consider this function call constant. */
1272 if (TREE_ADDRESSABLE (type))
1273 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1275 /* Compute the stack-size of this argument. */
1276 if (args[i].reg == 0 || args[i].partial != 0
1277 || reg_parm_stack_space > 0
1278 || args[i].pass_on_stack)
1279 locate_and_pad_parm (mode, type,
1280 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1285 fndecl, args_size, &args[i].offset,
1286 &args[i].size, &alignment_pad);
1288 #ifndef ARGS_GROW_DOWNWARD
1289 args[i].slot_offset = *args_size;
1292 args[i].alignment_pad = alignment_pad;
1294 /* If a part of the arg was put into registers,
1295 don't include that part in the amount pushed. */
1296 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1297 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1298 / (PARM_BOUNDARY / BITS_PER_UNIT)
1299 * (PARM_BOUNDARY / BITS_PER_UNIT));
1301 /* Update ARGS_SIZE, the total stack space for args so far. */
1303 args_size->constant += args[i].size.constant;
1304 if (args[i].size.var)
1306 ADD_PARM_SIZE (*args_size, args[i].size.var);
1309 /* Since the slot offset points to the bottom of the slot,
1310 we must record it after incrementing if the args grow down. */
1311 #ifdef ARGS_GROW_DOWNWARD
1312 args[i].slot_offset = *args_size;
1314 args[i].slot_offset.constant = -args_size->constant;
1316 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1319 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1320 have been used, etc. */
1322 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1323 argpos < n_named_args);
1327 /* Update ARGS_SIZE to contain the total size for the argument block.
1328 Return the original constant component of the argument block's size.
1330 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1331 for arguments passed in registers. */
/* NOTE(review): elided listing -- original source lines are missing between
   entries (braces, #else/#endif, statements), so this is not a complete
   function body.  Code lines below are byte-identical to the listing; only
   comment text was added or corrected.
   Purpose (from the header comment above): round ARGS_SIZE up to the
   preferred boundary, fold in REG_PARM_STACK_SPACE, and return the
   original, unrounded constant component of the block size.  */
1334 compute_argument_block_size (reg_parm_stack_space, args_size,
1335 preferred_stack_boundary)
1336 int reg_parm_stack_space;
1337 struct args_size *args_size;
1338 int preferred_stack_boundary ATTRIBUTE_UNUSED;
/* Save the pre-rounding constant size; this is the return value.  */
1340 int unadjusted_args_size = args_size->constant;
1342 /* For accumulate outgoing args mode we don't need to align, since the frame
1343 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1344 backends from generating misaligned frame sizes. */
1345 #ifdef STACK_BOUNDARY
1346 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1347 preferred_stack_boundary = STACK_BOUNDARY;
1350 /* Compute the actual size of the argument block required. The variable
1351 and constant sizes must be combined, the size may have to be rounded,
1352 and there may be a minimum required size. */
/* Variable-size branch: fold the constant part into the tree-valued
   size, then round/adjust the tree expression.  */
1356 args_size->var = ARGS_SIZE_TREE (*args_size);
1357 args_size->constant = 0;
1359 #ifdef PREFERRED_STACK_BOUNDARY
/* Boundary is given in bits; convert to bytes before rounding.  */
1360 preferred_stack_boundary /= BITS_PER_UNIT;
1361 if (preferred_stack_boundary > 1)
1363 /* We don't handle this case yet. To handle it correctly we have
1364 to add the delta, round and subtract the delta.
1365 Currently no machine description requires this support. */
1366 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1368 args_size->var = round_up (args_size->var, preferred_stack_boundary);
/* Never allocate less than the register-parameter save area.  */
1372 if (reg_parm_stack_space > 0)
1375 = size_binop (MAX_EXPR, args_size->var,
1376 ssize_int (reg_parm_stack_space));
1378 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1379 /* The area corresponding to register parameters is not to count in
1380 the size of the block we need. So make the adjustment. */
1382 = size_binop (MINUS_EXPR, args_size->var,
1383 ssize_int (reg_parm_stack_space));
/* Constant-size branch: round the constant size up to the preferred
   boundary, compensating for the stack-pointer delta already applied.  */
1389 #ifdef PREFERRED_STACK_BOUNDARY
1390 preferred_stack_boundary /= BITS_PER_UNIT;
1391 if (preferred_stack_boundary < 1)
1392 preferred_stack_boundary = 1;
1393 args_size->constant = (((args_size->constant
1394 + stack_pointer_delta
1395 + preferred_stack_boundary - 1)
1396 / preferred_stack_boundary
1397 * preferred_stack_boundary)
1398 - stack_pointer_delta);
1401 args_size->constant = MAX (args_size->constant,
1402 reg_parm_stack_space);
1404 #ifdef MAYBE_REG_PARM_STACK_SPACE
1405 if (reg_parm_stack_space == 0)
1406 args_size->constant = 0;
1409 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1410 args_size->constant -= reg_parm_stack_space;
/* Callers need the pre-adjustment size, e.g. to compare against the
   final rounded size.  */
1413 return unadjusted_args_size;
1416 /* Precompute parameters as needed for a function call.
1418 FLAGS is mask of ECF_* constants.
1420 MUST_PREALLOCATE indicates that we must preallocate stack space for
1421 any stack arguments.
1423 NUM_ACTUALS is the number of arguments.
1425 ARGS is an array containing information for each argument; this routine
1426 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1428 ARGS_SIZE contains information about the size of the arg list. */
/* NOTE(review): elided listing -- intermediate source lines are missing,
   so this is not a complete function body.  Code lines are byte-identical
   to the listing; only comments were added.
   Purpose (per the header comment above): evaluate ahead of time any
   argument whose later expansion could clobber already-pushed arguments
   (cse'able calls, or args containing function calls when stack space is
   preallocated), filling in args[i].value / args[i].initial_value.  */
1431 precompute_arguments (flags, must_preallocate, num_actuals, args, args_size)
1433 int must_preallocate;
1435 struct arg_data *args;
1436 struct args_size *args_size;
1440 /* If this function call is cse'able, precompute all the parameters.
1441 Note that if the parameter is constructed into a temporary, this will
1442 cause an additional copy because the parameter will be constructed
1443 into a temporary location and then copied into the outgoing arguments.
1444 If a parameter contains a call to alloca and this function uses the
1445 stack, precompute the parameter. */
1447 /* If we preallocated the stack space, and some arguments must be passed
1448 on the stack, then we must precompute any parameter which contains a
1449 function call which will store arguments on the stack.
1450 Otherwise, evaluating the parameter may clobber previous parameters
1451 which have already been stored into the stack. */
1453 for (i = 0; i < num_actuals; i++)
1454 if ((flags & (ECF_CONST | ECF_PURE))
1455 || ((args_size->var != 0 || args_size->constant != 0)
1456 && calls_function (args[i].tree_value, 1))
1457 || (must_preallocate
1458 && (args_size->var != 0 || args_size->constant != 0)
1459 && calls_function (args[i].tree_value, 0)))
1461 /* If this is an addressable type, we cannot pre-evaluate it. */
1462 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
/* Evaluate the argument now and keep any temporaries alive until
   the call itself is emitted.  */
1468 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1470 preserve_temp_slots (args[i].value);
1473 /* ANSI doesn't require a sequence point here,
1474 but PCC has one, so this will avoid some problems. */
1477 args[i].initial_value = args[i].value
1478 = protect_from_queue (args[i].value, 0);
/* If promotion changed the mode, convert the precomputed value to
   the promoted mode expected at the call site.  */
1480 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1483 = convert_modes (args[i].mode,
1484 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1485 args[i].value, args[i].unsignedp);
1486 #ifdef PROMOTE_FOR_CALL_ONLY
1487 /* CSE will replace this only if it contains args[i].value
1488 pseudo, so convert it down to the declared mode using
a promoted SUBREG (continuation elided in this listing).  */
1490 if (GET_CODE (args[i].value) == REG
1491 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1493 args[i].initial_value
1494 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
/* Mark the SUBREG as a promoted variable so later passes know the
   upper bits are defined by the promotion.  */
1496 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1497 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1498 = args[i].unsignedp;
1505 /* Given the current state of MUST_PREALLOCATE and information about
1506 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1507 compute and return the final value for MUST_PREALLOCATE. */
/* NOTE(review): elided listing -- some original lines are missing; code
   lines are byte-identical, only comments added.
   Purpose: given the tentative MUST_PREALLOCATE and the scanned argument
   data, decide whether the whole argument block must be preallocated,
   and return the final flag value.  */
1510 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1511 int must_preallocate;
1513 struct arg_data *args;
1514 struct args_size *args_size;
1516 /* See if we have or want to preallocate stack space.
1518 If we would have to push a partially-in-regs parm
1519 before other stack parms, preallocate stack space instead.
1521 If the size of some parm is not a multiple of the required stack
1522 alignment, we must preallocate.
1524 If the total size of arguments that would otherwise create a copy in
1525 a temporary (such as a CALL) is more than half the total argument list
1526 size, preallocation is faster.
1528 Another reason to preallocate is if we have a machine (like the m88k)
1529 where stack alignment is required to be maintained between every
1530 pair of insns, not just when the call is made. However, we assume here
1531 that such machines either do not have push insns (and hence preallocation
1532 would occur anyway) or the problem is taken care of with
1535 if (! must_preallocate)
1537 int partial_seen = 0;
1538 int copy_to_evaluate_size = 0;
/* Stop scanning as soon as preallocation becomes mandatory.  */
1541 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1543 if (args[i].partial > 0 && ! args[i].pass_on_stack)
/* A pure stack parm after a partially-in-regs parm forces it.  */
1545 else if (partial_seen && args[i].reg == 0)
1546 must_preallocate = 1;
/* Accumulate the size of BLKmode args that would otherwise be
   built in a temporary and then copied.  */
1548 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1549 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1550 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1551 || TREE_CODE (args[i].tree_value) == COND_EXPR
1552 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1553 copy_to_evaluate_size
1554 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
/* Preallocate when at least half the block would be copies anyway.  */
1557 if (copy_to_evaluate_size * 2 >= args_size->constant
1558 && args_size->constant > 0)
1559 must_preallocate = 1;
1561 return must_preallocate;
1564 /* If we preallocated stack space, compute the address of each argument
1565 and store it into the ARGS array.
1567 We need not ensure it is a valid memory address here; it will be
1568 validized when it is used.
1570 ARGBLOCK is an rtx for the address of the outgoing arguments. */
/* NOTE(review): elided listing -- some original lines are missing; code
   lines are byte-identical, only comments added.
   Purpose (per the header comment above): with stack space preallocated
   at ARGBLOCK, compute each stack argument's address (args[i].stack and
   args[i].stack_slot); validity of the address is checked later.  */
1573 compute_argument_addresses (args, argblock, num_actuals)
1574 struct arg_data *args;
/* Split ARGBLOCK into a base register plus constant displacement so the
   per-arg offsets below can be folded into one constant when possible.  */
1580 rtx arg_reg = argblock;
1581 int i, arg_offset = 0;
1583 if (GET_CODE (argblock) == PLUS)
1584 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1586 for (i = 0; i < num_actuals; i++)
1588 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1589 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1592 /* Skip this parm if it will not be passed on the stack. */
1593 if (! args[i].pass_on_stack && args[i].reg != 0)
/* Address for the value as pushed: base + offset (+ block offset).  */
1596 if (GET_CODE (offset) == CONST_INT)
1597 addr = plus_constant (arg_reg, INTVAL (offset));
1599 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1601 addr = plus_constant (addr, arg_offset);
1602 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1605 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
/* Same computation for the slot address (bottom of the arg slot).  */
1607 if (GET_CODE (slot_offset) == CONST_INT)
1608 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1610 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1612 addr = plus_constant (addr, arg_offset);
1613 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1618 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1619 in a call instruction.
1621 FNDECL is the tree node for the target function. For an indirect call
1622 FNDECL will be NULL_TREE.
1624 EXP is the CALL_EXPR for this call. */
/* NOTE(review): elided listing -- some original lines (including the tail
   of this function) are missing; code lines are byte-identical, only
   comments added.
   Purpose (per the header comment above): return an rtx usable as the
   call target address: a SYMBOL_REF for a known FNDECL, otherwise the
   expanded callee expression from EXP.  */
1627 rtx_for_function_call (fndecl, exp)
1633 /* Get the function to call, in the form of RTL. */
1636 /* If this is the first use of the function, see if we need to
1637 make an external definition for it. */
1638 if (! TREE_USED (fndecl))
1640 assemble_external (fndecl);
1641 TREE_USED (fndecl) = 1;
1644 /* Get a SYMBOL_REF rtx for the function address. */
1645 funexp = XEXP (DECL_RTL (fndecl), 0);
1648 /* Generate an rtx (probably a pseudo-register) for the address. */
/* Indirect-call case: expand the callee expression itself.  */
1653 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1654 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1656 /* Check the function is executable. */
1657 if (current_function_check_memory_usage)
1659 #ifdef POINTERS_EXTEND_UNSIGNED
1660 /* It might be OK to convert funexp in place, but there's
1661 a lot going on between here and when it happens naturally
1662 that this seems safer. */
1663 funaddr = convert_memory_address (Pmode, funexp);
/* Emit the -fcheck-memory-usage runtime check on the target address.  */
1665 emit_library_call (chkr_check_exec_libfunc, 1,
1674 /* Do the register loads required for any wholly-register parms or any
1675 parms which are passed both on the stack and in a register. Their
1676 expressions were already evaluated.
1678 Mark all register-parms as living through the call, putting these USE
1679 insns in the CALL_INSN_FUNCTION_USAGE field. */
/* NOTE(review): elided listing -- some original lines are missing; code
   lines are byte-identical, only comments added.
   Purpose (per the header comment above): load wholly-in-register parms
   (and the register part of reg+stack parms) into their hard registers,
   and record the registers in CALL_FUSAGE so they are live at the call.  */
1682 load_register_parameters (args, num_actuals, call_fusage)
1683 struct arg_data *args;
/* Iteration direction is a target choice.  */
1689 #ifdef LOAD_ARGS_REVERSED
1690 for (i = num_actuals - 1; i >= 0; i--)
1692 for (i = 0; i < num_actuals; i++)
1695 rtx reg = args[i].reg;
1696 int partial = args[i].partial;
1701 /* Set to non-negative if must move a word at a time, even if just
1702 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1703 we just use a normal move insn. This value can be zero if the
1704 argument is a zero size structure with no fields. */
1705 nregs = (partial ? partial
1706 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1707 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1708 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1711 /* Handle calls that pass values in multiple non-contiguous
1712 locations. The Irix 6 ABI has examples of this. */
1714 if (GET_CODE (reg) == PARALLEL)
1715 emit_group_load (reg, args[i].value,
1716 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1717 TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
1719 /* If simple case, just do move. If normal partial, store_one_arg
1720 has already loaded the register for us. In all other cases,
1721 load the register(s) from memory. */
1723 else if (nregs == -1)
1724 emit_move_insn (reg, args[i].value);
1726 /* If we have pre-computed the values to put in the registers in
1727 the case of non-aligned structures, copy them in now. */
1729 else if (args[i].n_aligned_regs != 0)
1730 for (j = 0; j < args[i].n_aligned_regs; j++)
1731 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1732 args[i].aligned_regs[j]);
/* Word-at-a-time block move for the remaining multi-register cases.  */
1734 else if (partial == 0 || args[i].pass_on_stack)
1735 move_block_to_reg (REGNO (reg),
1736 validize_mem (args[i].value), nregs,
1739 /* Handle calls that pass values in multiple non-contiguous
1740 locations. The Irix 6 ABI has examples of this. */
/* Record register uses so the registers stay live across the call.  */
1741 if (GET_CODE (reg) == PARALLEL)
1742 use_group_regs (call_fusage, reg);
1743 else if (nregs == -1)
1744 use_reg (call_fusage, reg);
1746 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1751 /* Try to integrate function. See expand_inline_function for documentation
1752 about the parameters. */
/* NOTE(review): elided listing -- some original lines are missing; code
   lines are byte-identical, only comments added.
   Purpose (per the header comment above): attempt to inline FNDECL via
   expand_inline_function; on success, possibly grow the stack to protect
   the outgoing argument area and return the inlined result.  On failure,
   mark FNDECL addressable (to be compiled out of line) and return the
   (rtx) (HOST_WIDE_INT) -1 sentinel.  */
1755 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1761 rtx structure_value_addr;
1766 rtx old_stack_level = 0;
1767 int reg_parm_stack_space = 0;
1769 #ifdef REG_PARM_STACK_SPACE
1770 #ifdef MAYBE_REG_PARM_STACK_SPACE
1771 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1773 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
/* Remember where the inlined body will begin.  */
1777 before_call = get_last_insn ();
1779 temp = expand_inline_function (fndecl, actparms, target,
1781 structure_value_addr);
1783 /* If inlining succeeded, return. */
/* (rtx) (HOST_WIDE_INT) -1 is the "inlining failed" sentinel.  */
1784 if (temp != (rtx) (HOST_WIDE_INT) - 1)
1786 if (ACCUMULATE_OUTGOING_ARGS)
1788 /* If the outgoing argument list must be preserved, push
1789 the stack before executing the inlined function if it
makes any calls (continuation elided in this listing).  */
1792 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1793 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1796 if (stack_arg_under_construction || i >= 0)
1799 = before_call ? NEXT_INSN (before_call) : get_insns ();
1800 rtx insn = NULL_RTX, seq;
1802 /* Look for a call in the inline function code.
1803 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1804 nonzero then there is a call and it is not necessary
1805 to scan the insns. */
1807 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1808 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1809 if (GET_CODE (insn) == CALL_INSN)
1814 /* Reserve enough stack space so that the largest
1815 argument list of any function call in the inline
1816 function does not overlap the argument list being
1817 evaluated. This is usually an overestimate because
1818 allocate_dynamic_stack_space reserves space for an
1819 outgoing argument list in addition to the requested
1820 space, but there is no way to ask for stack space such
1821 that an argument list of a certain length can be
1824 Add the stack space reserved for register arguments, if
1825 any, in the inline function. What is really needed is the
1826 largest value of reg_parm_stack_space in the inline
1827 function, but that is not available. Using the current
1828 value of reg_parm_stack_space is wrong, but gives
1829 correct results on all supported machines. */
1831 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1832 + reg_parm_stack_space);
/* Save the stack, grow it by ADJUST, and splice the save sequence in
   before the inlined body; restore afterwards.  */
1835 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1836 allocate_dynamic_stack_space (GEN_INT (adjust),
1837 NULL_RTX, BITS_PER_UNIT);
1840 emit_insns_before (seq, first_insn);
1841 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1846 /* If the result is equivalent to TARGET, return TARGET to simplify
1847 checks in store_expr. They can be equivalent but not equal in the
1848 case of a function that returns BLKmode. */
1849 if (temp != target && rtx_equal_p (temp, target))
1854 /* If inlining failed, mark FNDECL as needing to be compiled
1855 separately after all. If function was declared inline,
warn that the inlining could not be done (continuation elided).  */
1857 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1858 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1860 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1861 warning ("called from here");
1863 mark_addressable (fndecl);
/* Propagate the "inlining failed" sentinel to the caller.  */
1864 return (rtx) (HOST_WIDE_INT) - 1;
1867 /* Generate all the code for a function call
1868 and return an rtx for its value.
1869 Store the value in TARGET (specified as an rtx) if convenient.
1870 If the value is stored in TARGET then TARGET is returned.
1871 If IGNORE is nonzero, then we ignore the value of the function call. */
1874 expand_call (exp, target, ignore)
1879 /* Nonzero if we are currently expanding a call. */
1880 static int currently_expanding_call = 0;
1882 /* List of actual parameters. */
1883 tree actparms = TREE_OPERAND (exp, 1);
1884 /* RTX for the function to be called. */
1886 /* Sequence of insns to perform a tail recursive "call". */
1887 rtx tail_recursion_insns = NULL_RTX;
1888 /* Sequence of insns to perform a normal "call". */
1889 rtx normal_call_insns = NULL_RTX;
1890 /* Sequence of insns to perform a tail recursive "call". */
1891 rtx tail_call_insns = NULL_RTX;
1892 /* Data type of the function. */
1894 /* Declaration of the function being called,
1895 or 0 if the function is computed (not known by name). */
1902 /* Register in which non-BLKmode value will be returned,
1903 or 0 if no value or if value is BLKmode. */
1905 /* Address where we should return a BLKmode value;
1906 0 if value not BLKmode. */
1907 rtx structure_value_addr = 0;
1908 /* Nonzero if that address is being passed by treating it as
1909 an extra, implicit first parameter. Otherwise,
1910 it is passed by being copied directly into struct_value_rtx. */
1911 int structure_value_addr_parm = 0;
1912 /* Size of aggregate value wanted, or zero if none wanted
1913 or if we are using the non-reentrant PCC calling convention
1914 or expecting the value in registers. */
1915 HOST_WIDE_INT struct_value_size = 0;
1916 /* Nonzero if called function returns an aggregate in memory PCC style,
1917 by returning the address of where to find it. */
1918 int pcc_struct_value = 0;
1920 /* Number of actual parameters in this call, including struct value addr. */
1922 /* Number of named args. Args after this are anonymous ones
1923 and they must all go on the stack. */
1926 /* Vector of information about each argument.
1927 Arguments are numbered in the order they will be pushed,
1928 not the order they are written. */
1929 struct arg_data *args;
1931 /* Total size in bytes of all the stack-parms scanned so far. */
1932 struct args_size args_size;
1933 /* Size of arguments before any adjustments (such as rounding). */
1934 int unadjusted_args_size;
1935 /* Data on reg parms scanned so far. */
1936 CUMULATIVE_ARGS args_so_far;
1937 /* Nonzero if a reg parm has been scanned. */
1939 /* Nonzero if this is an indirect function call. */
1941 /* Nonzero if we must avoid push-insns in the args for this call.
1942 If stack space is allocated for register parameters, but not by the
1943 caller, then it is preallocated in the fixed part of the stack frame.
1944 So the entire argument block must then be preallocated (i.e., we
1945 ignore PUSH_ROUNDING in that case). */
1947 int must_preallocate = !PUSH_ARGS;
1949 /* Size of the stack reserved for parameter registers. */
1950 int reg_parm_stack_space = 0;
1952 /* Address of space preallocated for stack parms
1953 (on machines that lack push insns), or 0 if space not preallocated. */
1956 /* Mask of ECF_ flags. */
1958 /* Nonzero if this is a call to an inline function. */
1959 int is_integrable = 0;
1960 #ifdef REG_PARM_STACK_SPACE
1961 /* Define the boundary of the register parm stack space that needs to be
1963 int low_to_save = -1, high_to_save;
1964 rtx save_area = 0; /* Place that it is saved */
1967 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1968 char *initial_stack_usage_map = stack_usage_map;
1969 int old_stack_arg_under_construction = 0;
1971 rtx old_stack_level = 0;
1972 int old_pending_adj = 0;
1973 int old_inhibit_defer_pop = inhibit_defer_pop;
1974 int old_stack_allocated;
1978 int preferred_stack_boundary;
1980 /* The value of the function call can be put in a hard register. But
1981 if -fcheck-memory-usage, code which invokes functions (and thus
1982 damages some hard registers) can be inserted before using the value.
1983 So, target is always a pseudo-register in that case. */
1984 if (current_function_check_memory_usage)
1987 /* See if this is "nothrow" function call. */
1988 if (TREE_NOTHROW (exp))
1989 flags |= ECF_NOTHROW;
1991 /* See if we can find a DECL-node for the actual function.
1992 As a result, decide whether this is a call to an integrable function. */
1994 p = TREE_OPERAND (exp, 0);
1995 if (TREE_CODE (p) == ADDR_EXPR)
1997 fndecl = TREE_OPERAND (p, 0);
1998 if (TREE_CODE (fndecl) != FUNCTION_DECL)
2003 && fndecl != current_function_decl
2004 && DECL_INLINE (fndecl)
2005 && DECL_SAVED_INSNS (fndecl)
2006 && DECL_SAVED_INSNS (fndecl)->inlinable)
2008 else if (! TREE_ADDRESSABLE (fndecl))
2010 /* In case this function later becomes inlinable,
2011 record that there was already a non-inline call to it.
2013 Use abstraction instead of setting TREE_ADDRESSABLE
2015 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2018 warning_with_decl (fndecl, "can't inline call to `%s'");
2019 warning ("called from here");
2021 mark_addressable (fndecl);
2024 flags |= flags_from_decl_or_type (fndecl);
2028 /* If we don't have specific function to call, see if we have a
2029 attributes set in the type. */
2031 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2033 #ifdef REG_PARM_STACK_SPACE
2034 #ifdef MAYBE_REG_PARM_STACK_SPACE
2035 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2037 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2041 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2042 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2043 must_preallocate = 1;
2046 /* Warn if this value is an aggregate type,
2047 regardless of which calling convention we are using for it. */
2048 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2049 warning ("function call has aggregate value");
2051 /* Set up a place to return a structure. */
2053 /* Cater to broken compilers. */
2054 if (aggregate_value_p (exp))
2056 /* This call returns a big structure. */
2057 flags &= ~(ECF_CONST | ECF_PURE);
2059 #ifdef PCC_STATIC_STRUCT_RETURN
2061 pcc_struct_value = 1;
2062 /* Easier than making that case work right. */
2065 /* In case this is a static function, note that it has been
2067 if (! TREE_ADDRESSABLE (fndecl))
2068 mark_addressable (fndecl);
2072 #else /* not PCC_STATIC_STRUCT_RETURN */
2074 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2076 if (target && GET_CODE (target) == MEM)
2077 structure_value_addr = XEXP (target, 0);
2080 /* Assign a temporary to hold the value. */
2083 /* For variable-sized objects, we must be called with a target
2084 specified. If we were to allocate space on the stack here,
2085 we would have no way of knowing when to free it. */
2087 if (struct_value_size < 0)
2090 /* This DECL is just something to feed to mark_addressable;
2091 it doesn't get pushed. */
2092 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
2093 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
2094 mark_addressable (d);
2095 mark_temp_addr_taken (DECL_RTL (d));
2096 structure_value_addr = XEXP (DECL_RTL (d), 0);
2101 #endif /* not PCC_STATIC_STRUCT_RETURN */
2104 /* If called function is inline, try to integrate it. */
2108 rtx temp = try_to_integrate (fndecl, actparms, target,
2109 ignore, TREE_TYPE (exp),
2110 structure_value_addr);
2111 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2115 currently_expanding_call++;
2117 /* Tail calls can make things harder to debug, and we've traditionally
2118 pushed these optimizations into -O2. Don't try if we're already
2119 expanding a call, as that means we're an argument. Similarly, if
2120 there's pending loops or cleanups we know there's code to follow
2124 if (flag_optimize_sibling_calls
2125 && currently_expanding_call == 1
2126 && stmt_loop_nest_empty ()
2127 && ! any_pending_cleanups (1))
2129 tree new_actparms = NULL_TREE;
2131 /* Ok, we're going to give the tail call the old college try.
2132 This means we're going to evaluate the function arguments
2133 up to three times. There are two degrees of badness we can
2134 encounter, those that can be unsaved and those that can't.
2135 (See unsafe_for_reeval commentary for details.)
2137 Generate a new argument list. Pass safe arguments through
2138 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2139 For hard badness, evaluate them now and put their resulting
2140 rtx in a temporary VAR_DECL. */
2142 for (p = actparms; p; p = TREE_CHAIN (p))
2143 switch (unsafe_for_reeval (TREE_VALUE (p)))
2146 new_actparms = tree_cons (TREE_PURPOSE (p), TREE_VALUE (p),
2150 case 1: /* Mildly unsafe. */
2151 new_actparms = tree_cons (TREE_PURPOSE (p),
2152 unsave_expr (TREE_VALUE (p)),
2156 case 2: /* Wildly unsafe. */
2158 tree var = build_decl (VAR_DECL, NULL_TREE,
2159 TREE_TYPE (TREE_VALUE (p)));
2160 DECL_RTL (var) = expand_expr (TREE_VALUE (p), NULL_RTX,
2161 VOIDmode, EXPAND_NORMAL);
2162 new_actparms = tree_cons (TREE_PURPOSE (p), var, new_actparms);
2170 /* We built the new argument chain backwards. */
2171 actparms = nreverse (new_actparms);
2173 /* Expanding one of those dangerous arguments could have added
2174 cleanups, but otherwise give it a whirl. */
2175 try_tail_call = ! any_pending_cleanups (1);
2178 /* Generate a tail recursion sequence when calling ourselves. */
2181 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2182 && TREE_OPERAND (TREE_OPERAND (exp, 0), 0) == current_function_decl)
2184 /* We want to emit any pending stack adjustments before the tail
2185 recursion "call". That way we know any adjustment after the tail
2186 recursion call can be ignored if we indeed use the tail recursion
2188 int save_pending_stack_adjust = pending_stack_adjust;
2189 int save_stack_pointer_delta = stack_pointer_delta;
2191 /* Use a new sequence to hold any RTL we generate. We do not even
2192 know if we will use this RTL yet. The final decision can not be
2193 made until after RTL generation for the entire function is
2197 /* Emit the pending stack adjustments before we expand any arguments. */
2198 do_pending_stack_adjust ();
2200 if (optimize_tail_recursion (actparms, get_last_insn ()))
2201 tail_recursion_insns = get_insns ();
2204 /* Restore the original pending stack adjustment for the sibling and
2205 normal call cases below. */
2206 pending_stack_adjust = save_pending_stack_adjust;
2207 stack_pointer_delta = save_stack_pointer_delta;
2210 function_call_count++;
2212 if (fndecl && DECL_NAME (fndecl))
2213 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2215 #ifdef PREFERRED_STACK_BOUNDARY
2216 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2218 preferred_stack_boundary = STACK_BOUNDARY;
2221 /* Ensure current function's preferred stack boundary is at least
2222 what we need. We don't have to increase alignment for recursive
2224 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2225 && fndecl != current_function_decl)
2226 cfun->preferred_stack_boundary = preferred_stack_boundary;
2228 /* See if this is a call to a function that can return more than once
2229 or a call to longjmp or malloc. */
2230 flags |= special_function_p (fndecl, flags);
2232 if (flags & ECF_MAY_BE_ALLOCA)
2233 current_function_calls_alloca = 1;
2235 /* Operand 0 is a pointer-to-function; get the type of the function. */
2236 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2237 if (! POINTER_TYPE_P (funtype))
2239 funtype = TREE_TYPE (funtype);
2241 /* We want to make two insn chains; one for a sibling call, the other
2242 for a normal call. We will select one of the two chains after
2243 initial RTL generation is complete. */
2244 for (pass = 0; pass < 2; pass++)
2246 int sibcall_failure = 0;
2247 /* We want to emit any pending stack adjustments before the tail
2248 recursion "call". That way we know any adjustment after the tail
2249 recursion call can be ignored if we indeed use the tail recursion
2251 int save_pending_stack_adjust;
2252 int save_stack_pointer_delta;
2254 rtx before_call, next_arg_reg;
2258 /* Various reasons we can not use a sibling call. */
2260 #ifdef HAVE_sibcall_epilogue
2261 || ! HAVE_sibcall_epilogue
2265 /* The structure value address is used and modified in the
2266 loop below. It does not seem worth the effort to save and
2267 restore it as a state variable since few optimizable
2268 sibling calls will return a structure. */
2269 || structure_value_addr != NULL_RTX
2270 /* If the register holding the address is a callee saved
2271 register, then we lose. We have no way to prevent that,
2272 so we only allow calls to named functions. */
2273 /* ??? This could be done by having the insn constraints
2274 use a register class that is all call-clobbered. Any
2275 reload insns generated to fix things up would appear
2276 before the sibcall_epilogue. */
2277 || fndecl == NULL_TREE
2278 || ! FUNCTION_OK_FOR_SIBCALL (fndecl))
2281 /* Emit any queued insns now; otherwise they would end up in
2282 only one of the alternates. */
2285 /* We know at this point that there are not currently any
2286 pending cleanups. If, however, in the process of evaluating
2287 the arguments we were to create some, we'll need to be
2288 able to get rid of them. */
2289 expand_start_target_temps ();
2291 /* State variables we need to save and restore between
2293 save_pending_stack_adjust = pending_stack_adjust;
2294 save_stack_pointer_delta = stack_pointer_delta;
2297 flags &= ~ECF_SIBCALL;
2299 flags |= ECF_SIBCALL;
2301 /* Other state variables that we must reinitialize each time
2302 through the loop (that are not initialized by the loop itself). */
2306 /* Start a new sequence for the normal call case.
2308 From this point on, if the sibling call fails, we want to set
2309 sibcall_failure instead of continuing the loop. */
2312 /* When calling a const function, we must pop the stack args right away,
2313 so that the pop is deleted or moved with the call. */
2314 if (flags & (ECF_CONST | ECF_PURE))
2317 /* Don't let pending stack adjusts add up to too much.
2318 Also, do all pending adjustments now if there is any chance
2319 this might be a call to alloca or if we are expanding a sibling
2321 if (pending_stack_adjust >= 32
2322 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
2324 do_pending_stack_adjust ();
2326 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2328 /* A fork duplicates the profile information, and an exec discards
2329 it. We can't rely on fork/exec to be paired. So write out the
2330 profile information we have gathered so far, and clear it. */
2331 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2332 is subject to race conditions, just as with multithreaded
2335 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2339 /* Push the temporary stack slot level so that we can free any
2340 temporaries we make. */
2343 /* Start updating where the next arg would go.
2345 On some machines (such as the PA) indirect calls have a different
2346 calling convention than normal calls. The last argument in
2347 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2349 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2351 /* If struct_value_rtx is 0, it means pass the address
2352 as if it were an extra parameter. */
2353 if (structure_value_addr && struct_value_rtx == 0)
2355 /* If structure_value_addr is a REG other than
2356 virtual_outgoing_args_rtx, we can always use it. If it
2357 is not a REG, we must always copy it into a register.
2358 If it is virtual_outgoing_args_rtx, we must copy it to another
2359 register in some cases. */
2360 rtx temp = (GET_CODE (structure_value_addr) != REG
2361 || (ACCUMULATE_OUTGOING_ARGS
2362 && stack_arg_under_construction
2363 && structure_value_addr == virtual_outgoing_args_rtx)
2364 ? copy_addr_to_reg (structure_value_addr)
2365 : structure_value_addr);
2368 = tree_cons (error_mark_node,
2369 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2372 structure_value_addr_parm = 1;
2375 /* Count the arguments and set NUM_ACTUALS. */
2376 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2379 /* Compute number of named args.
2380 Normally, don't include the last named arg if anonymous args follow.
2381 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2382 (If no anonymous args follow, the result of list_length is actually
2383 one too large. This is harmless.)
2385 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2386 zero, this machine will be able to place unnamed args that were
2387 passed in registers into the stack. So treat all args as named.
2388 This allows the insns emitting for a specific argument list to be
2389 independent of the function declaration.
2391 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2392 reliable way to pass unnamed args in registers, so we must force
2393 them into memory. */
2395 if ((STRICT_ARGUMENT_NAMING
2396 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2397 && TYPE_ARG_TYPES (funtype) != 0)
2399 = (list_length (TYPE_ARG_TYPES (funtype))
2400 /* Don't include the last named arg. */
2401 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2402 /* Count the struct value address, if it is passed as a parm. */
2403 + structure_value_addr_parm);
2405 /* If we know nothing, treat all args as named. */
2406 n_named_args = num_actuals;
2408 /* Make a vector to hold all the information about each arg. */
2409 args = (struct arg_data *) alloca (num_actuals
2410 * sizeof (struct arg_data));
2411 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2413 /* Build up entries in the ARGS array, compute the size of the arguments
2414 into ARGS_SIZE, etc. */
2415 initialize_argument_information (num_actuals, args, &args_size,
2416 n_named_args, actparms, fndecl,
2417 &args_so_far, reg_parm_stack_space,
2418 &old_stack_level, &old_pending_adj,
2419 &must_preallocate, &flags);
2421 #ifdef FINAL_REG_PARM_STACK_SPACE
2422 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2428 /* If this function requires a variable-sized argument list, don't
2429 try to make a cse'able block for this call. We may be able to
2430 do this eventually, but it is too complicated to keep track of
2431 what insns go in the cse'able block and which don't.
2433 Also do not make a sibling call. */
2435 flags &= ~(ECF_CONST | ECF_PURE);
2436 must_preallocate = 1;
2437 sibcall_failure = 1;
2440 if (args_size.constant > current_function_args_size)
2442 /* If this function requires more stack slots than the current
2443 function, we cannot change it into a sibling call. */
2444 sibcall_failure = 1;
2447 /* Compute the actual size of the argument block required. The variable
2448 and constant sizes must be combined, the size may have to be rounded,
2449 and there may be a minimum required size. When generating a sibcall
2450 pattern, do not round up, since we'll be re-using whatever space our
2452 unadjusted_args_size
2453 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2455 : preferred_stack_boundary));
2457 /* If the callee pops its own arguments, then it must pop exactly
2458 the same number of arguments as the current function. */
2459 if (RETURN_POPS_ARGS (fndecl, funtype, unadjusted_args_size)
2460 != RETURN_POPS_ARGS (current_function_decl,
2461 TREE_TYPE (current_function_decl),
2462 current_function_args_size))
2463 sibcall_failure = 1;
2465 /* Now make final decision about preallocating stack space. */
2466 must_preallocate = finalize_must_preallocate (must_preallocate,
2470 /* If the structure value address will reference the stack pointer, we
2471 must stabilize it. We don't need to do this if we know that we are
2472 not going to adjust the stack pointer in processing this call. */
2474 if (structure_value_addr
2475 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2476 || reg_mentioned_p (virtual_outgoing_args_rtx,
2477 structure_value_addr))
2479 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)
2481 structure_value_addr = copy_to_reg (structure_value_addr);
2483 /* Precompute any arguments as needed. */
2484 precompute_arguments (flags, must_preallocate, num_actuals,
2487 /* Now we are about to start emitting insns that can be deleted
2488 if a libcall is deleted. */
2489 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
2492 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2494 /* If we have no actual push instructions, or shouldn't use them,
2495 make space for all args right now. */
2497 if (args_size.var != 0)
2499 if (old_stack_level == 0)
2501 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2502 old_pending_adj = pending_stack_adjust;
2503 pending_stack_adjust = 0;
2504 /* stack_arg_under_construction says whether a stack arg is
2505 being constructed at the old stack level. Pushing the stack
2506 gets a clean outgoing argument block. */
2507 old_stack_arg_under_construction = stack_arg_under_construction;
2508 stack_arg_under_construction = 0;
2510 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2514 /* Note that we must go through the motions of allocating an argument
2515 block even if the size is zero because we may be storing args
2516 in the area reserved for register arguments, which may be part of
2519 int needed = args_size.constant;
2521 /* Store the maximum argument space used. It will be pushed by
2522 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2525 if (needed > current_function_outgoing_args_size)
2526 current_function_outgoing_args_size = needed;
2528 if (must_preallocate)
2530 if (ACCUMULATE_OUTGOING_ARGS)
2532 /* Since the stack pointer will never be pushed, it is possible
2533 for the evaluation of a parm to clobber something we have
2534 already written to the stack. Since most function calls on
2535 RISC machines do not use the stack, this is uncommon, but
2536 must work correctly.
2538 Therefore, we save any area of the stack that was already
2539 written and that we are using. Here we set up to do this by
2540 making a new stack usage map from the old one. The actual
2541 save will be done by store_one_arg.
2543 Another approach might be to try to reorder the argument
2544 evaluations to avoid this conflicting stack usage. */
2546 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2547 /* Since we will be writing into the entire argument area, the
2548 map must be allocated for its entire size, not just the part
2549 that is the responsibility of the caller. */
2550 needed += reg_parm_stack_space;
2553 #ifdef ARGS_GROW_DOWNWARD
2554 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2557 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2560 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2562 if (initial_highest_arg_in_use)
2563 bcopy (initial_stack_usage_map, stack_usage_map,
2564 initial_highest_arg_in_use);
2566 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2567 bzero (&stack_usage_map[initial_highest_arg_in_use],
2568 (highest_outgoing_arg_in_use
2569 - initial_highest_arg_in_use));
2572 /* The address of the outgoing argument list must not be copied
2573 to a register here, because argblock would be left pointing
2574 to the wrong place after the call to
2575 allocate_dynamic_stack_space below. */
2577 argblock = virtual_outgoing_args_rtx;
2581 if (inhibit_defer_pop == 0)
2583 /* Try to reuse some or all of the pending_stack_adjust
2584 to get this space. Maybe we can avoid any pushing. */
2585 if (needed > pending_stack_adjust)
2587 needed -= pending_stack_adjust;
2588 pending_stack_adjust = 0;
2592 pending_stack_adjust -= needed;
2596 /* Special case this because overhead of `push_block' in this
2597 case is non-trivial. */
2599 argblock = virtual_outgoing_args_rtx;
2601 argblock = push_block (GEN_INT (needed), 0, 0);
2603 /* We only really need to call `copy_to_reg' in the case where
2604 push insns are going to be used to pass ARGBLOCK to a function
2605 call in ARGS. In that case, the stack pointer changes value
2606 from the allocation point to the call point, and hence
2607 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2608 But might as well always do it. */
2609 argblock = copy_to_reg (argblock);
2614 /* The argument block when performing a sibling call is the
2615 incoming argument block. */
2618 rtx temp = plus_constant (arg_pointer_rtx,
2619 FIRST_PARM_OFFSET (current_function_decl));
2620 argblock = force_reg (Pmode, force_operand (temp, NULL_RTX));
2623 if (ACCUMULATE_OUTGOING_ARGS)
2625 /* The save/restore code in store_one_arg handles all cases except one:
2626 a constructor call (including a C function returning a BLKmode struct)
2627 to initialize an argument. */
2628 if (stack_arg_under_construction)
2630 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2631 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2633 rtx push_size = GEN_INT (args_size.constant);
2635 if (old_stack_level == 0)
2637 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2638 old_pending_adj = pending_stack_adjust;
2639 pending_stack_adjust = 0;
2640 /* stack_arg_under_construction says whether a stack arg is
2641 being constructed at the old stack level. Pushing the stack
2642 gets a clean outgoing argument block. */
2643 old_stack_arg_under_construction = stack_arg_under_construction;
2644 stack_arg_under_construction = 0;
2645 /* Make a new map for the new argument list. */
2646 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2647 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2648 highest_outgoing_arg_in_use = 0;
2650 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2652 /* If argument evaluation might modify the stack pointer, copy the
2653 address of the argument list to a register. */
2654 for (i = 0; i < num_actuals; i++)
2655 if (args[i].pass_on_stack)
2657 argblock = copy_addr_to_reg (argblock);
2662 compute_argument_addresses (args, argblock, num_actuals);
2664 #ifdef PREFERRED_STACK_BOUNDARY
2665 /* If we push args individually in reverse order, perform stack alignment
2666 before the first push (the last arg). */
2667 if (PUSH_ARGS_REVERSED && argblock == 0
2668 && args_size.constant != unadjusted_args_size)
2670 /* When the stack adjustment is pending, we get better code
2671 by combining the adjustments. */
2672 if (pending_stack_adjust && ! (flags & (ECF_CONST | ECF_PURE))
2673 && ! inhibit_defer_pop)
2676 args_size.constant = (unadjusted_args_size
2677 + ((pending_stack_adjust
2678 + args_size.constant
2679 - unadjusted_args_size)
2680 % (preferred_stack_boundary
2682 adjust = (pending_stack_adjust - args_size.constant
2683 + unadjusted_args_size);
2684 adjust_stack (GEN_INT (adjust));
2685 pending_stack_adjust = 0;
2687 else if (argblock == 0)
2688 anti_adjust_stack (GEN_INT (args_size.constant
2689 - unadjusted_args_size));
2691 /* Now that the stack is properly aligned, pops can't safely
2692 be deferred during the evaluation of the arguments. */
2696 /* Don't try to defer pops if preallocating, not even from the first arg,
2697 since ARGBLOCK probably refers to the SP. */
2701 funexp = rtx_for_function_call (fndecl, exp);
2703 /* Figure out the register where the value, if any, will come back. */
2705 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2706 && ! structure_value_addr)
2708 if (pcc_struct_value)
2709 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2710 fndecl, (pass == 0));
2712 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2715 /* Precompute all register parameters. It isn't safe to compute anything
2716 once we have started filling any specific hard regs. */
2717 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2719 #ifdef REG_PARM_STACK_SPACE
2720 /* Save the fixed argument area if it's part of the caller's frame and
2721 is clobbered by argument setup for this call. */
2722 if (ACCUMULATE_OUTGOING_ARGS)
2723 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2724 &low_to_save, &high_to_save);
2727 /* Now store (and compute if necessary) all non-register parms.
2728 These come before register parms, since they can require block-moves,
2729 which could clobber the registers used for register parms.
2730 Parms which have partial registers are not stored here,
2731 but we do preallocate space here if they want that. */
2733 for (i = 0; i < num_actuals; i++)
2734 if (args[i].reg == 0 || args[i].pass_on_stack)
2735 store_one_arg (&args[i], argblock, flags & ECF_MAY_BE_ALLOCA,
2736 args_size.var != 0, reg_parm_stack_space);
2738 /* If we have a parm that is passed in registers but not in memory
2739 and whose alignment does not permit a direct copy into registers,
2740 make a group of pseudos that correspond to each register that we
2742 if (STRICT_ALIGNMENT)
2743 store_unaligned_arguments_into_pseudos (args, num_actuals);
2745 /* Now store any partially-in-registers parm.
2746 This is the last place a block-move can happen. */
2748 for (i = 0; i < num_actuals; i++)
2749 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2750 store_one_arg (&args[i], argblock, flags & ECF_MAY_BE_ALLOCA,
2751 args_size.var != 0, reg_parm_stack_space);
2753 #ifdef PREFERRED_STACK_BOUNDARY
2754 /* If we pushed args in forward order, perform stack alignment
2755 after pushing the last arg. */
2756 if (!PUSH_ARGS_REVERSED && argblock == 0)
2757 anti_adjust_stack (GEN_INT (args_size.constant
2758 - unadjusted_args_size));
2761 /* If register arguments require space on the stack and stack space
2762 was not preallocated, allocate stack space here for arguments
2763 passed in registers. */
2764 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2765 if (!ACCUMULATE_OUTGOING_ARGS
2766 && must_preallocate == 0 && reg_parm_stack_space > 0)
2767 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2770 /* Pass the function the address in which to return a
2772 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2774 emit_move_insn (struct_value_rtx,
2776 force_operand (structure_value_addr,
2779 /* Mark the memory for the aggregate as write-only. */
2780 if (current_function_check_memory_usage)
2781 emit_library_call (chkr_set_right_libfunc, 1,
2783 structure_value_addr, ptr_mode,
2784 GEN_INT (struct_value_size),
2785 TYPE_MODE (sizetype),
2786 GEN_INT (MEMORY_USE_WO),
2787 TYPE_MODE (integer_type_node));
2789 if (GET_CODE (struct_value_rtx) == REG)
2790 use_reg (&call_fusage, struct_value_rtx);
2793 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
2796 load_register_parameters (args, num_actuals, &call_fusage);
2798 /* Perform postincrements before actually calling the function. */
2801 /* Save a pointer to the last insn before the call, so that we can
2802 later safely search backwards to find the CALL_INSN. */
2803 before_call = get_last_insn ();
2805 /* Set up next argument register. For sibling calls on machines
2806 with register windows this should be the incoming register. */
2807 #ifdef FUNCTION_INCOMING_ARG
2809 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2813 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2816 /* All arguments and registers used for the call must be set up by
2819 #ifdef PREFERRED_STACK_BOUNDARY
2820 /* Stack must be properly aligned now. */
2821 if (stack_pointer_delta & (preferred_stack_boundary / BITS_PER_UNIT - 1))
2825 /* Generate the actual call instruction. */
2826 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2827 args_size.constant, struct_value_size,
2828 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2831 /* Verify that we've deallocated all the stack we used. */
2833 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
2836 /* If call is cse'able, make appropriate pair of reg-notes around it.
2837 Test valreg so we don't crash; may safely ignore `const'
2838 if return type is void. Disable for PARALLEL return values, because
2839 we have no way to move such values into a pseudo register. */
2840 if ((flags & (ECF_CONST | ECF_PURE))
2841 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2844 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2847 /* Mark the return value as a pointer if needed. */
2848 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2849 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2851 /* Construct an "equal form" for the value which mentions all the
2852 arguments in order as well as the function name. */
2853 for (i = 0; i < num_actuals; i++)
2854 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2855 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2857 insns = get_insns ();
2860 if (flags & ECF_PURE)
2861 note = gen_rtx_EXPR_LIST (VOIDmode,
2862 gen_rtx_USE (VOIDmode,
2863 gen_rtx_MEM (BLKmode,
2864 gen_rtx_SCRATCH (VOIDmode))), note);
2866 emit_libcall_block (insns, temp, valreg, note);
2870 else if (flags & (ECF_CONST | ECF_PURE))
2872 /* Otherwise, just write out the sequence without a note. */
2873 rtx insns = get_insns ();
2878 else if (flags & ECF_MALLOC)
2880 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2883 /* The return value from a malloc-like function is a pointer. */
2884 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2885 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2887 emit_move_insn (temp, valreg);
2889 /* The return value from a malloc-like function can not alias
2891 last = get_last_insn ();
2893 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2895 /* Write out the sequence. */
2896 insns = get_insns ();
2902 /* For calls to `setjmp', etc., inform flow.c it should complain
2903 if nonvolatile values are live. For functions that cannot return,
2904 inform flow that control does not fall through. */
2906 if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
2908 /* The barrier or NOTE_INSN_SETJMP note must be emitted
2909 immediately after the CALL_INSN. Some ports emit more
2910 than just a CALL_INSN above, so we must search for it here. */
2912 rtx last = get_last_insn ();
2913 while (GET_CODE (last) != CALL_INSN)
2915 last = PREV_INSN (last);
2916 /* There was no CALL_INSN? */
2917 if (last == before_call)
2921 if (flags & ECF_RETURNS_TWICE)
2923 emit_note_after (NOTE_INSN_SETJMP, last);
2924 current_function_calls_setjmp = 1;
2925 sibcall_failure = 1;
2928 emit_barrier_after (last);
2931 if (flags & ECF_LONGJMP)
2932 current_function_calls_longjmp = 1, sibcall_failure = 1;
2934 /* If this function is returning into a memory location marked as
2935 readonly, it means it is initializing that location. But we normally
2936 treat functions as not clobbering such locations, so we need to
2937 specify that this one does. */
2938 if (target != 0 && GET_CODE (target) == MEM
2939 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
2940 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
2942 /* If value type not void, return an rtx for the value. */
2944 /* If there are cleanups to be called, don't use a hard reg as target.
2945 We need to double check this and see if it matters anymore. */
2946 if (any_pending_cleanups (1))
2948 if (target && REG_P (target)
2949 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2951 sibcall_failure = 1;
2954 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2957 target = const0_rtx;
2959 else if (structure_value_addr)
2961 if (target == 0 || GET_CODE (target) != MEM)
2963 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2964 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2965 structure_value_addr));
2966 MEM_SET_IN_STRUCT_P (target,
2967 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2970 else if (pcc_struct_value)
2972 /* This is the special C++ case where we need to
2973 know what the true target was. We take care to
2974 never use this value more than once in one expression. */
2975 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2976 copy_to_reg (valreg));
2977 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2979 /* Handle calls that return values in multiple non-contiguous locations.
2980 The Irix 6 ABI has examples of this. */
2981 else if (GET_CODE (valreg) == PARALLEL)
2983 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2987 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
2989 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2990 preserve_temp_slots (target);
2993 if (! rtx_equal_p (target, valreg))
2994 emit_group_store (target, valreg, bytes,
2995 TYPE_ALIGN (TREE_TYPE (exp)));
2997 /* We can not support sibling calls for this case. */
2998 sibcall_failure = 1;
3001 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3002 && GET_MODE (target) == GET_MODE (valreg))
3004 /* TARGET and VALREG cannot be equal at this point because the
3005 latter would not have REG_FUNCTION_VALUE_P true, while the
3006 former would if it were referring to the same register.
3008 If they refer to the same register, this move will be a no-op,
3009 except when function inlining is being done. */
3010 emit_move_insn (target, valreg);
3012 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3013 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3015 target = copy_to_reg (valreg);
3017 #ifdef PROMOTE_FUNCTION_RETURN
3018 /* If we promoted this return value, make the proper SUBREG. TARGET
3019 might be const0_rtx here, so be careful. */
3020 if (GET_CODE (target) == REG
3021 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3022 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3024 tree type = TREE_TYPE (exp);
3025 int unsignedp = TREE_UNSIGNED (type);
3027 /* If we don't promote as expected, something is wrong. */
3028 if (GET_MODE (target)
3029 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3032 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
3033 SUBREG_PROMOTED_VAR_P (target) = 1;
3034 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3038 /* If size of args is variable or this was a constructor call for a stack
3039 argument, restore saved stack-pointer value. */
3041 if (old_stack_level)
3043 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3044 pending_stack_adjust = old_pending_adj;
3045 stack_arg_under_construction = old_stack_arg_under_construction;
3046 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3047 stack_usage_map = initial_stack_usage_map;
3048 sibcall_failure = 1;
3050 else if (ACCUMULATE_OUTGOING_ARGS)
3052 #ifdef REG_PARM_STACK_SPACE
3055 restore_fixed_argument_area (save_area, argblock,
3056 high_to_save, low_to_save);
3057 sibcall_failure = 1;
3061 /* If we saved any argument areas, restore them. */
3062 for (i = 0; i < num_actuals; i++)
3063 if (args[i].save_area)
3065 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3067 = gen_rtx_MEM (save_mode,
3068 memory_address (save_mode,
3069 XEXP (args[i].stack_slot, 0)));
3071 if (save_mode != BLKmode)
3072 emit_move_insn (stack_area, args[i].save_area);
3074 emit_block_move (stack_area,
3075 validize_mem (args[i].save_area),
3076 GEN_INT (args[i].size.constant),
3078 sibcall_failure = 1;
3081 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3082 stack_usage_map = initial_stack_usage_map;
3085 /* If this was alloca, record the new stack level for nonlocal gotos.
3086 Check for the handler slots since we might not have a save area
3087 for non-local gotos. */
3089 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3090 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3094 /* Free up storage we no longer need. */
3095 for (i = 0; i < num_actuals; ++i)
3096 if (args[i].aligned_regs)
3097 free (args[i].aligned_regs);
3101 /* Undo the fake expand_start_target_temps we did earlier. If
3102 there had been any cleanups created, we've already set
3104 expand_end_target_temps ();
3107 insns = get_insns ();
3112 tail_call_insns = insns;
3114 /* If something prevents making this a sibling call,
3115 zero out the sequence. */
3116 if (sibcall_failure)
3117 tail_call_insns = NULL_RTX;
3118 /* Restore the pending stack adjustment now that we have
3119 finished generating the sibling call sequence. */
3121 pending_stack_adjust = save_pending_stack_adjust;
3122 stack_pointer_delta = save_stack_pointer_delta;
3125 normal_call_insns = insns;
3128 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3129 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3130 can happen if the arguments to this function call an inline
3131 function whose expansion contains another CALL_PLACEHOLDER.
3133 If there are any C_Ps in any of these sequences, replace them
3134 with their normal call. */
3136 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3137 if (GET_CODE (insn) == CALL_INSN
3138 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3139 replace_call_placeholder (insn, sibcall_use_normal);
3141 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3142 if (GET_CODE (insn) == CALL_INSN
3143 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3144 replace_call_placeholder (insn, sibcall_use_normal);
3146 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3147 if (GET_CODE (insn) == CALL_INSN
3148 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3149 replace_call_placeholder (insn, sibcall_use_normal);
3151 /* If this was a potential tail recursion site, then emit a
3152 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3153 One of them will be selected later. */
3154 if (tail_recursion_insns || tail_call_insns)
3156 /* The tail recursion label must be kept around. We could expose
3157 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3158 and makes determining true tail recursion sites difficult.
3160 So we set LABEL_PRESERVE_P here, then clear it when we select
3161 one of the call sequences after rtl generation is complete. */
3162 if (tail_recursion_insns)
3163 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3164 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3166 tail_recursion_insns,
3167 tail_recursion_label));
3170 emit_insns (normal_call_insns);
3172 currently_expanding_call--;
3177 /* Returns nonzero if FUN is the symbol for a library function which can
   not throw an exception.  The only libfuncs assumed able to throw are
   the exception-raising entry points tested below (throw, rethrow and
   their setjmp/longjmp counterparts); everything else is considered
   nothrow.  Callers use the result to set ECF_NOTHROW on libcalls.
   FUN is a SYMBOL_REF rtx for the library routine.  */
3181 libfunc_nothrow (fun)
3184 if (fun == throw_libfunc
3185 || fun == rethrow_libfunc
3186 || fun == sjthrow_libfunc
3187 || fun == sjpopnthrow_libfunc)
3193 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3194 The RETVAL parameter specifies whether return value needs to be saved, other
3195 parameters are documented in the emit_library_call function below.  */
/* RETVAL nonzero means the caller wants the result: it is copied into
   VALUE (or a location chosen here) and that rtx is returned.  FN_TYPE
   selects the ECF flags (0 = normal; 1 = `const'; 2 = `pure' -- see the
   fn_type == 2 test below).  OUTMODE is the machine mode of the result,
   NARGS the number of (rtx value, machine mode) pairs read from the
   va_list P.  */
3197 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3202 enum machine_mode outmode;
3206 /* Total size in bytes of all the stack-parms scanned so far. */
3207 struct args_size args_size;
3208 /* Size of arguments before any adjustments (such as rounding). */
3209 struct args_size original_args_size;
3210 register int argnum;
3214 struct args_size alignment_pad;
3216 CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the value to pass, its mode, the register it is
   (partially) passed in if any, how many words go in registers for a
   split argument, its stack offset and size, and any stack contents we
   had to save before overwriting the slot.  */
3217 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3218 struct args_size offset; struct args_size size; rtx save_area; };
3220 int old_inhibit_defer_pop = inhibit_defer_pop;
3221 rtx call_fusage = 0;
3224 int pcc_struct_value = 0;
3225 int struct_value_size = 0;
3227 int reg_parm_stack_space = 0;
3230 #ifdef REG_PARM_STACK_SPACE
3231 /* Define the boundary of the register parm stack space that needs to be
   saved, if any.  low_to_save == -1 means no part of it is in use.  */
3233 int low_to_save = -1, high_to_save = 0;
3234 rtx save_area = 0; /* Place that it is saved */
3237 /* Size of the stack reserved for parameter registers. */
3238 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3239 char *initial_stack_usage_map = stack_usage_map;
3241 #ifdef REG_PARM_STACK_SPACE
3242 #ifdef MAYBE_REG_PARM_STACK_SPACE
3243 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3245 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
/* fn_type == 2 marks a `pure' call (reads memory but has no other side
   effects); fn_type == 1 is `const'.  */
3251 else if (fn_type == 2)
3255 if (libfunc_nothrow (fun))
3256 flags |= ECF_NOTHROW;
3258 #ifdef PREFERRED_STACK_BOUNDARY
3259 /* Ensure current function's preferred stack boundary is at least
   what we need; a libcall must not be emitted with less alignment
   than the target prefers.  */
3261 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3262 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3265 /* If this kind of value comes back in memory,
3266 decide where in memory it should come back. */
3267 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3269 #ifdef PCC_STATIC_STRUCT_RETURN
3271 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3273 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3274 pcc_struct_value = 1;
3276 value = gen_reg_rtx (outmode);
3277 #else /* not PCC_STATIC_STRUCT_RETURN */
3278 struct_value_size = GET_MODE_SIZE (outmode);
/* Reuse the caller-supplied MEM as the return slot when possible;
   otherwise fall back to a fresh stack temporary.  */
3279 if (value != 0 && GET_CODE (value) == MEM)
3282 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3285 /* This call returns a big structure. */
3286 flags &= ~(ECF_CONST | ECF_PURE);
3289 /* ??? Unfinished: must pass the memory address as an argument. */
3291 /* Copy all the libcall-arguments out of the varargs data
3292 and into a vector ARGVEC.
3294 Compute how to pass each argument. We only support a very small subset
3295 of the full argument passing conventions to limit complexity here since
3296 library functions shouldn't have many args. */
/* One extra slot beyond NARGS in case the structure-value address must
   be passed as an explicit (hidden) argument below.  */
3298 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3299 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3301 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3303 args_size.constant = 0;
3308 /* Now we are about to start emitting insns that can be deleted
3309 if a libcall is deleted. */
3310 if (flags & (ECF_CONST | ECF_PURE))
3315 /* If there's a structure value address to be passed,
3316 either pass it in the special place, or pass it as an extra argument. */
3317 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3319 rtx addr = XEXP (mem_value, 0);
3322 /* Make sure it is a reasonable operand for a move or push insn. */
3323 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3324 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3325 addr = force_operand (addr, NULL_RTX);
3327 argvec[count].value = addr;
3328 argvec[count].mode = Pmode;
3329 argvec[count].partial = 0;
3331 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3332 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3333 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3337 locate_and_pad_parm (Pmode, NULL_TREE,
3338 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3341 argvec[count].reg != 0,
3343 NULL_TREE, &args_size, &argvec[count].offset,
3344 &argvec[count].size, &alignment_pad);
/* The slot counts toward stack size only if the argument is not passed
   entirely in a register (or the target always reserves parm space).  */
3347 if (argvec[count].reg == 0 || argvec[count].partial != 0
3348 || reg_parm_stack_space > 0)
3349 args_size.constant += argvec[count].size.constant;
3351 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
/* Scan the remaining (rtx, mode) pairs from the va_list and fill ARGVEC.  */
3356 for (; count < nargs; count++)
3358 rtx val = va_arg (p, rtx);
3359 enum machine_mode mode = va_arg (p, enum machine_mode);
3361 /* We cannot convert the arg value to the mode the library wants here;
3362 must do it earlier where we know the signedness of the arg. */
3364 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3367 /* On some machines, there's no way to pass a float to a library fcn.
3368 Pass it as a double instead. */
3369 #ifdef LIBGCC_NEEDS_DOUBLE
3370 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3371 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3374 /* There's no need to call protect_from_queue, because
3375 either emit_move_insn or emit_push_insn will do that. */
3377 /* Make sure it is a reasonable operand for a move or push insn. */
3378 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3379 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3380 val = force_operand (val, NULL_RTX);
3382 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3383 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3385 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3386 be viewed as just an efficiency improvement. */
3387 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3388 emit_move_insn (slot, val);
3389 val = force_operand (XEXP (slot, 0), NULL_RTX);
3394 argvec[count].value = val;
3395 argvec[count].mode = mode;
3397 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3399 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3400 argvec[count].partial
3401 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3403 argvec[count].partial = 0;
3406 locate_and_pad_parm (mode, NULL_TREE,
3407 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3410 argvec[count].reg != 0,
3412 NULL_TREE, &args_size, &argvec[count].offset,
3413 &argvec[count].size, &alignment_pad);
/* Variable-sized libcall arguments are not supported.  */
3415 if (argvec[count].size.var)
/* Words passed in registers do not occupy stack space unless the target
   reserves space for them.  */
3418 if (reg_parm_stack_space == 0 && argvec[count].partial)
3419 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3421 if (argvec[count].reg == 0 || argvec[count].partial != 0
3422 || reg_parm_stack_space > 0)
3423 args_size.constant += argvec[count].size.constant;
3425 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3428 #ifdef FINAL_REG_PARM_STACK_SPACE
3429 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3432 /* If this machine requires an external definition for library
3433 functions, write one out. */
3434 assemble_external_libcall (fun);
/* Remember the pre-rounding size, then round the argument block up to
   the preferred stack boundary.  */
3436 original_args_size = args_size;
3437 #ifdef PREFERRED_STACK_BOUNDARY
3438 args_size.constant = (((args_size.constant
3439 + stack_pointer_delta
3443 - stack_pointer_delta);
3446 args_size.constant = MAX (args_size.constant,
3447 reg_parm_stack_space);
3449 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3450 args_size.constant -= reg_parm_stack_space;
3453 if (args_size.constant > current_function_outgoing_args_size)
3454 current_function_outgoing_args_size = args_size.constant;
3456 if (ACCUMULATE_OUTGOING_ARGS)
3458 /* Since the stack pointer will never be pushed, it is possible for
3459 the evaluation of a parm to clobber something we have already
3460 written to the stack. Since most function calls on RISC machines
3461 do not use the stack, this is uncommon, but must work correctly.
3463 Therefore, we save any area of the stack that was already written
3464 and that we are using. Here we set up to do this by making a new
3465 stack usage map from the old one.
3467 Another approach might be to try to reorder the argument
3468 evaluations to avoid this conflicting stack usage. */
3470 needed = args_size.constant;
3472 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3473 /* Since we will be writing into the entire argument area, the
3474 map must be allocated for its entire size, not just the part that
3475 is the responsibility of the caller. */
3476 needed += reg_parm_stack_space;
3479 #ifdef ARGS_GROW_DOWNWARD
3480 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3483 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3486 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3488 if (initial_highest_arg_in_use)
3489 bcopy (initial_stack_usage_map, stack_usage_map,
3490 initial_highest_arg_in_use);
3492 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3493 bzero (&stack_usage_map[initial_highest_arg_in_use],
3494 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3497 /* The address of the outgoing argument list must not be copied to a
3498 register here, because argblock would be left pointing to the
3499 wrong place after the call to allocate_dynamic_stack_space below.
3502 argblock = virtual_outgoing_args_rtx;
3507 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3510 #ifdef PREFERRED_STACK_BOUNDARY
3511 /* If we push args individually in reverse order, perform stack alignment
3512 before the first push (the last arg). */
3513 if (argblock == 0 && PUSH_ARGS_REVERSED)
3514 anti_adjust_stack (GEN_INT (args_size.constant
3515 - original_args_size.constant))
3518 if (PUSH_ARGS_REVERSED)
3529 #ifdef REG_PARM_STACK_SPACE
3530 if (ACCUMULATE_OUTGOING_ARGS)
3532 /* The argument list is the property of the called routine and it
3533 may clobber it. If the fixed area has been used for previous
3534 parameters, we must save and restore it.
3536 Here we compute the boundary of the area that needs to be saved, if any. */
3538 #ifdef ARGS_GROW_DOWNWARD
3539 for (count = 0; count < reg_parm_stack_space + 1; count++)
3541 for (count = 0; count < reg_parm_stack_space; count++)
3544 if (count >= highest_outgoing_arg_in_use
3545 || stack_usage_map[count] == 0)
3548 if (low_to_save == -1)
3549 low_to_save = count;
3551 high_to_save = count;
3554 if (low_to_save >= 0)
3556 int num_to_save = high_to_save - low_to_save + 1;
3557 enum machine_mode save_mode
3558 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3561 /* If we don't have the required alignment, must do this in BLKmode. */
3562 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3563 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3564 save_mode = BLKmode;
3566 #ifdef ARGS_GROW_DOWNWARD
3567 stack_area = gen_rtx_MEM (save_mode,
3568 memory_address (save_mode,
3569 plus_constant (argblock,
3572 stack_area = gen_rtx_MEM (save_mode,
3573 memory_address (save_mode,
3574 plus_constant (argblock,
/* A BLKmode save area must live on the stack; otherwise a pseudo
   register is enough.  */
3577 if (save_mode == BLKmode)
3579 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3580 emit_block_move (validize_mem (save_area), stack_area,
3581 GEN_INT (num_to_save), PARM_BOUNDARY);
3585 save_area = gen_reg_rtx (save_mode);
3586 emit_move_insn (save_area, stack_area);
3592 /* Push the args that need to be pushed. */
3594 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3595 are to be pushed. */
3596 for (count = 0; count < nargs; count++, argnum += inc)
3598 register enum machine_mode mode = argvec[argnum].mode;
3599 register rtx val = argvec[argnum].value;
3600 rtx reg = argvec[argnum].reg;
3601 int partial = argvec[argnum].partial;
3602 int lower_bound = 0, upper_bound = 0, i;
/* Only arguments not passed entirely in registers need a stack store.  */
3604 if (! (reg != 0 && partial == 0))
3606 if (ACCUMULATE_OUTGOING_ARGS)
3608 /* If this is being stored into a pre-allocated, fixed-size, stack
3609 area, save any previous data at that location. */
3611 #ifdef ARGS_GROW_DOWNWARD
3612 /* stack_slot is negative, but we want to index stack_usage_map
3613 with positive values. */
3614 upper_bound = -argvec[argnum].offset.constant + 1;
3615 lower_bound = upper_bound - argvec[argnum].size.constant;
3617 lower_bound = argvec[argnum].offset.constant;
3618 upper_bound = lower_bound + argvec[argnum].size.constant;
3621 for (i = lower_bound; i < upper_bound; i++)
3622 if (stack_usage_map[i]
3623 /* Don't store things in the fixed argument area at this point;
3624 it has already been saved. */
3625 && i > reg_parm_stack_space)
/* i != upper_bound means the loop above found a byte already in use.  */
3628 if (i != upper_bound)
3630 /* We need to make a save area. See what mode we can make it. */
3631 enum machine_mode save_mode
3632 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3639 plus_constant (argblock,
3640 argvec[argnum].offset.constant)));
3641 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3643 emit_move_insn (argvec[argnum].save_area, stack_area);
3647 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3648 argblock, GEN_INT (argvec[argnum].offset.constant),
3649 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3651 /* Now mark the segment we just used. */
3652 if (ACCUMULATE_OUTGOING_ARGS)
3653 for (i = lower_bound; i < upper_bound; i++)
3654 stack_usage_map[i] = 1;
3660 #ifdef PREFERRED_STACK_BOUNDARY
3661 /* If we pushed args in forward order, perform stack alignment
3662 after pushing the last arg. */
3663 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3664 anti_adjust_stack (GEN_INT (args_size.constant
3665 - original_args_size.constant));
3668 if (PUSH_ARGS_REVERSED)
3673 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3675 /* Now load any reg parms into their regs. */
3677 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3678 are to be pushed. */
3679 for (count = 0; count < nargs; count++, argnum += inc)
3681 register rtx val = argvec[argnum].value;
3682 rtx reg = argvec[argnum].reg;
3683 int partial = argvec[argnum].partial;
3685 /* Handle calls that pass values in multiple non-contiguous
3686 locations. The PA64 has examples of this for library calls. */
3687 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3688 emit_group_load (reg, val,
3689 GET_MODE_SIZE (GET_MODE (val)),
3690 GET_MODE_ALIGNMENT (GET_MODE (val)));
3691 else if (reg != 0 && partial == 0)
3692 emit_move_insn (reg, val);
3697 /* Any regs containing parms remain in use through the call. */
3698 for (count = 0; count < nargs; count++)
3700 rtx reg = argvec[count].reg;
3701 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3702 use_group_regs (&call_fusage, reg);
3704 use_reg (&call_fusage, reg);
3707 /* Pass the function the address in which to return a structure value. */
3708 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3710 emit_move_insn (struct_value_rtx,
3712 force_operand (XEXP (mem_value, 0),
3714 if (GET_CODE (struct_value_rtx) == REG)
3715 use_reg (&call_fusage, struct_value_rtx);
3718 /* Don't allow popping to be deferred, since then
3719 cse'ing of library calls could delete a call and leave the pop. */
/* VALREG is the hard register (if any) in which the value comes back.  */
3721 valreg = (mem_value == 0 && outmode != VOIDmode
3722 ? hard_libcall_value (outmode) : NULL_RTX);
3724 #ifdef PREFERRED_STACK_BOUNDARY
3725 /* Stack must be properly aligned now. */
3726 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
3730 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3731 will set inhibit_defer_pop to that value. */
3732 /* The return type is needed to decide how many bytes the function pops.
3733 Signedness plays no role in that, so for simplicity, we pretend it's
3734 always signed. We also assume that the list of arguments passed has
3735 no impact, so we pretend it is unknown. */
3738 get_identifier (XSTR (orgfun, 0)),
3739 build_function_type (outmode == VOIDmode ? void_type_node
3740 : type_for_mode (outmode, 0), NULL_TREE),
3741 original_args_size.constant, args_size.constant,
3743 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3745 old_inhibit_defer_pop + 1, call_fusage, flags);
3747 /* Now restore inhibit_defer_pop to its actual original value. */
3750 /* If call is cse'able, make appropriate pair of reg-notes around it.
3751 Test valreg so we don't crash; may safely ignore `const'
3752 if return type is void. Disable for PARALLEL return values, because
3753 we have no way to move such values into a pseudo register. */
3754 if ((flags & (ECF_CONST | ECF_PURE))
3755 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
3758 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3762 /* Construct an "equal form" for the value which mentions all the
3763 arguments in order as well as the function name. */
3764 for (i = 0; i < nargs; i++)
3765 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3766 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3768 insns = get_insns ();
/* For a `pure' call, note that the result also depends on memory via
   (use (mem (scratch))).  */
3771 if (flags & ECF_PURE)
3772 note = gen_rtx_EXPR_LIST (VOIDmode,
3773 gen_rtx_USE (VOIDmode,
3774 gen_rtx_MEM (BLKmode,
3775 gen_rtx_SCRATCH (VOIDmode))), note);
3777 emit_libcall_block (insns, temp, valreg, note);
3781 else if (flags & (ECF_CONST | ECF_PURE))
3783 /* Otherwise, just write out the sequence without a note. */
3784 rtx insns = get_insns ();
3791 /* Copy the value to the right place. */
3792 if (outmode != VOIDmode && retval)
3798 if (value != mem_value)
3799 emit_move_insn (value, mem_value);
3801 else if (value != 0)
3802 emit_move_insn (value, hard_libcall_value (outmode));
3804 value = hard_libcall_value (outmode);
/* Undo the stack bookkeeping: restore any saved register-parameter
   area and any individual argument slots we overwrote, then put back
   the caller's stack usage map.  */
3807 if (ACCUMULATE_OUTGOING_ARGS)
3809 #ifdef REG_PARM_STACK_SPACE
3812 enum machine_mode save_mode = GET_MODE (save_area);
3813 #ifdef ARGS_GROW_DOWNWARD
3815 = gen_rtx_MEM (save_mode,
3816 memory_address (save_mode,
3817 plus_constant (argblock,
3821 = gen_rtx_MEM (save_mode,
3822 memory_address (save_mode,
3823 plus_constant (argblock, low_to_save)));
3825 if (save_mode != BLKmode)
3826 emit_move_insn (stack_area, save_area);
3828 emit_block_move (stack_area, validize_mem (save_area),
3829 GEN_INT (high_to_save - low_to_save + 1),
3834 /* If we saved any argument areas, restore them. */
3835 for (count = 0; count < nargs; count++)
3836 if (argvec[count].save_area)
3838 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3840 = gen_rtx_MEM (save_mode,
3843 plus_constant (argblock,
3844 argvec[count].offset.constant)));
3846 emit_move_insn (stack_area, argvec[count].save_area);
3849 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3850 stack_usage_map = initial_stack_usage_map;
3857 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3858 (emitting the queue unless NO_QUEUE is nonzero),
3859 for a value of mode OUTMODE,
3860 with NARGS different arguments, passed as alternating rtx values
3861 and machine_modes to convert them to.
3862 The rtx values should have been passed through protect_from_queue already.
3864 FN_TYPE is zero for `normal' calls, one for `const' calls, which
3865 will be enclosed in REG_LIBCALL/REG_RETVAL notes and two for `pure'
3866 calls, that are handled like `const' calls with extra
3867 (use (memory (scratch)). */
3870 emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
3873 #ifndef ANSI_PROTOTYPES
3876 enum machine_mode outmode;
3881 VA_START (p, nargs);
3883 #ifndef ANSI_PROTOTYPES
/* On K&R compilers the named parameters are not available directly;
   re-fetch them from the va_list in declaration order.  */
3884 orgfun = va_arg (p, rtx);
3885 fn_type = va_arg (p, int);
3886 outmode = va_arg (p, enum machine_mode);
3887 nargs = va_arg (p, int);
/* Delegate to the common worker; retval == 0 means the result is not
   wanted, so no VALUE location is supplied.  */
3890 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3895 /* Like emit_library_call except that an extra argument, VALUE,
3896 comes second and says where to store the result.
3897 (If VALUE is zero, this function chooses a convenient way
3898 to return the value.
3900 This function returns an rtx for where the value is to be found.
3901 If VALUE is nonzero, VALUE is returned. */
3904 emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
3905 enum machine_mode outmode, int nargs, ...))
3907 #ifndef ANSI_PROTOTYPES
3911 enum machine_mode outmode;
3916 VA_START (p, nargs);
3918 #ifndef ANSI_PROTOTYPES
/* On K&R compilers the named parameters are not available directly;
   re-fetch them from the va_list in declaration order.  */
3919 orgfun = va_arg (p, rtx);
3920 value = va_arg (p, rtx);
3921 fn_type = va_arg (p, int);
3922 outmode = va_arg (p, enum machine_mode);
3923 nargs = va_arg (p, int);
/* Delegate to the common worker; retval == 1 requests that the result
   end up in (or be returned as) VALUE.  */
3926 value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
3934 /* Return an rtx which represents a suitable home on the stack
3935 given TYPE, the type of the argument looking for a home.
3936 This is called only for BLKmode arguments.
3938 SIZE is the size needed for this target.
3939 ARGS_ADDR is the address of the bottom of the argument block for this call.
3940 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3941 if this machine uses push insns. */
3944 target_for_arg (type, size, args_addr, offset)
3948 struct args_size offset;
3951 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3953 /* We do not call memory_address if possible,
3954 because we want to address as close to the stack
3955 as possible. For non-variable sized arguments,
3956 this will be stack-pointer relative addressing. */
3957 if (GET_CODE (offset_rtx) == CONST_INT)
3958 target = plus_constant (args_addr, INTVAL (offset_rtx))
3961 /* I have no idea how to guarantee that this
3962 will work in the presence of register parameters. */
3963 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3964 target = memory_address (QImode, target);
/* Wrap the computed address in a BLKmode MEM -- the caller stores the
   block-mode argument through it.  */
3967 return gen_rtx_MEM (BLKmode, target);
3971 /* Store a single argument for a function call
3972 into the register or memory area where it must be passed.
3973 *ARG describes the argument value and where to pass it.
3975 ARGBLOCK is the address of the stack-block for all the arguments,
3976 or 0 on a machine where arguments are pushed individually.
3978 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3979 so must be careful about how the stack is used.
3981 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3982 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3983 that we need not worry about saving and restoring the stack.
3985 FNDECL is the declaration of the function we are calling. */
3988 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3989 reg_parm_stack_space)
3990 struct arg_data *arg;
3993 int variable_size ATTRIBUTE_UNUSED;
3994 int reg_parm_stack_space;
3996 register tree pval = arg->tree_value;
4000 int i, lower_bound = 0, upper_bound = 0;
4002 if (TREE_CODE (pval) == ERROR_MARK)
4005 /* Push a new temporary level for any temporaries we make for
4009 if (ACCUMULATE_OUTGOING_ARGS)
4011 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4012 save any previous data at that location. */
4013 if (argblock && ! variable_size && arg->stack)
4015 #ifdef ARGS_GROW_DOWNWARD
4016 /* stack_slot is negative, but we want to index stack_usage_map
4017 with positive values. */
4018 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4019 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4023 lower_bound = upper_bound - arg->size.constant;
4025 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4026 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4030 upper_bound = lower_bound + arg->size.constant;
4033 for (i = lower_bound; i < upper_bound; i++)
4034 if (stack_usage_map[i]
4035 /* Don't store things in the fixed argument area at this point;
4036 it has already been saved. */
4037 && i > reg_parm_stack_space)
4040 if (i != upper_bound)
4042 /* We need to make a save area. See what mode we can make it. */
4043 enum machine_mode save_mode
4044 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4046 = gen_rtx_MEM (save_mode,
4047 memory_address (save_mode,
4048 XEXP (arg->stack_slot, 0)));
4050 if (save_mode == BLKmode)
4052 arg->save_area = assign_stack_temp (BLKmode,
4053 arg->size.constant, 0);
4054 MEM_SET_IN_STRUCT_P (arg->save_area,
4055 AGGREGATE_TYPE_P (TREE_TYPE
4056 (arg->tree_value)));
4057 preserve_temp_slots (arg->save_area);
4058 emit_block_move (validize_mem (arg->save_area), stack_area,
4059 GEN_INT (arg->size.constant),
4064 arg->save_area = gen_reg_rtx (save_mode);
4065 emit_move_insn (arg->save_area, stack_area);
4069 /* Now that we have saved any slots that will be overwritten by this
4070 store, mark all slots this store will use. We must do this before
4071 we actually expand the argument since the expansion itself may
4072 trigger library calls which might need to use the same stack slot. */
4073 if (argblock && ! variable_size && arg->stack)
4074 for (i = lower_bound; i < upper_bound; i++)
4075 stack_usage_map[i] = 1;
4078 /* If this isn't going to be placed on both the stack and in registers,
4079 set up the register and number of words. */
4080 if (! arg->pass_on_stack)
4081 reg = arg->reg, partial = arg->partial;
4083 if (reg != 0 && partial == 0)
4084 /* Being passed entirely in a register. We shouldn't be called in
4088 /* If this arg needs special alignment, don't load the registers
4090 if (arg->n_aligned_regs != 0)
4093 /* If this is being passed partially in a register, we can't evaluate
4094 it directly into its stack slot. Otherwise, we can. */
4095 if (arg->value == 0)
4097 /* stack_arg_under_construction is nonzero if a function argument is
4098 being evaluated directly into the outgoing argument list and
4099 expand_call must take special action to preserve the argument list
4100 if it is called recursively.
4102 For scalar function arguments stack_usage_map is sufficient to
4103 determine which stack slots must be saved and restored. Scalar
4104 arguments in general have pass_on_stack == 0.
4106 If this argument is initialized by a function which takes the
4107 address of the argument (a C++ constructor or a C function
4108 returning a BLKmode structure), then stack_usage_map is
4109 insufficient and expand_call must push the stack around the
4110 function call. Such arguments have pass_on_stack == 1.
4112 Note that it is always safe to set stack_arg_under_construction,
4113 but this generates suboptimal code if set when not needed. */
4115 if (arg->pass_on_stack)
4116 stack_arg_under_construction++;
4118 arg->value = expand_expr (pval,
4120 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4121 ? NULL_RTX : arg->stack,
4124 /* If we are promoting object (or for any other reason) the mode
4125 doesn't agree, convert the mode. */
4127 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4128 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4129 arg->value, arg->unsignedp);
4131 if (arg->pass_on_stack)
4132 stack_arg_under_construction--;
4135 /* Don't allow anything left on stack from computation
4136 of argument to alloca. */
4138 do_pending_stack_adjust ();
4140 if (arg->value == arg->stack)
4142 /* If the value is already in the stack slot, we are done. */
4143 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
4145 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4146 XEXP (arg->stack, 0), Pmode,
4147 ARGS_SIZE_RTX (arg->size),
4148 TYPE_MODE (sizetype),
4149 GEN_INT (MEMORY_USE_RW),
4150 TYPE_MODE (integer_type_node));
4153 else if (arg->mode != BLKmode)
4157 /* Argument is a scalar, not entirely passed in registers.
4158 (If part is passed in registers, arg->partial says how much
4159 and emit_push_insn will take care of putting it there.)
4161 Push it, and if its size is less than the
4162 amount of space allocated to it,
4163 also bump stack pointer by the additional space.
4164 Note that in C the default argument promotions
4165 will prevent such mismatches. */
4167 size = GET_MODE_SIZE (arg->mode);
4168 /* Compute how much space the push instruction will push.
4169 On many machines, pushing a byte will advance the stack
4170 pointer by a halfword. */
4171 #ifdef PUSH_ROUNDING
4172 size = PUSH_ROUNDING (size);
4176 /* Compute how much space the argument should get:
4177 round up to a multiple of the alignment for arguments. */
4178 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4179 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4180 / (PARM_BOUNDARY / BITS_PER_UNIT))
4181 * (PARM_BOUNDARY / BITS_PER_UNIT));
4183 /* This isn't already where we want it on the stack, so put it there.
4184 This can either be done with push or copy insns. */
4185 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4186 partial, reg, used - size, argblock,
4187 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4188 ARGS_SIZE_RTX (arg->alignment_pad));
4192 /* BLKmode, at least partly to be pushed. */
4194 register int excess;
4197 /* Pushing a nonscalar.
4198 If part is passed in registers, PARTIAL says how much
4199 and emit_push_insn will take care of putting it there. */
4201 /* Round its size up to a multiple
4202 of the allocation unit for arguments. */
4204 if (arg->size.var != 0)
4207 size_rtx = ARGS_SIZE_RTX (arg->size);
4211 /* PUSH_ROUNDING has no effect on us, because
4212 emit_push_insn for BLKmode is careful to avoid it. */
4213 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4214 + partial * UNITS_PER_WORD);
4215 size_rtx = expr_size (pval);
4218 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4219 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4220 argblock, ARGS_SIZE_RTX (arg->offset),
4221 reg_parm_stack_space,
4222 ARGS_SIZE_RTX (arg->alignment_pad));
4226 /* Unless this is a partially-in-register argument, the argument is now
4229 ??? Note that this can change arg->value from arg->stack to
4230 arg->stack_slot and it matters when they are not the same.
4231 It isn't totally clear that this is correct in all cases. */
4233 arg->value = arg->stack_slot;
4235 /* Once we have pushed something, pops can't safely
4236 be deferred during the rest of the arguments. */
4239 /* ANSI doesn't require a sequence point here,
4240 but PCC has one, so this will avoid some problems. */
4243 /* Free any temporary slots made in processing this argument. Show
4244 that we might have taken the address of something and pushed that
4246 preserve_temp_slots (NULL_RTX);