1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
38 #include "langhooks.h"
41 /* Decide whether a function's arguments should be processed
42 from first to last or from last to first.
44 They should if the stack and args grow in opposite directions, but
45 only if we have push insns. */
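/* For illustration only, assuming a target where STACK_GROWS_DOWNWARD is
   defined, ARGS_GROW_DOWNWARD is not, and PUSH_ARGS is nonzero: the test
   below reduces PUSH_ARGS_REVERSED to PUSH_ARGS, so arguments are
   processed from last to first.  */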
49 #ifndef PUSH_ARGS_REVERSED
50 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51 #define PUSH_ARGS_REVERSED PUSH_ARGS
57 #ifndef PUSH_ARGS_REVERSED
58 #define PUSH_ARGS_REVERSED 0
61 #ifndef STACK_POINTER_OFFSET
62 #define STACK_POINTER_OFFSET 0
65 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
66 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
68 /* Data structure and subroutines used within expand_call. */
72 /* Tree node for this argument. */
74 /* Mode for value; TYPE_MODE unless promoted. */
75 enum machine_mode mode;
76 /* Current RTL value for argument, or 0 if it isn't precomputed. */
78 /* Initially-computed RTL value for argument; only for const functions. */
80 /* Register to pass this argument in, 0 if passed on stack, or a
81 PARALLEL if the arg is to be copied into multiple non-contiguous
84 /* Register to pass this argument in when generating tail call sequence.
85 This is not the same register as for normal calls on machines with
88 /* If REG was promoted from the actual mode of the argument expression,
89 indicates whether the promotion is sign- or zero-extended. */
91 /* Number of registers to use. 0 means put the whole arg in registers.
92 Also 0 if not passed in registers. */
94 /* Nonzero if argument must be passed on stack.
95 Note that some arguments may be passed on the stack
96 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
97 pass_on_stack identifies arguments that *cannot* go in registers. */
99 /* Offset of this argument from beginning of stack-args. */
100 struct args_size offset;
101 /* Similar, but offset to the start of the stack slot. Different from
102 OFFSET if this arg pads downward. */
103 struct args_size slot_offset;
104 /* Size of this argument on the stack, rounded up for any padding it gets;
105 parts of the argument passed in registers do not count.
106 If REG_PARM_STACK_SPACE is defined, then register parms
107 are counted here as well. */
108 struct args_size size;
109 /* Location on the stack at which parameter should be stored. The store
110 has already been done if STACK == VALUE. */
112 /* Location on the stack of the start of this argument slot. This can
113 differ from STACK if this arg pads downward. This location is known
114 to be aligned to FUNCTION_ARG_BOUNDARY. */
116 /* Place that this stack area has been saved, if needed. */
118 /* If an argument's alignment does not permit direct copying into registers,
119 copy in smaller-sized pieces into pseudos. These are stored in a
120 block pointed to by this field. The next field says how many
121 word-sized pseudos we made. */
124 /* The amount that the stack pointer needs to be adjusted to
125 force alignment for the next argument. */
126 struct args_size alignment_pad;
129 /* A vector of one char per byte of stack space. A byte is nonzero if
130 the corresponding stack location has been used.
131 This vector is used to prevent a function call within an argument from
132 clobbering any stack already set up. */
133 static char *stack_usage_map;
135 /* Size of STACK_USAGE_MAP. */
136 static int highest_outgoing_arg_in_use;
138 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
139 stack location's tail call argument has already been stored into the stack.
140 This bitmap is used to prevent sibling call optimization if the function tries
141 to use its parent's incoming argument slots when they have already been
142 overwritten with tail call arguments.
143 static sbitmap stored_args_map;
145 /* stack_arg_under_construction is nonzero when an argument may be
146 initialized with a constructor call (including a C function that
147 returns a BLKmode struct) and expand_call must take special action
148 to make sure the object being constructed does not overlap the
149 argument list for the constructor call. */
150 int stack_arg_under_construction;
152 static int calls_function PARAMS ((tree, int));
153 static int calls_function_1 PARAMS ((tree, int));
155 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
156 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
159 static void precompute_register_parameters PARAMS ((int,
162 static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
164 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
166 static int finalize_must_preallocate PARAMS ((int, int,
168 struct args_size *));
169 static void precompute_arguments PARAMS ((int, int,
171 static int compute_argument_block_size PARAMS ((int,
174 static void initialize_argument_information PARAMS ((int,
181 static void compute_argument_addresses PARAMS ((struct arg_data *,
183 static rtx rtx_for_function_call PARAMS ((tree, tree));
184 static void load_register_parameters PARAMS ((struct arg_data *,
187 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
191 static int special_function_p PARAMS ((tree, int));
192 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
194 static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
195 static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *,
198 static int combine_pending_stack_adjustment_and_call
199 PARAMS ((int, struct args_size *, int));
200 static tree fix_unsafe_tree PARAMS ((tree));
202 #ifdef REG_PARM_STACK_SPACE
203 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
204 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
207 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
210 If WHICH is 0, return 1 if EXP contains a call to any function.
211 Actually, we only need to return 1 if evaluating EXP would require pushing
212 arguments on the stack, but that is too difficult to compute, so we just
213 assume any function call might require the stack. */
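/* For illustration only (hypothetical argument trees, not part of this file):
   for an actual argument written as `g (x) + y', calls_function (arg, 0)
   returns 1 because the expression contains a CALL_EXPR, whereas for a plain
   `x + y' it returns 0.  With WHICH == 1, only the alloca-like and
   stack-depressed cases checked in calls_function_1 make it return 1.  */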
215 static tree calls_function_save_exprs;
218 calls_function (exp, which)
224 calls_function_save_exprs = 0;
225 val = calls_function_1 (exp, which);
226 calls_function_save_exprs = 0;
230 /* Recursive function to do the work of the above function. */
233 calls_function_1 (exp, which)
238 enum tree_code code = TREE_CODE (exp);
239 int class = TREE_CODE_CLASS (code);
240 int length = first_rtl_op (code);
242 /* If this code is language-specific, we don't know what it will do. */
243 if ((int) code >= NUM_TREE_CODES)
251 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
253 && (TYPE_RETURNS_STACK_DEPRESSED
254 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
256 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
257 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
259 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
261 & ECF_MAY_BE_ALLOCA))
270 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
271 if (calls_function_1 (TREE_VALUE (tem), which))
278 if (SAVE_EXPR_RTL (exp) != 0)
280 if (value_member (exp, calls_function_save_exprs))
282 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
283 calls_function_save_exprs);
284 return (TREE_OPERAND (exp, 0) != 0
285 && calls_function_1 (TREE_OPERAND (exp, 0), which));
292 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
293 if (DECL_INITIAL (local) != 0
294 && calls_function_1 (DECL_INITIAL (local), which))
297 for (subblock = BLOCK_SUBBLOCKS (exp);
299 subblock = TREE_CHAIN (subblock))
300 if (calls_function_1 (subblock, which))
306 for (; exp != 0; exp = TREE_CHAIN (exp))
307 if (calls_function_1 (TREE_VALUE (exp), which))
315 /* Only expressions, references, and blocks can contain calls. */
316 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
319 for (i = 0; i < length; i++)
320 if (TREE_OPERAND (exp, i) != 0
321 && calls_function_1 (TREE_OPERAND (exp, i), which))
327 /* Force FUNEXP into a form suitable for the address of a CALL,
328 and return that as an rtx. Also load the static chain register
329 if FNDECL is a nested function.
331 CALL_FUSAGE points to a variable holding the prospective
332 CALL_INSN_FUNCTION_USAGE information. */
335 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
342 rtx static_chain_value = 0;
344 funexp = protect_from_queue (funexp, 0);
347 /* Get possible static chain value for nested function in C. */
348 static_chain_value = lookup_static_chain (fndecl);
350 /* Make a valid memory address and copy constants through pseudo-regs,
351 but not for a constant address if -fno-function-cse. */
352 if (GET_CODE (funexp) != SYMBOL_REF)
353 /* If we are using registers for parameters, force the
354 function address into a register now. */
355 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
356 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
357 : memory_address (FUNCTION_MODE, funexp));
360 #ifndef NO_FUNCTION_CSE
361 if (optimize && ! flag_no_function_cse)
362 #ifdef NO_RECURSIVE_FUNCTION_CSE
363 if (fndecl != current_function_decl)
365 funexp = force_reg (Pmode, funexp);
369 if (static_chain_value != 0)
371 emit_move_insn (static_chain_rtx, static_chain_value);
373 if (GET_CODE (static_chain_rtx) == REG)
374 use_reg (call_fusage, static_chain_rtx);
380 /* Generate instructions to call function FUNEXP,
381 and optionally pop the results.
382 The CALL_INSN is the first insn generated.
384 FNDECL is the declaration node of the function. This is given to the
385 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
387 FUNTYPE is the data type of the function. This is given to the macro
388 RETURN_POPS_ARGS to determine whether this function pops its own args.
389 We used to allow an identifier for library functions, but that doesn't
390 work when the return type is an aggregate type and the calling convention
391 says that the pointer to this aggregate is to be popped by the callee.
393 STACK_SIZE is the number of bytes of arguments on the stack,
394 ROUNDED_STACK_SIZE is that number rounded up to
395 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
396 both to put into the call insn and to generate explicit popping
399 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
400 It is zero if this call doesn't want a structure value.
402 NEXT_ARG_REG is the rtx that results from executing
403 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
404 just after all the args have had their registers assigned.
405 This could be whatever you like, but normally it is the first
406 arg-register beyond those used for args in this call,
407 or 0 if all the arg-registers are used in this call.
408 It is passed on to `gen_call' so you can put this info in the call insn.
410 VALREG is a hard register in which a value is returned,
411 or 0 if the call does not return a value.
413 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
414 the args to this call were processed.
415 We restore `inhibit_defer_pop' to that value.
417 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
418 denote registers used by the called function. */
421 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
422 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
423 call_fusage, ecf_flags, args_so_far)
425 tree fndecl ATTRIBUTE_UNUSED;
426 tree funtype ATTRIBUTE_UNUSED;
427 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
428 HOST_WIDE_INT rounded_stack_size;
429 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
430 rtx next_arg_reg ATTRIBUTE_UNUSED;
432 int old_inhibit_defer_pop;
435 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
437 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
439 int already_popped = 0;
440 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
441 #if defined (HAVE_call) && defined (HAVE_call_value)
442 rtx struct_value_size_rtx;
443 struct_value_size_rtx = GEN_INT (struct_value_size);
446 #ifdef CALL_POPS_ARGS
447 n_popped += CALL_POPS_ARGS (* args_so_far);
450 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
451 and we don't want to load it into a register as an optimization,
452 because prepare_call_address already did it if it should be done. */
453 if (GET_CODE (funexp) != SYMBOL_REF)
454 funexp = memory_address (FUNCTION_MODE, funexp);
456 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
457 if ((ecf_flags & ECF_SIBCALL)
458 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
459 && (n_popped > 0 || stack_size == 0))
461 rtx n_pop = GEN_INT (n_popped);
464 /* If this subroutine pops its own args, record that in the call insn
465 if possible, for the sake of frame pointer elimination. */
468 pat = GEN_SIBCALL_VALUE_POP (valreg,
469 gen_rtx_MEM (FUNCTION_MODE, funexp),
470 rounded_stack_size_rtx, next_arg_reg,
473 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
474 rounded_stack_size_rtx, next_arg_reg, n_pop);
476 emit_call_insn (pat);
482 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
483 /* If the target has "call" or "call_value" insns, then prefer them
484 if no arguments are actually popped. If the target does not have
485 "call" or "call_value" insns, then we must use the popping versions
486 even if the call has no arguments to pop. */
487 #if defined (HAVE_call) && defined (HAVE_call_value)
488 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
489 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
491 if (HAVE_call_pop && HAVE_call_value_pop)
494 rtx n_pop = GEN_INT (n_popped);
497 /* If this subroutine pops its own args, record that in the call insn
498 if possible, for the sake of frame pointer elimination. */
501 pat = GEN_CALL_VALUE_POP (valreg,
502 gen_rtx_MEM (FUNCTION_MODE, funexp),
503 rounded_stack_size_rtx, next_arg_reg, n_pop);
505 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
506 rounded_stack_size_rtx, next_arg_reg, n_pop);
508 emit_call_insn (pat);
514 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
515 if ((ecf_flags & ECF_SIBCALL)
516 && HAVE_sibcall && HAVE_sibcall_value)
519 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
520 gen_rtx_MEM (FUNCTION_MODE, funexp),
521 rounded_stack_size_rtx,
522 next_arg_reg, NULL_RTX));
524 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
525 rounded_stack_size_rtx, next_arg_reg,
526 struct_value_size_rtx));
531 #if defined (HAVE_call) && defined (HAVE_call_value)
532 if (HAVE_call && HAVE_call_value)
535 emit_call_insn (GEN_CALL_VALUE (valreg,
536 gen_rtx_MEM (FUNCTION_MODE, funexp),
537 rounded_stack_size_rtx, next_arg_reg,
540 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
541 rounded_stack_size_rtx, next_arg_reg,
542 struct_value_size_rtx));
548 /* Find the CALL insn we just emitted. */
549 for (call_insn = get_last_insn ();
550 call_insn && GET_CODE (call_insn) != CALL_INSN;
551 call_insn = PREV_INSN (call_insn))
557 /* Mark memory as used for "pure" function call. */
558 if (ecf_flags & ECF_PURE)
562 gen_rtx_USE (VOIDmode,
563 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
566 /* Put the register usage information on the CALL. If there is already
567 some usage information, put ours at the end. */
568 if (CALL_INSN_FUNCTION_USAGE (call_insn))
572 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
573 link = XEXP (link, 1))
576 XEXP (link, 1) = call_fusage;
579 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
581 /* If this is a const call, then set the insn's unchanging bit. */
582 if (ecf_flags & (ECF_CONST | ECF_PURE))
583 CONST_OR_PURE_CALL_P (call_insn) = 1;
585 /* If this call can't throw, attach a REG_EH_REGION reg note to that
587 if (ecf_flags & ECF_NOTHROW)
588 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
589 REG_NOTES (call_insn));
591 if (ecf_flags & ECF_NORETURN)
592 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
593 REG_NOTES (call_insn));
594 if (ecf_flags & ECF_ALWAYS_RETURN)
595 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
596 REG_NOTES (call_insn));
598 if (ecf_flags & ECF_RETURNS_TWICE)
600 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
601 REG_NOTES (call_insn));
602 current_function_calls_setjmp = 1;
605 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
607 /* Restore this now, so that we do defer pops for this call's args
608 if the context of the call as a whole permits. */
609 inhibit_defer_pop = old_inhibit_defer_pop;
614 CALL_INSN_FUNCTION_USAGE (call_insn)
615 = gen_rtx_EXPR_LIST (VOIDmode,
616 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
617 CALL_INSN_FUNCTION_USAGE (call_insn));
618 rounded_stack_size -= n_popped;
619 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
620 stack_pointer_delta -= n_popped;
623 if (!ACCUMULATE_OUTGOING_ARGS)
625 /* If returning from the subroutine does not automatically pop the args,
626 we need an instruction to pop them sooner or later.
627 Perhaps do it now; perhaps just record how much space to pop later.
629 If returning from the subroutine does pop the args, indicate that the
630 stack pointer will be changed. */
632 if (rounded_stack_size != 0)
634 if (ecf_flags & ECF_SP_DEPRESSED)
635 /* Just pretend we did the pop. */
636 stack_pointer_delta -= rounded_stack_size;
637 else if (flag_defer_pop && inhibit_defer_pop == 0
638 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
639 pending_stack_adjust += rounded_stack_size;
641 adjust_stack (rounded_stack_size_rtx);
644 /* When we accumulate outgoing args, we must avoid any stack manipulations.
645 Restore the stack pointer to its original value now. Usually
646 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
647 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
648 popping variants of functions exist as well.
650 ??? We could optimize similarly to defer_pop above, but it is
651 probably not worthwhile.
653 ??? It will be worthwhile to enable combine_stack_adjustments even for
656 anti_adjust_stack (GEN_INT (n_popped));
659 /* Determine if the function identified by NAME and FNDECL is one with
660 special properties we wish to know about.
662 For example, if the function might return more than one time (setjmp), then
663 set RETURNS_TWICE to a nonzero value.
665 Similarly set LONGJMP if the function is in the longjmp family.
667 Set MALLOC for any of the standard memory allocation functions which
668 allocate from the heap.
670 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
671 space from the stack such as alloca. */
674 special_function_p (fndecl, flags)
678 if (! (flags & ECF_MALLOC)
679 && fndecl && DECL_NAME (fndecl)
680 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
681 /* Exclude functions not at the file scope, or not `extern',
682 since they are not the magic functions we would otherwise
684 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
686 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
687 const char *tname = name;
689 /* We assume that alloca will always be called by name. It
690 makes no sense to pass it as a pointer-to-function to
691 anything that does not understand its behavior. */
692 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
694 && ! strcmp (name, "alloca"))
695 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
697 && ! strcmp (name, "__builtin_alloca"))))
698 flags |= ECF_MAY_BE_ALLOCA;
700 /* Disregard prefix _, __ or __x. */
703 if (name[1] == '_' && name[2] == 'x')
705 else if (name[1] == '_')
714 && (! strcmp (tname, "setjmp")
715 || ! strcmp (tname, "setjmp_syscall")))
717 && ! strcmp (tname, "sigsetjmp"))
719 && ! strcmp (tname, "savectx")))
720 flags |= ECF_RETURNS_TWICE;
723 && ! strcmp (tname, "siglongjmp"))
724 flags |= ECF_LONGJMP;
726 else if ((tname[0] == 'q' && tname[1] == 's'
727 && ! strcmp (tname, "qsetjmp"))
728 || (tname[0] == 'v' && tname[1] == 'f'
729 && ! strcmp (tname, "vfork")))
730 flags |= ECF_RETURNS_TWICE;
732 else if (tname[0] == 'l' && tname[1] == 'o'
733 && ! strcmp (tname, "longjmp"))
734 flags |= ECF_LONGJMP;
736 else if ((tname[0] == 'f' && tname[1] == 'o'
737 && ! strcmp (tname, "fork"))
738 /* Linux specific: __clone. Check NAME to insist on the
739 leading underscores, to avoid polluting the ISO / POSIX
741 || (name[0] == '_' && name[1] == '_'
742 && ! strcmp (tname, "clone"))
743 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
744 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
746 || ((tname[5] == 'p' || tname[5] == 'e')
747 && tname[6] == '\0'))))
748 flags |= ECF_FORK_OR_EXEC;
750 /* Do not add any more malloc-like functions to this list;
751 instead, mark them as malloc functions using the malloc attribute.
752 Note, realloc is not suitable for attribute malloc since
753 it may return the same address across multiple calls.
754 C++ operator new is not suitable because it is not required
755 to return a unique pointer; indeed, the standard placement new
756 just returns its argument. */
757 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
758 && (! strcmp (tname, "malloc")
759 || ! strcmp (tname, "calloc")
760 || ! strcmp (tname, "strdup")))
766 /* Return nonzero when FNDECL represents a call to setjmp (or another function that may return more than once). */
769 setjmp_call_p (fndecl)
772 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
775 /* Return true when EXP contains an alloca call. */
780 if (TREE_CODE (exp) == CALL_EXPR
781 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
782 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
784 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
785 0) & ECF_MAY_BE_ALLOCA))
790 /* Detect flags (function attributes) from the function decl or type node. */
793 flags_from_decl_or_type (exp)
798 /* ??? We can't set IS_MALLOC for function types? */
801 type = TREE_TYPE (exp);
803 /* The function exp may have the `malloc' attribute. */
804 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
807 /* The function exp may have the `pure' attribute. */
808 if (DECL_P (exp) && DECL_IS_PURE (exp))
809 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
811 if (TREE_NOTHROW (exp))
812 flags |= ECF_NOTHROW;
815 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
816 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
818 if (TREE_THIS_VOLATILE (exp))
819 flags |= ECF_NORETURN;
821 /* Mark if the function returns with the stack pointer depressed. We
822 cannot consider it pure or constant in that case. */
823 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
825 flags |= ECF_SP_DEPRESSED;
826 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
832 /* Precompute all register parameters as described by ARGS, storing values
833 into fields within the ARGS array.
835 NUM_ACTUALS indicates the total number of elements in the ARGS array.
837 Set REG_PARM_SEEN if we encounter a register parameter. */
840 precompute_register_parameters (num_actuals, args, reg_parm_seen)
842 struct arg_data *args;
849 for (i = 0; i < num_actuals; i++)
850 if (args[i].reg != 0 && ! args[i].pass_on_stack)
854 if (args[i].value == 0)
857 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
859 preserve_temp_slots (args[i].value);
862 /* ANSI doesn't require a sequence point here,
863 but PCC has one, so this will avoid some problems. */
867 /* If the value is a non-legitimate constant, force it into a
868 pseudo now. TLS symbols sometimes need a call to resolve. */
869 if (CONSTANT_P (args[i].value)
870 && !LEGITIMATE_CONSTANT_P (args[i].value))
871 args[i].value = force_reg (args[i].mode, args[i].value);
873 /* If we are to promote the function arg to a wider mode,
876 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
878 = convert_modes (args[i].mode,
879 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
880 args[i].value, args[i].unsignedp);
882 /* If the value is expensive, and we are inside an appropriately
883 short loop, put the value into a pseudo and then put the pseudo
886 For small register classes, also do this if this call uses
887 register parameters. This is to avoid reload conflicts while
888 loading the parameter registers. */
890 if ((! (GET_CODE (args[i].value) == REG
891 || (GET_CODE (args[i].value) == SUBREG
892 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
893 && args[i].mode != BLKmode
894 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
895 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
896 || preserve_subexpressions_p ()))
897 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
901 #ifdef REG_PARM_STACK_SPACE
903 /* The argument list is the property of the called routine and it
904 may clobber it. If the fixed area has been used for previous
905 parameters, we must save and restore it. */
908 save_fixed_argument_area (reg_parm_stack_space, argblock,
909 low_to_save, high_to_save)
910 int reg_parm_stack_space;
918 /* Compute the boundary of the area that needs to be saved, if any. */
919 high = reg_parm_stack_space;
920 #ifdef ARGS_GROW_DOWNWARD
923 if (high > highest_outgoing_arg_in_use)
924 high = highest_outgoing_arg_in_use;
926 for (low = 0; low < high; low++)
927 if (stack_usage_map[low] != 0)
930 enum machine_mode save_mode;
935 while (stack_usage_map[--high] == 0)
939 *high_to_save = high;
941 num_to_save = high - low + 1;
942 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
944 /* If we don't have the required alignment, we must do this
946 if ((low & (MIN (GET_MODE_SIZE (save_mode),
947 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
950 #ifdef ARGS_GROW_DOWNWARD
955 stack_area = gen_rtx_MEM (save_mode,
956 memory_address (save_mode,
957 plus_constant (argblock,
960 set_mem_align (stack_area, PARM_BOUNDARY);
961 if (save_mode == BLKmode)
963 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
964 emit_block_move (validize_mem (save_area), stack_area,
965 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
969 save_area = gen_reg_rtx (save_mode);
970 emit_move_insn (save_area, stack_area);
980 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
986 enum machine_mode save_mode = GET_MODE (save_area);
990 #ifdef ARGS_GROW_DOWNWARD
991 delta = -high_to_save;
995 stack_area = gen_rtx_MEM (save_mode,
996 memory_address (save_mode,
997 plus_constant (argblock, delta)));
998 set_mem_align (stack_area, PARM_BOUNDARY);
1000 if (save_mode != BLKmode)
1001 emit_move_insn (stack_area, save_area);
1003 emit_block_move (stack_area, validize_mem (save_area),
1004 GEN_INT (high_to_save - low_to_save + 1),
1005 BLOCK_OP_CALL_PARM);
1007 #endif /* REG_PARM_STACK_SPACE */
1009 /* If any elements in ARGS refer to parameters that are to be passed in
1010 registers, but not in memory, and whose alignment does not permit a
1011 direct copy into registers, copy the values into a group of pseudos
1012 which we will later copy into the appropriate hard registers.
1014 Pseudos for each unaligned argument will be stored into the array
1015 args[argnum].aligned_regs. The caller is responsible for deallocating
1016 the aligned_regs array if it is nonzero. */
1019 store_unaligned_arguments_into_pseudos (args, num_actuals)
1020 struct arg_data *args;
1025 for (i = 0; i < num_actuals; i++)
1026 if (args[i].reg != 0 && ! args[i].pass_on_stack
1027 && args[i].mode == BLKmode
1028 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1029 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1031 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1032 int big_endian_correction = 0;
1034 args[i].n_aligned_regs
1035 = args[i].partial ? args[i].partial
1036 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1038 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
1039 * args[i].n_aligned_regs);
1041 /* Structures smaller than a word are aligned to the least
1042 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1043 this means we must skip the empty high order bytes when
1044 calculating the bit offset. */
1045 if (BYTES_BIG_ENDIAN
1046 && bytes < UNITS_PER_WORD)
1047 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
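/* Worked example, for illustration only: with BITS_PER_WORD == 32 and a
   3-byte structure, bytes * BITS_PER_UNIT == 24, so big_endian_correction
   == 32 - 24 == 8; store_bit_field below then places the value 8 bits into
   the word, skipping the unused high-order byte.  */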
1049 for (j = 0; j < args[i].n_aligned_regs; j++)
1051 rtx reg = gen_reg_rtx (word_mode);
1052 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1053 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1055 args[i].aligned_regs[j] = reg;
1057 /* There is no need to restrict this code to loading items
1058 in TYPE_ALIGN sized hunks. The bitfield instructions can
1059 load up entire word sized registers efficiently.
1061 ??? This may not be needed anymore.
1062 We used to emit a clobber here, but that doesn't let later
1063 passes optimize the instructions we emit. By storing 0 into
1064 the register, later passes know that the first AND to zero out the
1065 bitfield being set in the register is unnecessary. The store
1066 of 0 will be deleted, as will at least the first AND. */
1068 emit_move_insn (reg, const0_rtx);
1070 bytes -= bitsize / BITS_PER_UNIT;
1071 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1072 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1073 word_mode, word_mode,
1080 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1083 NUM_ACTUALS is the total number of parameters.
1085 N_NAMED_ARGS is the total number of named arguments.
1087 FNDECL is the tree code for the target of this call (if known)
1089 ARGS_SO_FAR holds state needed by the target to know where to place
1092 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1093 for arguments which are passed in registers.
1095 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1096 and may be modified by this routine.
1098 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1099 flags which may be modified by this routine. */
1102 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1103 actparms, fndecl, args_so_far,
1104 reg_parm_stack_space, old_stack_level,
1105 old_pending_adj, must_preallocate,
1107 int num_actuals ATTRIBUTE_UNUSED;
1108 struct arg_data *args;
1109 struct args_size *args_size;
1110 int n_named_args ATTRIBUTE_UNUSED;
1113 CUMULATIVE_ARGS *args_so_far;
1114 int reg_parm_stack_space;
1115 rtx *old_stack_level;
1116 int *old_pending_adj;
1117 int *must_preallocate;
1120 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1123 /* Count arg position in order args appear. */
1126 struct args_size alignment_pad;
1130 args_size->constant = 0;
1133 /* In this loop, we consider args in the order they are written.
1134 We fill up ARGS from the front or from the back if necessary
1135 so that in any case the first arg to be pushed ends up at the front. */
1137 if (PUSH_ARGS_REVERSED)
1139 i = num_actuals - 1, inc = -1;
1140 /* In this case, we must reverse the order of the args
1141 so that we compute and push the last arg first. */
1148 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1149 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1151 tree type = TREE_TYPE (TREE_VALUE (p));
1153 enum machine_mode mode;
1155 args[i].tree_value = TREE_VALUE (p);
1157 /* Replace erroneous argument with constant zero. */
1158 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1159 args[i].tree_value = integer_zero_node, type = integer_type_node;
1161 /* If TYPE is a transparent union, pass things the way we would
1162 pass the first field of the union. We have already verified that
1163 the modes are the same. */
1164 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1165 type = TREE_TYPE (TYPE_FIELDS (type));
1167 /* Decide where to pass this arg.
1169 args[i].reg is nonzero if all or part is passed in registers.
1171 args[i].partial is nonzero if part but not all is passed in registers,
1172 and the exact value says how many words are passed in registers.
1174 args[i].pass_on_stack is nonzero if the argument must at least be
1175 computed on the stack. It may then be loaded back into registers
1176 if args[i].reg is nonzero.
1178 These decisions are driven by the FUNCTION_... macros and must agree
1179 with those made by function.c. */
1181 /* See if this argument should be passed by invisible reference. */
1182 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1183 && contains_placeholder_p (TYPE_SIZE (type)))
1184 || TREE_ADDRESSABLE (type)
1185 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1186 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1187 type, argpos < n_named_args)
1191 /* If we're compiling a thunk, pass through invisible
1192 references instead of making a copy. */
1193 if (current_function_is_thunk
1194 #ifdef FUNCTION_ARG_CALLEE_COPIES
1195 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1196 type, argpos < n_named_args)
1197 /* If it's in a register, we must make a copy of it too. */
1198 /* ??? Is this a sufficient test? Is there a better one? */
1199 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1200 && REG_P (DECL_RTL (args[i].tree_value)))
1201 && ! TREE_ADDRESSABLE (type))
1205 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1206 new object from the argument. If we are passing by
1207 invisible reference, the callee will do that for us, so we
1208 can strip off the TARGET_EXPR. This is not always safe,
1209 but it is safe in the only case where this is a useful
1210 optimization; namely, when the argument is a plain object.
1211 In that case, the frontend is just asking the backend to
1212 make a bitwise copy of the argument. */
1214 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1215 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1216 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1217 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1219 args[i].tree_value = build1 (ADDR_EXPR,
1220 build_pointer_type (type),
1221 args[i].tree_value);
1222 type = build_pointer_type (type);
1224 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1226 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1227 We implement this by passing the address of the temporary
1228 rather than expanding it into another allocated slot. */
1229 args[i].tree_value = build1 (ADDR_EXPR,
1230 build_pointer_type (type),
1231 args[i].tree_value);
1232 type = build_pointer_type (type);
1236 /* We make a copy of the object and pass the address to the
1237 function being called. */
1240 if (!COMPLETE_TYPE_P (type)
1241 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1242 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1243 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1244 STACK_CHECK_MAX_VAR_SIZE))))
1246 /* This is a variable-sized object. Make space on the stack
1248 rtx size_rtx = expr_size (TREE_VALUE (p));
1250 if (*old_stack_level == 0)
1252 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1253 *old_pending_adj = pending_stack_adjust;
1254 pending_stack_adjust = 0;
1257 copy = gen_rtx_MEM (BLKmode,
1258 allocate_dynamic_stack_space
1259 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1260 set_mem_attributes (copy, type, 1);
1263 copy = assign_temp (type, 0, 1, 0);
1265 store_expr (args[i].tree_value, copy, 0);
1266 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1268 args[i].tree_value = build1 (ADDR_EXPR,
1269 build_pointer_type (type),
1270 make_tree (type, copy));
1271 type = build_pointer_type (type);
1275 mode = TYPE_MODE (type);
1276 unsignedp = TREE_UNSIGNED (type);
1278 #ifdef PROMOTE_FUNCTION_ARGS
1279 mode = promote_mode (type, mode, &unsignedp, 1);
1282 args[i].unsignedp = unsignedp;
1283 args[i].mode = mode;
1285 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1286 argpos < n_named_args);
1287 #ifdef FUNCTION_INCOMING_ARG
1288 /* If this is a sibling call and the machine has register windows, the
1289 register window has to be unwound before calling the routine, so
1290 arguments have to go into the incoming registers. */
1291 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1292 argpos < n_named_args);
1294 args[i].tail_call_reg = args[i].reg;
1297 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1300 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1301 argpos < n_named_args);
1304 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1306 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1307 it means that we are to pass this arg in the register(s) designated
1308 by the PARALLEL, but also to pass it in the stack. */
1309 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1310 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1311 args[i].pass_on_stack = 1;
1313 /* If this is an addressable type, we must preallocate the stack
1314 since we must evaluate the object into its final location.
1316 If this is to be passed in both registers and the stack, it is simpler
1318 if (TREE_ADDRESSABLE (type)
1319 || (args[i].pass_on_stack && args[i].reg != 0))
1320 *must_preallocate = 1;
1322 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1323 we cannot consider this function call constant. */
1324 if (TREE_ADDRESSABLE (type))
1325 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1327 /* Compute the stack-size of this argument. */
1328 if (args[i].reg == 0 || args[i].partial != 0
1329 || reg_parm_stack_space > 0
1330 || args[i].pass_on_stack)
1331 locate_and_pad_parm (mode, type,
1332 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1337 fndecl, args_size, &args[i].offset,
1338 &args[i].size, &alignment_pad);
1340 #ifndef ARGS_GROW_DOWNWARD
1341 args[i].slot_offset = *args_size;
1344 args[i].alignment_pad = alignment_pad;
1346 /* If a part of the arg was put into registers,
1347 don't include that part in the amount pushed. */
1348 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1349 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1350 / (PARM_BOUNDARY / BITS_PER_UNIT)
1351 * (PARM_BOUNDARY / BITS_PER_UNIT));
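/* For illustration only: with UNITS_PER_WORD == 4, PARM_BOUNDARY == 64 and
   args[i].partial == 3, the registers cover 12 bytes; 12 / 8 * 8 == 8, so
   only 8 of those bytes are excluded from the amount pushed and the odd
   4 bytes still count toward the stack size.  */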
1353 /* Update ARGS_SIZE, the total stack space for args so far. */
1355 args_size->constant += args[i].size.constant;
1356 if (args[i].size.var)
1358 ADD_PARM_SIZE (*args_size, args[i].size.var);
1361 /* Since the slot offset points to the bottom of the slot,
1362 we must record it after incrementing if the args grow down. */
1363 #ifdef ARGS_GROW_DOWNWARD
1364 args[i].slot_offset = *args_size;
1366 args[i].slot_offset.constant = -args_size->constant;
1368 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1371 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1372 have been used, etc. */
1374 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1375 argpos < n_named_args);
1379 /* Update ARGS_SIZE to contain the total size for the argument block.
1380 Return the original constant component of the argument block's size.
1382 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1383 for arguments passed in registers. */
1386 compute_argument_block_size (reg_parm_stack_space, args_size,
1387 preferred_stack_boundary)
1388 int reg_parm_stack_space;
1389 struct args_size *args_size;
1390 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1392 int unadjusted_args_size = args_size->constant;
1394 /* For accumulate outgoing args mode we don't need to align, since the frame
1395 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1396 backends from generating misaligned frame sizes. */
1397 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1398 preferred_stack_boundary = STACK_BOUNDARY;
1400 /* Compute the actual size of the argument block required. The variable
1401 and constant sizes must be combined, the size may have to be rounded,
1402 and there may be a minimum required size. */
1406 args_size->var = ARGS_SIZE_TREE (*args_size);
1407 args_size->constant = 0;
1409 preferred_stack_boundary /= BITS_PER_UNIT;
1410 if (preferred_stack_boundary > 1)
1412 /* We don't handle this case yet. To handle it correctly we have
1413 to add the delta, round and subtract the delta.
1414 Currently no machine description requires this support. */
1415 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1417 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1420 if (reg_parm_stack_space > 0)
1423 = size_binop (MAX_EXPR, args_size->var,
1424 ssize_int (reg_parm_stack_space));
1426 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1427 /* The area corresponding to register parameters is not to be counted in
1428 the size of the block we need, so make the adjustment. */
1430 = size_binop (MINUS_EXPR, args_size->var,
1431 ssize_int (reg_parm_stack_space));
1437 preferred_stack_boundary /= BITS_PER_UNIT;
1438 if (preferred_stack_boundary < 1)
1439 preferred_stack_boundary = 1;
1440 args_size->constant = (((args_size->constant
1441 + stack_pointer_delta
1442 + preferred_stack_boundary - 1)
1443 / preferred_stack_boundary
1444 * preferred_stack_boundary)
1445 - stack_pointer_delta);
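/* Worked example, for illustration only: with args_size->constant == 20,
   stack_pointer_delta == 4 and a 16-byte preferred boundary, the expression
   above gives ((20 + 4 + 15) / 16) * 16 - 4 == 28, so pushing 28 bytes
   brings the total delta to 32, a multiple of the boundary.  */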
1447 args_size->constant = MAX (args_size->constant,
1448 reg_parm_stack_space);
1450 #ifdef MAYBE_REG_PARM_STACK_SPACE
1451 if (reg_parm_stack_space == 0)
1452 args_size->constant = 0;
1455 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1456 args_size->constant -= reg_parm_stack_space;
1459 return unadjusted_args_size;
1462 /* Precompute parameters as needed for a function call.
1464 FLAGS is mask of ECF_* constants.
1466 NUM_ACTUALS is the number of arguments.
1468 ARGS is an array containing information for each argument; this
1469 routine fills in the INITIAL_VALUE and VALUE fields for each
1470 precomputed argument. */
1473 precompute_arguments (flags, num_actuals, args)
1476 struct arg_data *args;
1480 /* If this function call is cse'able, precompute all the parameters.
1481 Note that if the parameter is constructed into a temporary, this will
1482 cause an additional copy because the parameter will be constructed
1483 into a temporary location and then copied into the outgoing arguments.
1484 If a parameter contains a call to alloca and this function uses the
1485 stack, precompute the parameter. */
1487 /* If we preallocated the stack space, and some arguments must be passed
1488 on the stack, then we must precompute any parameter which contains a
1489 function call which will store arguments on the stack.
1490 Otherwise, evaluating the parameter may clobber previous parameters
1491 which have already been stored into the stack. (We have code to avoid
1492 such a case by saving the outgoing stack arguments, but it results in
1495 for (i = 0; i < num_actuals; i++)
1496 if ((flags & ECF_LIBCALL_BLOCK)
1497 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1499 enum machine_mode mode;
1501 /* If this is an addressable type, we cannot pre-evaluate it. */
1502 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1506 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1508 /* ANSI doesn't require a sequence point here,
1509 but PCC has one, so this will avoid some problems. */
1512 args[i].initial_value = args[i].value
1513 = protect_from_queue (args[i].value, 0);
1515 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1516 if (mode != args[i].mode)
1519 = convert_modes (args[i].mode, mode,
1520 args[i].value, args[i].unsignedp);
1521 #ifdef PROMOTE_FOR_CALL_ONLY
1522 /* CSE will replace this only if it contains args[i].value
1523 pseudo, so convert it down to the declared mode using
1525 if (GET_CODE (args[i].value) == REG
1526 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1528 args[i].initial_value
1529 = gen_lowpart_SUBREG (mode, args[i].value);
1530 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1531 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1539 /* Given the current state of MUST_PREALLOCATE and information about
1540 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1541 compute and return the final value for MUST_PREALLOCATE. */
1544 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1545 int must_preallocate;
1547 struct arg_data *args;
1548 struct args_size *args_size;
1550 /* See if we have or want to preallocate stack space.
1552 If we would have to push a partially-in-regs parm
1553 before other stack parms, preallocate stack space instead.
1555 If the size of some parm is not a multiple of the required stack
1556 alignment, we must preallocate.
1558 If the total size of arguments that would otherwise create a copy in
1559 a temporary (such as a CALL) is more than half the total argument list
1560 size, preallocation is faster.
1562 Another reason to preallocate is if we have a machine (like the m88k)
1563 where stack alignment is required to be maintained between every
1564 pair of insns, not just when the call is made. However, we assume here
1565 that such machines either do not have push insns (and hence preallocation
1566 would occur anyway) or the problem is taken care of with
1569 if (! must_preallocate)
1571 int partial_seen = 0;
1572 int copy_to_evaluate_size = 0;
1575 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1577 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1579 else if (partial_seen && args[i].reg == 0)
1580 must_preallocate = 1;
1582 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1583 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1584 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1585 || TREE_CODE (args[i].tree_value) == COND_EXPR
1586 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1587 copy_to_evaluate_size
1588 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1591 if (copy_to_evaluate_size * 2 >= args_size->constant
1592 && args_size->constant > 0)
1593 must_preallocate = 1;
1595 return must_preallocate;
1598 /* If we preallocated stack space, compute the address of each argument
1599 and store it into the ARGS array.
1601 We need not ensure it is a valid memory address here; it will be
1602 validized when it is used.
1604 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1607 compute_argument_addresses (args, argblock, num_actuals)
1608 struct arg_data *args;
1614 rtx arg_reg = argblock;
1615 int i, arg_offset = 0;
1617 if (GET_CODE (argblock) == PLUS)
1618 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1620 for (i = 0; i < num_actuals; i++)
1622 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1623 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1626 /* Skip this parm if it will not be passed on the stack. */
1627 if (! args[i].pass_on_stack && args[i].reg != 0)
1630 if (GET_CODE (offset) == CONST_INT)
1631 addr = plus_constant (arg_reg, INTVAL (offset));
1633 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1635 addr = plus_constant (addr, arg_offset);
1636 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1637 set_mem_attributes (args[i].stack,
1638 TREE_TYPE (args[i].tree_value), 1);
1640 if (GET_CODE (slot_offset) == CONST_INT)
1641 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1643 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1645 addr = plus_constant (addr, arg_offset);
1646 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1647 set_mem_attributes (args[i].stack_slot,
1648 TREE_TYPE (args[i].tree_value), 1);
1650 /* Function incoming arguments may overlap with sibling call
1651 outgoing arguments and we cannot allow reordering of reads
1652 from function arguments with stores to outgoing arguments
1653 of sibling calls. */
1654 set_mem_alias_set (args[i].stack, 0);
1655 set_mem_alias_set (args[i].stack_slot, 0);
1660 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1661 in a call instruction.
1663 FNDECL is the tree node for the target function. For an indirect call
1664 FNDECL will be NULL_TREE.
1666 ADDR is the operand 0 of CALL_EXPR for this call. */
1669 rtx_for_function_call (fndecl, addr)
1675 /* Get the function to call, in the form of RTL. */
1678 /* If this is the first use of the function, see if we need to
1679 make an external definition for it. */
1680 if (! TREE_USED (fndecl))
1682 assemble_external (fndecl);
1683 TREE_USED (fndecl) = 1;
1686 /* Get a SYMBOL_REF rtx for the function address. */
1687 funexp = XEXP (DECL_RTL (fndecl), 0);
1690 /* Generate an rtx (probably a pseudo-register) for the address. */
1693 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1694 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1700 /* Do the register loads required for any wholly-register parms or any
1701 parms which are passed both on the stack and in a register. Their
1702 expressions were already evaluated.
1704 Mark all register-parms as living through the call, putting these USE
1705 insns in the CALL_INSN_FUNCTION_USAGE field.
1707 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1708 checking, setting *SIBCALL_FAILURE if appropriate. */
1711 load_register_parameters (args, num_actuals, call_fusage, flags,
1712 is_sibcall, sibcall_failure)
1713 struct arg_data *args;
1718 int *sibcall_failure;
1722 #ifdef LOAD_ARGS_REVERSED
1723 for (i = num_actuals - 1; i >= 0; i--)
1725 for (i = 0; i < num_actuals; i++)
1728 rtx reg = ((flags & ECF_SIBCALL)
1729 ? args[i].tail_call_reg : args[i].reg);
1730 int partial = args[i].partial;
1735 rtx before_arg = get_last_insn ();
1736 /* Set to non-negative if we must move a word at a time, even if just
1737 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1738 we just use a normal move insn. This value can be zero if the
1739 argument is a zero size structure with no fields. */
1740 nregs = (partial ? partial
1741 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1742 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1743 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1746 /* Handle calls that pass values in multiple non-contiguous
1747 locations. The Irix 6 ABI has examples of this. */
1749 if (GET_CODE (reg) == PARALLEL)
1750 emit_group_load (reg, args[i].value,
1751 int_size_in_bytes (TREE_TYPE (args[i].tree_value)));
1753 /* If simple case, just do move. If normal partial, store_one_arg
1754 has already loaded the register for us. In all other cases,
1755 load the register(s) from memory. */
1757 else if (nregs == -1)
1758 emit_move_insn (reg, args[i].value);
1760 /* If we have pre-computed the values to put in the registers in
1761 the case of non-aligned structures, copy them in now. */
1763 else if (args[i].n_aligned_regs != 0)
1764 for (j = 0; j < args[i].n_aligned_regs; j++)
1765 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1766 args[i].aligned_regs[j]);
1768 else if (partial == 0 || args[i].pass_on_stack)
1769 move_block_to_reg (REGNO (reg),
1770 validize_mem (args[i].value), nregs,
1773 /* When a parameter is a block, and perhaps in other cases, it is
1774 possible that it did a load from an argument slot that was
1775 already clobbered. */
1777 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1778 *sibcall_failure = 1;
1780 /* Handle calls that pass values in multiple non-contiguous
1781 locations. The Irix 6 ABI has examples of this. */
1782 if (GET_CODE (reg) == PARALLEL)
1783 use_group_regs (call_fusage, reg);
1784 else if (nregs == -1)
1785 use_reg (call_fusage, reg);
1787 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1792 /* Try to integrate the function. See expand_inline_function for documentation
1793 about the parameters. */
1796 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1802 rtx structure_value_addr;
1807 rtx old_stack_level = 0;
1808 int reg_parm_stack_space = 0;
1810 #ifdef REG_PARM_STACK_SPACE
1811 #ifdef MAYBE_REG_PARM_STACK_SPACE
1812 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1814 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1818 before_call = get_last_insn ();
1820 timevar_push (TV_INTEGRATION);
1822 temp = expand_inline_function (fndecl, actparms, target,
1824 structure_value_addr);
1826 timevar_pop (TV_INTEGRATION);
1828 /* If inlining succeeded, return. */
1829 if (temp != (rtx) (size_t) - 1)
1831 if (ACCUMULATE_OUTGOING_ARGS)
1833 /* If the outgoing argument list must be preserved, push
1834 the stack before executing the inlined function if it
1837 i = reg_parm_stack_space;
1838 if (i > highest_outgoing_arg_in_use)
1839 i = highest_outgoing_arg_in_use;
1840 while (--i >= 0 && stack_usage_map[i] == 0)
1843 if (stack_arg_under_construction || i >= 0)
1846 = before_call ? NEXT_INSN (before_call) : get_insns ();
1847 rtx insn = NULL_RTX, seq;
1849 /* Look for a call in the inline function code.
1850 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1851 nonzero then there is a call and it is not necessary
1852 to scan the insns. */
1854 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1855 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1856 if (GET_CODE (insn) == CALL_INSN)
1861 /* Reserve enough stack space so that the largest
1862 argument list of any function call in the inline
1863 function does not overlap the argument list being
1864 evaluated. This is usually an overestimate because
1865 allocate_dynamic_stack_space reserves space for an
1866 outgoing argument list in addition to the requested
1867 space, but there is no way to ask for stack space such
1868 that an argument list of a certain length can be
1871 Add the stack space reserved for register arguments, if
1872 any, in the inline function. What is really needed is the
1873 largest value of reg_parm_stack_space in the inline
1874 function, but that is not available. Using the current
1875 value of reg_parm_stack_space is wrong, but gives
1876 correct results on all supported machines. */
1878 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1879 + reg_parm_stack_space);
1882 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1883 allocate_dynamic_stack_space (GEN_INT (adjust),
1884 NULL_RTX, BITS_PER_UNIT);
1887 emit_insn_before (seq, first_insn);
1888 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1893 /* If the result is equivalent to TARGET, return TARGET to simplify
1894 checks in store_expr. They can be equivalent but not equal in the
1895 case of a function that returns BLKmode. */
1896 if (temp != target && rtx_equal_p (temp, target))
1901 /* If inlining failed, mark FNDECL as needing to be compiled
1902 separately after all. If function was declared inline,
1904 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1905 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1907 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1908 warning ("called from here");
1910 (*lang_hooks.mark_addressable) (fndecl);
1911 return (rtx) (size_t) - 1;
1914 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1915 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1916 bytes, then we would need to push some additional bytes to pad the
1917 arguments. So, we compute an adjust to the stack pointer for an
1918 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1919 bytes. Then, when the arguments are pushed the stack will be perfectly
1920 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1921 be popped after the call. Returns the adjustment. */
1924 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1926 preferred_unit_stack_boundary)
1927 int unadjusted_args_size;
1928 struct args_size *args_size;
1929 int preferred_unit_stack_boundary;
1931 /* The number of bytes to pop so that the stack will be
1932 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1933 HOST_WIDE_INT adjustment;
1934 /* The alignment of the stack after the arguments are pushed, if we
1935 just pushed the arguments without adjusting the stack here. */
1936 HOST_WIDE_INT unadjusted_alignment;
1938 unadjusted_alignment
1939 = ((stack_pointer_delta + unadjusted_args_size)
1940 % preferred_unit_stack_boundary);
1942 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1943 as possible -- leaving just enough left to cancel out the
1944 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1945 PENDING_STACK_ADJUST is non-negative, and congruent to
1946 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
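/* Worked example (illustrative numbers only, not taken from the original
   sources): with a 16-byte preferred boundary, STACK_POINTER_DELTA == 0,
   UNADJUSTED_ARGS_SIZE == 20 and PENDING_STACK_ADJUST == 28, the
   unadjusted alignment is 20 % 16 == 4.  Trying to pop all 28 pending
   bytes gives 4 - (28 % 16) == -8, so we settle for popping
   28 - 8 == 20 bytes now; pushing the 20 bytes of arguments then leaves
   the stack 16-byte aligned, and ARGS_SIZE->CONSTANT becomes
   28 - 20 + 20 == 28 bytes to pop after the call.  */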
1948 /* Begin by trying to pop all the bytes. */
1949 unadjusted_alignment
1950 = (unadjusted_alignment
1951 - (pending_stack_adjust % preferred_unit_stack_boundary));
1952 adjustment = pending_stack_adjust;
1953 /* Push enough additional bytes that the stack will be aligned
1954 after the arguments are pushed. */
1955 if (preferred_unit_stack_boundary > 1)
1957 if (unadjusted_alignment > 0)
1958 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1960 adjustment += unadjusted_alignment;
1963 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1964 bytes after the call. The right number is the entire
1965 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1966 by the arguments in the first place. */
1968 = pending_stack_adjust - adjustment + unadjusted_args_size;
1973 /* Scan expression X to see whether it dereferences any argument slots
1974 that we have already clobbered with tail call arguments (as noted in the stored_args_map
1976 Return nonzero if X dereferences such an argument slot,
1980 check_sibcall_argument_overlap_1 (x)
1991 code = GET_CODE (x);
1995 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1997 else if (GET_CODE (XEXP (x, 0)) == PLUS
1998 && XEXP (XEXP (x, 0), 0) ==
1999 current_function_internal_arg_pointer
2000 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2001 i = INTVAL (XEXP (XEXP (x, 0), 1));
2005 #ifdef ARGS_GROW_DOWNWARD
2006 i = -i - GET_MODE_SIZE (GET_MODE (x));
2009 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
2010 if (i + k < stored_args_map->n_bits
2011 && TEST_BIT (stored_args_map, i + k))
2017 /* Scan all subexpressions. */
2018 fmt = GET_RTX_FORMAT (code);
2019 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2023 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2026 else if (*fmt == 'E')
2028 for (j = 0; j < XVECLEN (x, i); j++)
2029 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2036 /* Scan the sequence after INSN to see whether it dereferences any argument slots
2037 that we have already clobbered with tail call arguments (as noted in the stored_args_map
2038 bitmap). If MARK_STORED_ARGS_MAP is nonzero, add the stack slots for ARG to the
2039 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
2040 should be 0). Return nonzero if the sequence after INSN dereferences such argument
2041 slots, zero otherwise. */
2044 check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
2046 struct arg_data *arg;
2047 int mark_stored_args_map;
2051 if (insn == NULL_RTX)
2052 insn = get_insns ();
2054 insn = NEXT_INSN (insn);
2056 for (; insn; insn = NEXT_INSN (insn))
2058 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2061 if (mark_stored_args_map)
2063 #ifdef ARGS_GROW_DOWNWARD
2064 low = -arg->slot_offset.constant - arg->size.constant;
2066 low = arg->slot_offset.constant;
2069 for (high = low + arg->size.constant; low < high; low++)
2070 SET_BIT (stored_args_map, low);
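/* Illustrative example (assuming upward-growing arguments): an argument
   with slot_offset.constant == 8 and size.constant == 4 marks bits 8
   through 11 of stored_args_map as clobbered, so any later argument
   computation that reads those stack slots will make the caller set
   sibcall_failure.  */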
2072 return insn != NULL_RTX;
2079 switch (unsafe_for_reeval (t))
2084 case 1: /* Mildly unsafe. */
2085 t = unsave_expr (t);
2088 case 2: /* Wildly unsafe. */
2090 tree var = build_decl (VAR_DECL, NULL_TREE,
2093 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2104 /* Generate all the code for a function call
2105 and return an rtx for its value.
2106 Store the value in TARGET (specified as an rtx) if convenient.
2107 If the value is stored in TARGET then TARGET is returned.
2108 If IGNORE is nonzero, then we ignore the value of the function call. */
2111 expand_call (exp, target, ignore)
2116 /* Nonzero if we are currently expanding a call. */
2117 static int currently_expanding_call = 0;
2119 /* List of actual parameters. */
2120 tree actparms = TREE_OPERAND (exp, 1);
2121 /* RTX for the function to be called. */
2123 /* Sequence of insns to perform a tail recursive "call". */
2124 rtx tail_recursion_insns = NULL_RTX;
2125 /* Sequence of insns to perform a normal "call". */
2126 rtx normal_call_insns = NULL_RTX;
2127 /* Sequence of insns to perform a tail (sibling) "call". */
2128 rtx tail_call_insns = NULL_RTX;
2129 /* Data type of the function. */
2131 /* Declaration of the function being called,
2132 or 0 if the function is computed (not known by name). */
2135 int try_tail_call = 1;
2136 int try_tail_recursion = 1;
2139 /* Register in which non-BLKmode value will be returned,
2140 or 0 if no value or if value is BLKmode. */
2142 /* Address where we should return a BLKmode value;
2143 0 if value not BLKmode. */
2144 rtx structure_value_addr = 0;
2145 /* Nonzero if that address is being passed by treating it as
2146 an extra, implicit first parameter. Otherwise,
2147 it is passed by being copied directly into struct_value_rtx. */
2148 int structure_value_addr_parm = 0;
2149 /* Size of aggregate value wanted, or zero if none wanted
2150 or if we are using the non-reentrant PCC calling convention
2151 or expecting the value in registers. */
2152 HOST_WIDE_INT struct_value_size = 0;
2153 /* Nonzero if called function returns an aggregate in memory PCC style,
2154 by returning the address of where to find it. */
2155 int pcc_struct_value = 0;
2157 /* Number of actual parameters in this call, including struct value addr. */
2159 /* Number of named args. Args after this are anonymous ones
2160 and they must all go on the stack. */
2163 /* Vector of information about each argument.
2164 Arguments are numbered in the order they will be pushed,
2165 not the order they are written. */
2166 struct arg_data *args;
2168 /* Total size in bytes of all the stack-parms scanned so far. */
2169 struct args_size args_size;
2170 struct args_size adjusted_args_size;
2171 /* Size of arguments before any adjustments (such as rounding). */
2172 int unadjusted_args_size;
2173 /* Data on reg parms scanned so far. */
2174 CUMULATIVE_ARGS args_so_far;
2175 /* Nonzero if a reg parm has been scanned. */
2177 /* Nonzero if this is an indirect function call. */
2179 /* Nonzero if we must avoid push-insns in the args for this call.
2180 If stack space is allocated for register parameters, but not by the
2181 caller, then it is preallocated in the fixed part of the stack frame.
2182 So the entire argument block must then be preallocated (i.e., we
2183 ignore PUSH_ROUNDING in that case). */
2185 int must_preallocate = !PUSH_ARGS;
2187 /* Size of the stack reserved for parameter registers. */
2188 int reg_parm_stack_space = 0;
2190 /* Address of space preallocated for stack parms
2191 (on machines that lack push insns), or 0 if space not preallocated. */
2194 /* Mask of ECF_ flags. */
2196 /* Nonzero if this is a call to an inline function. */
2197 int is_integrable = 0;
2198 #ifdef REG_PARM_STACK_SPACE
2199 /* Define the boundary of the register parm stack space that needs to be
2201 int low_to_save, high_to_save;
2202 rtx save_area = 0; /* Place that it is saved */
2205 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2206 char *initial_stack_usage_map = stack_usage_map;
2207 int old_stack_arg_under_construction = 0;
2209 rtx old_stack_level = 0;
2210 int old_pending_adj = 0;
2211 int old_inhibit_defer_pop = inhibit_defer_pop;
2212 int old_stack_allocated;
2214 tree p = TREE_OPERAND (exp, 0);
2215 tree addr = TREE_OPERAND (exp, 0);
2217 /* The alignment of the stack, in bits. */
2218 HOST_WIDE_INT preferred_stack_boundary;
2219 /* The alignment of the stack, in bytes. */
2220 HOST_WIDE_INT preferred_unit_stack_boundary;
2222 /* See if this is a "nothrow" function call. */
2223 if (TREE_NOTHROW (exp))
2224 flags |= ECF_NOTHROW;
2226 /* See if we can find a DECL-node for the actual function.
2227 As a result, decide whether this is a call to an integrable function. */
2229 fndecl = get_callee_fndecl (exp);
2233 && fndecl != current_function_decl
2234 && DECL_INLINE (fndecl)
2235 && DECL_SAVED_INSNS (fndecl)
2236 && DECL_SAVED_INSNS (fndecl)->inlinable)
2238 else if (! TREE_ADDRESSABLE (fndecl))
2240 /* In case this function later becomes inlinable,
2241 record that there was already a non-inline call to it.
2243 Use abstraction instead of setting TREE_ADDRESSABLE
2245 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2248 warning_with_decl (fndecl, "can't inline call to `%s'");
2249 warning ("called from here");
2251 (*lang_hooks.mark_addressable) (fndecl);
2254 flags |= flags_from_decl_or_type (fndecl);
2257 /* If we don't have a specific function to call, see if we have
2258 attributes set in the type. */
2260 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2262 #ifdef REG_PARM_STACK_SPACE
2263 #ifdef MAYBE_REG_PARM_STACK_SPACE
2264 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2266 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2270 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2271 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2272 must_preallocate = 1;
2275 /* Warn if this value is an aggregate type,
2276 regardless of which calling convention we are using for it. */
2277 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2278 warning ("function call has aggregate value");
2280 /* Set up a place to return a structure. */
2282 /* Cater to broken compilers. */
2283 if (aggregate_value_p (exp))
2285 /* This call returns a big structure. */
2286 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2288 #ifdef PCC_STATIC_STRUCT_RETURN
2290 pcc_struct_value = 1;
2291 /* Easier than making that case work right. */
2294 /* In case this is a static function, note that it has been
2296 if (! TREE_ADDRESSABLE (fndecl))
2297 (*lang_hooks.mark_addressable) (fndecl);
2301 #else /* not PCC_STATIC_STRUCT_RETURN */
2303 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2305 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2307 /* The structure value address arg is already in actparms.
2308 Pull it out. It might be nice to just leave it there, but
2309 we need to set structure_value_addr. */
2310 tree return_arg = TREE_VALUE (actparms);
2311 actparms = TREE_CHAIN (actparms);
2312 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2313 VOIDmode, EXPAND_NORMAL);
2315 else if (target && GET_CODE (target) == MEM)
2316 structure_value_addr = XEXP (target, 0);
2319 /* For variable-sized objects, we must be called with a target
2320 specified. If we were to allocate space on the stack here,
2321 we would have no way of knowing when to free it. */
2322 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2324 mark_temp_addr_taken (d);
2325 structure_value_addr = XEXP (d, 0);
2329 #endif /* not PCC_STATIC_STRUCT_RETURN */
2332 /* If called function is inline, try to integrate it. */
2336 rtx temp = try_to_integrate (fndecl, actparms, target,
2337 ignore, TREE_TYPE (exp),
2338 structure_value_addr);
2339 if (temp != (rtx) (size_t) - 1)
2343 /* Figure out the amount to which the stack should be aligned. */
2344 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2346 /* Operand 0 is a pointer-to-function; get the type of the function. */
2347 funtype = TREE_TYPE (addr);
2348 if (! POINTER_TYPE_P (funtype))
2350 funtype = TREE_TYPE (funtype);
2352 /* See if this is a call to a function that can return more than once
2353 or a call to longjmp or malloc. */
2354 flags |= special_function_p (fndecl, flags);
2356 if (flags & ECF_MAY_BE_ALLOCA)
2357 current_function_calls_alloca = 1;
2359 /* If struct_value_rtx is 0, it means pass the address
2360 as if it were an extra parameter. */
2361 if (structure_value_addr && struct_value_rtx == 0)
2363 /* If structure_value_addr is a REG other than
2364 virtual_outgoing_args_rtx, we can always use it. If it
2365 is not a REG, we must always copy it into a register.
2366 If it is virtual_outgoing_args_rtx, we must copy it to another
2367 register in some cases. */
2368 rtx temp = (GET_CODE (structure_value_addr) != REG
2369 || (ACCUMULATE_OUTGOING_ARGS
2370 && stack_arg_under_construction
2371 && structure_value_addr == virtual_outgoing_args_rtx)
2372 ? copy_addr_to_reg (structure_value_addr)
2373 : structure_value_addr);
2376 = tree_cons (error_mark_node,
2377 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2380 structure_value_addr_parm = 1;
2383 /* Count the arguments and set NUM_ACTUALS. */
2384 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2387 /* Compute number of named args.
2388 Normally, don't include the last named arg if anonymous args follow.
2389 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2390 (If no anonymous args follow, the result of list_length is actually
2391 one too large. This is harmless.)
2393 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2394 zero, this machine will be able to place unnamed args that were
2395 passed in registers into the stack. So treat all args as named.
2396 This allows the insns emitted for a specific argument list to be
2397 independent of the function declaration.
2399 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2400 reliable way to pass unnamed args in registers, so we must force
2401 them into memory. */
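/* Illustrative example (assuming neither STRICT_ARGUMENT_NAMING nor
   PRETEND_OUTGOING_VARARGS_NAMED holds, and no struct value parm): for a
   prototype `int f (int, int)' the type list ends with the void
   terminator, so list_length returns 3 and we record 2 named args; for a
   varargs prototype `int f (int, ...)' there is no terminator, so
   list_length returns 1 and we record 0 named args, i.e. the last named
   arg is treated like the anonymous ones that follow it.  */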
2403 if ((STRICT_ARGUMENT_NAMING
2404 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2405 && TYPE_ARG_TYPES (funtype) != 0)
2407 = (list_length (TYPE_ARG_TYPES (funtype))
2408 /* Don't include the last named arg. */
2409 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2410 /* Count the struct value address, if it is passed as a parm. */
2411 + structure_value_addr_parm);
2413 /* If we know nothing, treat all args as named. */
2414 n_named_args = num_actuals;
2416 /* Start updating where the next arg would go.
2418 On some machines (such as the PA) indirect calls have a different
2419 calling convention than normal calls. The last argument in
2420 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2422 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2424 /* Make a vector to hold all the information about each arg. */
2425 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2426 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2428 /* Build up entries in the ARGS array, compute the size of the
2429 arguments into ARGS_SIZE, etc. */
2430 initialize_argument_information (num_actuals, args, &args_size,
2431 n_named_args, actparms, fndecl,
2432 &args_so_far, reg_parm_stack_space,
2433 &old_stack_level, &old_pending_adj,
2434 &must_preallocate, &flags);
2438 /* If this function requires a variable-sized argument list, don't
2439 try to make a cse'able block for this call. We may be able to
2440 do this eventually, but it is too complicated to keep track of
2441 what insns go in the cse'able block and which don't. */
2443 flags &= ~ECF_LIBCALL_BLOCK;
2444 must_preallocate = 1;
2447 /* Now make final decision about preallocating stack space. */
2448 must_preallocate = finalize_must_preallocate (must_preallocate,
2452 /* If the structure value address will reference the stack pointer, we
2453 must stabilize it. We don't need to do this if we know that we are
2454 not going to adjust the stack pointer in processing this call. */
2456 if (structure_value_addr
2457 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2458 || reg_mentioned_p (virtual_outgoing_args_rtx,
2459 structure_value_addr))
2461 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2462 structure_value_addr = copy_to_reg (structure_value_addr);
2464 /* Tail calls can make things harder to debug, and we've traditionally
2465 pushed these optimizations into -O2. Don't try if we're already
2466 expanding a call, as that means we're an argument. Don't try if
2467 there are cleanups, as we know there's code to follow the call.
2469 If rtx_equal_function_value_matters is false, that means we've
2470 finished with regular parsing, which means that some of the
2471 machinery we use to generate tail-calls is no longer in place.
2472 This is most often true of sjlj-exceptions, which we couldn't
2473 tail-call to anyway. */
2475 if (currently_expanding_call++ != 0
2476 || !flag_optimize_sibling_calls
2477 || !rtx_equal_function_value_matters
2478 || any_pending_cleanups (1)
2480 try_tail_call = try_tail_recursion = 0;
2482 /* Tail recursion fails when we are not dealing with recursive calls. */
2483 if (!try_tail_recursion
2484 || TREE_CODE (addr) != ADDR_EXPR
2485 || TREE_OPERAND (addr, 0) != current_function_decl)
2486 try_tail_recursion = 0;
2488 /* Other reasons for tail call optimization to fail. */
2490 #ifdef HAVE_sibcall_epilogue
2491 !HAVE_sibcall_epilogue
2496 /* Doing sibling call optimization needs some work, since
2497 structure_value_addr can be allocated on the stack.
2498 It does not seem worth the effort since few optimizable
2499 sibling calls will return a structure. */
2500 || structure_value_addr != NULL_RTX
2501 /* Check whether the target is able to optimize the call
2503 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2504 /* Functions that do not return exactly once may not be sibcall
2506 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2507 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2508 /* If this function requires more stack slots than the current
2509 function, we cannot change it into a sibling call. */
2510 || args_size.constant > current_function_args_size
2511 /* If the callee pops its own arguments, then it must pop exactly
2512 the same number of arguments as the current function. */
2513 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2514 != RETURN_POPS_ARGS (current_function_decl,
2515 TREE_TYPE (current_function_decl),
2516 current_function_args_size))
2519 if (try_tail_call || try_tail_recursion)
2522 actparms = NULL_TREE;
2523 /* Ok, we're going to give the tail call the old college try.
2524 This means we're going to evaluate the function arguments
2525 up to three times. There are two degrees of badness we can
2526 encounter, those that can be unsaved and those that can't.
2527 (See unsafe_for_reeval commentary for details.)
2529 Generate a new argument list. Pass safe arguments through
2530 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2531 For hard badness, evaluate them now and put their resulting
2532 rtx in a temporary VAR_DECL.
2534 initialize_argument_information has ordered the array for the
2535 order to be pushed, and we must remember this when reconstructing
2536 the original argument order. */
2538 if (PUSH_ARGS_REVERSED)
2547 i = num_actuals - 1;
2551 for (; i != end; i += inc)
2553 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2554 /* We need to build actparms for optimize_tail_recursion. We can
2555 safely trash away TREE_PURPOSE, since it is unused by this
2557 if (try_tail_recursion)
2558 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2560 /* Do the same for the function address if it is an expression. */
2562 addr = fix_unsafe_tree (addr);
2563 /* Expanding one of those dangerous arguments could have added
2564 cleanups, but otherwise give it a whirl. */
2565 if (any_pending_cleanups (1))
2566 try_tail_call = try_tail_recursion = 0;
2569 /* Generate a tail recursion sequence when calling ourselves. */
2571 if (try_tail_recursion)
2573 /* We want to emit any pending stack adjustments before the tail
2574 recursion "call". That way we know any adjustment after the tail
2575 recursion call can be ignored if we indeed use the tail recursion
2577 int save_pending_stack_adjust = pending_stack_adjust;
2578 int save_stack_pointer_delta = stack_pointer_delta;
2580 /* Emit any queued insns now; otherwise they would end up in
2581 only one of the alternates. */
2584 /* Use a new sequence to hold any RTL we generate. We do not even
2585 know if we will use this RTL yet. The final decision can not be
2586 made until after RTL generation for the entire function is
2589 /* If expanding any of the arguments creates cleanups, we can't
2590 do a tailcall. So, we'll need to pop the pending cleanups
2591 list. If, however, all goes well, and there are no cleanups
2592 then the call to expand_start_target_temps will have no
2594 expand_start_target_temps ();
2595 if (optimize_tail_recursion (actparms, get_last_insn ()))
2597 if (any_pending_cleanups (1))
2598 try_tail_call = try_tail_recursion = 0;
2600 tail_recursion_insns = get_insns ();
2602 expand_end_target_temps ();
2605 /* Restore the original pending stack adjustment for the sibling and
2606 normal call cases below. */
2607 pending_stack_adjust = save_pending_stack_adjust;
2608 stack_pointer_delta = save_stack_pointer_delta;
2611 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2613 /* A fork duplicates the profile information, and an exec discards
2614 it. We can't rely on fork/exec to be paired. So write out the
2615 profile information we have gathered so far, and clear it. */
2616 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2617 is subject to race conditions, just as with multithreaded
2620 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__gcov_flush"),
2625 /* Ensure current function's preferred stack boundary is at least
2626 what we need. We don't have to increase alignment for recursive
2628 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2629 && fndecl != current_function_decl)
2630 cfun->preferred_stack_boundary = preferred_stack_boundary;
2632 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
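/* For example, a preferred boundary of 128 bits becomes 16 bytes here
   (assuming the usual BITS_PER_UNIT of 8).  */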
2634 function_call_count++;
2636 /* We want to make two insn chains; one for a sibling call, the other
2637 for a normal call. We will select one of the two chains after
2638 initial RTL generation is complete. */
2639 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2641 int sibcall_failure = 0;
2642 /* We want to emit any pending stack adjustments before the tail
2643 recursion "call". That way we know any adjustment after the tail
2644 recursion call can be ignored if we indeed use the tail recursion
2646 int save_pending_stack_adjust = 0;
2647 int save_stack_pointer_delta = 0;
2649 rtx before_call, next_arg_reg;
2653 /* Emit any queued insns now; otherwise they would end up in
2654 only one of the alternates. */
2657 /* State variables we need to save and restore between
2659 save_pending_stack_adjust = pending_stack_adjust;
2660 save_stack_pointer_delta = stack_pointer_delta;
2663 flags &= ~ECF_SIBCALL;
2665 flags |= ECF_SIBCALL;
2667 /* Other state variables that we must reinitialize each time
2668 through the loop (that are not initialized by the loop itself). */
2672 /* Start a new sequence for the normal call case.
2674 From this point on, if the sibling call fails, we want to set
2675 sibcall_failure instead of continuing the loop. */
2680 /* We know at this point that there are not currently any
2681 pending cleanups. If, however, in the process of evaluating
2682 the arguments we were to create some, we'll need to be
2683 able to get rid of them. */
2684 expand_start_target_temps ();
2687 /* Don't let pending stack adjusts add up to too much.
2688 Also, do all pending adjustments now if there is any chance
2689 this might be a call to alloca or if we are expanding a sibling
2690 call sequence or if we are calling a function that is to return
2691 with stack pointer depressed. */
2692 if (pending_stack_adjust >= 32
2693 || (pending_stack_adjust > 0
2694 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2696 do_pending_stack_adjust ();
2698 /* When calling a const function, we must pop the stack args right away,
2699 so that the pop is deleted or moved with the call. */
2700 if (pass && (flags & ECF_LIBCALL_BLOCK))
2703 #ifdef FINAL_REG_PARM_STACK_SPACE
2704 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2707 /* Precompute any arguments as needed. */
2709 precompute_arguments (flags, num_actuals, args);
2711 /* Now we are about to start emitting insns that can be deleted
2712 if a libcall is deleted. */
2713 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2716 adjusted_args_size = args_size;
2717 /* Compute the actual size of the argument block required. The variable
2718 and constant sizes must be combined, the size may have to be rounded,
2719 and there may be a minimum required size. When generating a sibcall
2720 pattern, do not round up, since we'll be re-using whatever space our
2722 unadjusted_args_size
2723 = compute_argument_block_size (reg_parm_stack_space,
2724 &adjusted_args_size,
2726 : preferred_stack_boundary));
2728 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2730 /* The argument block when performing a sibling call is the
2731 incoming argument block. */
2734 argblock = virtual_incoming_args_rtx;
2736 #ifdef STACK_GROWS_DOWNWARD
2737 = plus_constant (argblock, current_function_pretend_args_size);
2739 = plus_constant (argblock, -current_function_pretend_args_size);
2741 stored_args_map = sbitmap_alloc (args_size.constant);
2742 sbitmap_zero (stored_args_map);
2745 /* If we have no actual push instructions, or shouldn't use them,
2746 make space for all args right now. */
2747 else if (adjusted_args_size.var != 0)
2749 if (old_stack_level == 0)
2751 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2752 old_pending_adj = pending_stack_adjust;
2753 pending_stack_adjust = 0;
2754 /* stack_arg_under_construction says whether a stack arg is
2755 being constructed at the old stack level. Pushing the stack
2756 gets a clean outgoing argument block. */
2757 old_stack_arg_under_construction = stack_arg_under_construction;
2758 stack_arg_under_construction = 0;
2760 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2764 /* Note that we must go through the motions of allocating an argument
2765 block even if the size is zero because we may be storing args
2766 in the area reserved for register arguments, which may be part of
2769 int needed = adjusted_args_size.constant;
2771 /* Store the maximum argument space used. It will be pushed by
2772 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2775 if (needed > current_function_outgoing_args_size)
2776 current_function_outgoing_args_size = needed;
2778 if (must_preallocate)
2780 if (ACCUMULATE_OUTGOING_ARGS)
2782 /* Since the stack pointer will never be pushed, it is
2783 possible for the evaluation of a parm to clobber
2784 something we have already written to the stack.
2785 Since most function calls on RISC machines do not use
2786 the stack, this is uncommon, but must work correctly.
2788 Therefore, we save any area of the stack that was already
2789 written and that we are using. Here we set up to do this
2790 by making a new stack usage map from the old one. The
2791 actual save will be done by store_one_arg.
2793 Another approach might be to try to reorder the argument
2794 evaluations to avoid this conflicting stack usage. */
2796 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2797 /* Since we will be writing into the entire argument area,
2798 the map must be allocated for its entire size, not just
2799 the part that is the responsibility of the caller. */
2800 needed += reg_parm_stack_space;
2803 #ifdef ARGS_GROW_DOWNWARD
2804 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2807 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2811 = (char *) alloca (highest_outgoing_arg_in_use);
2813 if (initial_highest_arg_in_use)
2814 memcpy (stack_usage_map, initial_stack_usage_map,
2815 initial_highest_arg_in_use);
2817 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2818 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2819 (highest_outgoing_arg_in_use
2820 - initial_highest_arg_in_use));
2823 /* The address of the outgoing argument list must not be
2824 copied to a register here, because argblock would be left
2825 pointing to the wrong place after the call to
2826 allocate_dynamic_stack_space below. */
2828 argblock = virtual_outgoing_args_rtx;
2832 if (inhibit_defer_pop == 0)
2834 /* Try to reuse some or all of the pending_stack_adjust
2835 to get this space. */
2837 = (combine_pending_stack_adjustment_and_call
2838 (unadjusted_args_size,
2839 &adjusted_args_size,
2840 preferred_unit_stack_boundary));
2842 /* combine_pending_stack_adjustment_and_call computes
2843 an adjustment before the arguments are allocated.
2844 Account for them and see whether or not the stack
2845 needs to go up or down. */
2846 needed = unadjusted_args_size - needed;
2850 /* We're releasing stack space. */
2851 /* ??? We can avoid any adjustment at all if we're
2852 already aligned. FIXME. */
2853 pending_stack_adjust = -needed;
2854 do_pending_stack_adjust ();
2858 /* We need to allocate space. We'll do that in
2859 push_block below. */
2860 pending_stack_adjust = 0;
2863 /* Special case this because overhead of `push_block' in
2864 this case is non-trivial. */
2866 argblock = virtual_outgoing_args_rtx;
2868 argblock = push_block (GEN_INT (needed), 0, 0);
2870 /* We only really need to call `copy_to_reg' in the case
2871 where push insns are going to be used to pass ARGBLOCK
2872 to a function call in ARGS. In that case, the stack
2873 pointer changes value from the allocation point to the
2874 call point, and hence the value of
2875 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2876 as well always do it. */
2877 argblock = copy_to_reg (argblock);
2879 /* The save/restore code in store_one_arg handles all
2880 cases except one: a constructor call (including a C
2881 function returning a BLKmode struct) to initialize
2883 if (stack_arg_under_construction)
2885 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2886 rtx push_size = GEN_INT (reg_parm_stack_space
2887 + adjusted_args_size.constant);
2889 rtx push_size = GEN_INT (adjusted_args_size.constant);
2891 if (old_stack_level == 0)
2893 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2895 old_pending_adj = pending_stack_adjust;
2896 pending_stack_adjust = 0;
2897 /* stack_arg_under_construction says whether a stack
2898 arg is being constructed at the old stack level.
2899 Pushing the stack gets a clean outgoing argument
2901 old_stack_arg_under_construction
2902 = stack_arg_under_construction;
2903 stack_arg_under_construction = 0;
2904 /* Make a new map for the new argument list. */
2905 stack_usage_map = (char *)
2906 alloca (highest_outgoing_arg_in_use);
2907 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2908 highest_outgoing_arg_in_use = 0;
2910 allocate_dynamic_stack_space (push_size, NULL_RTX,
2913 /* If argument evaluation might modify the stack pointer,
2914 copy the address of the argument list to a register. */
2915 for (i = 0; i < num_actuals; i++)
2916 if (args[i].pass_on_stack)
2918 argblock = copy_addr_to_reg (argblock);
2925 compute_argument_addresses (args, argblock, num_actuals);
2927 /* If we push args individually in reverse order, perform stack alignment
2928 before the first push (the last arg). */
2929 if (PUSH_ARGS_REVERSED && argblock == 0
2930 && adjusted_args_size.constant != unadjusted_args_size)
2932 /* When the stack adjustment is pending, we get better code
2933 by combining the adjustments. */
2934 if (pending_stack_adjust
2935 && ! (flags & ECF_LIBCALL_BLOCK)
2936 && ! inhibit_defer_pop)
2938 pending_stack_adjust
2939 = (combine_pending_stack_adjustment_and_call
2940 (unadjusted_args_size,
2941 &adjusted_args_size,
2942 preferred_unit_stack_boundary));
2943 do_pending_stack_adjust ();
2945 else if (argblock == 0)
2946 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2947 - unadjusted_args_size));
2949 /* Now that the stack is properly aligned, pops can't safely
2950 be deferred during the evaluation of the arguments. */
2953 funexp = rtx_for_function_call (fndecl, addr);
2955 /* Figure out the register where the value, if any, will come back. */
2957 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2958 && ! structure_value_addr)
2960 if (pcc_struct_value)
2961 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2962 fndecl, (pass == 0));
2964 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2967 /* Precompute all register parameters. It isn't safe to compute anything
2968 once we have started filling any specific hard regs. */
2969 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2971 #ifdef REG_PARM_STACK_SPACE
2972 /* Save the fixed argument area if it's part of the caller's frame and
2973 is clobbered by argument setup for this call. */
2974 if (ACCUMULATE_OUTGOING_ARGS && pass)
2975 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2976 &low_to_save, &high_to_save);
2979 /* Now store (and compute if necessary) all non-register parms.
2980 These come before register parms, since they can require block-moves,
2981 which could clobber the registers used for register parms.
2982 Parms which have partial registers are not stored here,
2983 but we do preallocate space here if they want that. */
2985 for (i = 0; i < num_actuals; i++)
2986 if (args[i].reg == 0 || args[i].pass_on_stack)
2988 rtx before_arg = get_last_insn ();
2990 if (store_one_arg (&args[i], argblock, flags,
2991 adjusted_args_size.var != 0,
2992 reg_parm_stack_space)
2994 && check_sibcall_argument_overlap (before_arg,
2996 sibcall_failure = 1;
2999 /* If we have a parm that is passed in registers but not in memory
3000 and whose alignment does not permit a direct copy into registers,
3001 make a group of pseudos that correspond to each register that we
3003 if (STRICT_ALIGNMENT)
3004 store_unaligned_arguments_into_pseudos (args, num_actuals);
3006 /* Now store any partially-in-registers parm.
3007 This is the last place a block-move can happen. */
3009 for (i = 0; i < num_actuals; i++)
3010 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3012 rtx before_arg = get_last_insn ();
3014 if (store_one_arg (&args[i], argblock, flags,
3015 adjusted_args_size.var != 0,
3016 reg_parm_stack_space)
3018 && check_sibcall_argument_overlap (before_arg,
3020 sibcall_failure = 1;
3023 /* If we pushed args in forward order, perform stack alignment
3024 after pushing the last arg. */
3025 if (!PUSH_ARGS_REVERSED && argblock == 0)
3026 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3027 - unadjusted_args_size));
3029 /* If register arguments require space on the stack and stack space
3030 was not preallocated, allocate stack space here for arguments
3031 passed in registers. */
3032 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3033 if (!ACCUMULATE_OUTGOING_ARGS
3034 && must_preallocate == 0 && reg_parm_stack_space > 0)
3035 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3038 /* Pass the function the address in which to return a
3040 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3042 emit_move_insn (struct_value_rtx,
3044 force_operand (structure_value_addr,
3047 if (GET_CODE (struct_value_rtx) == REG)
3048 use_reg (&call_fusage, struct_value_rtx);
3051 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3052 reg_parm_seen, pass == 0);
3054 load_register_parameters (args, num_actuals, &call_fusage, flags,
3055 pass == 0, &sibcall_failure);
3057 /* Perform postincrements before actually calling the function. */
3060 /* Save a pointer to the last insn before the call, so that we can
3061 later safely search backwards to find the CALL_INSN. */
3062 before_call = get_last_insn ();
3064 /* Set up next argument register. For sibling calls on machines
3065 with register windows this should be the incoming register. */
3066 #ifdef FUNCTION_INCOMING_ARG
3068 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3072 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3075 /* All arguments and registers used for the call must be set up by
3078 /* Stack must be properly aligned now. */
3079 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3082 /* Generate the actual call instruction. */
3083 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3084 adjusted_args_size.constant, struct_value_size,
3085 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3086 flags, & args_so_far);
3088 /* Verify that we've deallocated all the stack we used. */
3090 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3093 /* If call is cse'able, make appropriate pair of reg-notes around it.
3094 Test valreg so we don't crash; may safely ignore `const'
3095 if return type is void. Disable for PARALLEL return values, because
3096 we have no way to move such values into a pseudo register. */
3097 if (pass && (flags & ECF_LIBCALL_BLOCK))
3101 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3103 insns = get_insns ();
3110 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3112 /* Mark the return value as a pointer if needed. */
3113 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3114 mark_reg_pointer (temp,
3115 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3117 /* Construct an "equal form" for the value which mentions all the
3118 arguments in order as well as the function name. */
3119 for (i = 0; i < num_actuals; i++)
3120 note = gen_rtx_EXPR_LIST (VOIDmode,
3121 args[i].initial_value, note);
3122 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3124 insns = get_insns ();
3127 if (flags & ECF_PURE)
3128 note = gen_rtx_EXPR_LIST (VOIDmode,
3129 gen_rtx_USE (VOIDmode,
3130 gen_rtx_MEM (BLKmode,
3131 gen_rtx_SCRATCH (VOIDmode))),
3134 emit_libcall_block (insns, temp, valreg, note);
3139 else if (pass && (flags & ECF_MALLOC))
3141 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3144 /* The return value from a malloc-like function is a pointer. */
3145 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3146 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3148 emit_move_insn (temp, valreg);
3150 /* The return value from a malloc-like function can not alias
3152 last = get_last_insn ();
3154 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3156 /* Write out the sequence. */
3157 insns = get_insns ();
3163 /* For calls to `setjmp', etc., inform flow.c it should complain
3164 if nonvolatile values are live. For functions that cannot return,
3165 inform flow that control does not fall through. */
3167 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3169 /* The barrier must be emitted
3170 immediately after the CALL_INSN. Some ports emit more
3171 than just a CALL_INSN above, so we must search for it here. */
3173 rtx last = get_last_insn ();
3174 while (GET_CODE (last) != CALL_INSN)
3176 last = PREV_INSN (last);
3177 /* There was no CALL_INSN? */
3178 if (last == before_call)
3182 emit_barrier_after (last);
3185 if (flags & ECF_LONGJMP)
3186 current_function_calls_longjmp = 1;
3188 /* If this function is returning into a memory location marked as
3189 readonly, it means it is initializing that location. But we normally
3190 treat functions as not clobbering such locations, so we need to
3191 specify that this one does. */
3192 if (target != 0 && GET_CODE (target) == MEM
3193 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3194 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3196 /* If value type not void, return an rtx for the value. */
3198 /* If there are cleanups to be called, don't use a hard reg as target.
3199 We need to double check this and see if it matters anymore. */
3200 if (any_pending_cleanups (1))
3202 if (target && REG_P (target)
3203 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3205 sibcall_failure = 1;
3208 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3210 target = const0_rtx;
3211 else if (structure_value_addr)
3213 if (target == 0 || GET_CODE (target) != MEM)
3216 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3217 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3218 structure_value_addr));
3219 set_mem_attributes (target, exp, 1);
3222 else if (pcc_struct_value)
3224 /* This is the special C++ case where we need to
3225 know what the true target was. We take care to
3226 never use this value more than once in one expression. */
3227 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3228 copy_to_reg (valreg));
3229 set_mem_attributes (target, exp, 1);
3231 /* Handle calls that return values in multiple non-contiguous locations.
3232 The Irix 6 ABI has examples of this. */
3233 else if (GET_CODE (valreg) == PARALLEL)
3237 /* This will only be assigned once, so it can be readonly. */
3238 tree nt = build_qualified_type (TREE_TYPE (exp),
3239 (TYPE_QUALS (TREE_TYPE (exp))
3240 | TYPE_QUAL_CONST));
3242 target = assign_temp (nt, 0, 1, 1);
3243 preserve_temp_slots (target);
3246 if (! rtx_equal_p (target, valreg))
3247 emit_group_store (target, valreg,
3248 int_size_in_bytes (TREE_TYPE (exp)));
3250 /* We can not support sibling calls for this case. */
3251 sibcall_failure = 1;
3254 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3255 && GET_MODE (target) == GET_MODE (valreg))
3257 /* TARGET and VALREG cannot be equal at this point because the
3258 latter would not have REG_FUNCTION_VALUE_P true, while the
3259 former would if it were referring to the same register.
3261 If they refer to the same register, this move will be a no-op,
3262 except when function inlining is being done. */
3263 emit_move_insn (target, valreg);
3265 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3267 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3269 /* We can not support sibling calls for this case. */
3270 sibcall_failure = 1;
3273 target = copy_to_reg (valreg);
3275 #ifdef PROMOTE_FUNCTION_RETURN
3276 /* If we promoted this return value, make the proper SUBREG. TARGET
3277 might be const0_rtx here, so be careful. */
3278 if (GET_CODE (target) == REG
3279 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3280 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3282 tree type = TREE_TYPE (exp);
3283 int unsignedp = TREE_UNSIGNED (type);
3286 /* If we don't promote as expected, something is wrong. */
3287 if (GET_MODE (target)
3288 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3291 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3292 && GET_MODE_SIZE (GET_MODE (target))
3293 > GET_MODE_SIZE (TYPE_MODE (type)))
3295 offset = GET_MODE_SIZE (GET_MODE (target))
3296 - GET_MODE_SIZE (TYPE_MODE (type));
3297 if (! BYTES_BIG_ENDIAN)
3298 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3299 else if (! WORDS_BIG_ENDIAN)
3300 offset %= UNITS_PER_WORD;
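/* Illustrative example: a 4-byte SImode value promoted into an 8-byte
   DImode register on a target where both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN hold leaves offset == 4, so the SUBREG built below
   selects the low-order half of the register, where the promoted value
   actually lives.  */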
3302 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3303 SUBREG_PROMOTED_VAR_P (target) = 1;
3304 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3308 /* If size of args is variable or this was a constructor call for a stack
3309 argument, restore saved stack-pointer value. */
3311 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3313 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3314 pending_stack_adjust = old_pending_adj;
3315 stack_arg_under_construction = old_stack_arg_under_construction;
3316 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3317 stack_usage_map = initial_stack_usage_map;
3318 sibcall_failure = 1;
3320 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3322 #ifdef REG_PARM_STACK_SPACE
3324 restore_fixed_argument_area (save_area, argblock,
3325 high_to_save, low_to_save);
3328 /* If we saved any argument areas, restore them. */
3329 for (i = 0; i < num_actuals; i++)
3330 if (args[i].save_area)
3332 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3334 = gen_rtx_MEM (save_mode,
3335 memory_address (save_mode,
3336 XEXP (args[i].stack_slot, 0)));
3338 if (save_mode != BLKmode)
3339 emit_move_insn (stack_area, args[i].save_area);
3341 emit_block_move (stack_area, args[i].save_area,
3342 GEN_INT (args[i].size.constant),
3343 BLOCK_OP_CALL_PARM);
3346 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3347 stack_usage_map = initial_stack_usage_map;
3350 /* If this was alloca, record the new stack level for nonlocal gotos.
3351 Check for the handler slots since we might not have a save area
3352 for non-local gotos. */
3354 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3355 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3357 /* Free up storage we no longer need. */
3358 for (i = 0; i < num_actuals; ++i)
3359 if (args[i].aligned_regs)
3360 free (args[i].aligned_regs);
3364 /* Undo the fake expand_start_target_temps we did earlier. If
3365 there had been any cleanups created, we've already set
3367 expand_end_target_temps ();
3370 insns = get_insns ();
3375 tail_call_insns = insns;
3377 /* Restore the pending stack adjustment now that we have
3378 finished generating the sibling call sequence. */
3380 pending_stack_adjust = save_pending_stack_adjust;
3381 stack_pointer_delta = save_stack_pointer_delta;
3383 /* Prepare arg structure for next iteration. */
3384 for (i = 0; i < num_actuals; i++)
3387 args[i].aligned_regs = 0;
3391 sbitmap_free (stored_args_map);
3394 normal_call_insns = insns;
3396 /* If something prevents making this a sibling call,
3397 zero out the sequence. */
3398 if (sibcall_failure)
3399 tail_call_insns = NULL_RTX;
3402 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3403 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3404 can happen if the arguments to this function call an inline
3405 function whose expansion contains another CALL_PLACEHOLDER.
3407 If there are any C_Ps in any of these sequences, replace them
3408 with their normal call. */
3410 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3411 if (GET_CODE (insn) == CALL_INSN
3412 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3413 replace_call_placeholder (insn, sibcall_use_normal);
3415 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3416 if (GET_CODE (insn) == CALL_INSN
3417 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3418 replace_call_placeholder (insn, sibcall_use_normal);
3420 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3421 if (GET_CODE (insn) == CALL_INSN
3422 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3423 replace_call_placeholder (insn, sibcall_use_normal);
3425 /* If this was a potential tail recursion site, then emit a
3426 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3427 One of them will be selected later. */
3428 if (tail_recursion_insns || tail_call_insns)
3430 /* The tail recursion label must be kept around. We could expose
3431 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3432 and makes determining true tail recursion sites difficult.
3434 So we set LABEL_PRESERVE_P here, then clear it when we select
3435 one of the call sequences after rtl generation is complete. */
3436 if (tail_recursion_insns)
3437 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3438 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3440 tail_recursion_insns,
3441 tail_recursion_label));
3444 emit_insn (normal_call_insns);
3446 currently_expanding_call--;
3448 /* If this function returns with the stack pointer depressed, ensure
3449 this block saves and restores the stack pointer, show it was
3450 changed, and adjust for any outgoing arg space. */
3451 if (flags & ECF_SP_DEPRESSED)
3453 clear_pending_stack_adjust ();
3454 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3455 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3456 save_stack_pointer ();
3462 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3463 The RETVAL parameter specifies whether the return value needs to be saved; the other
3464 parameters are documented in the emit_library_call function below. */
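/* A typical caller passes NARGS (value, mode) pairs after the fixed
   arguments; for instance (an illustrative call, not taken from this
   file, with dest_addr, fill_value and size_rtx standing for
   caller-provided rtxes):

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        dest_addr, Pmode,
                        fill_value, TYPE_MODE (integer_type_node),
                        size_rtx, TYPE_MODE (sizetype));  */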
3467 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3471 enum libcall_type fn_type;
3472 enum machine_mode outmode;
3476 /* Total size in bytes of all the stack-parms scanned so far. */
3477 struct args_size args_size;
3478 /* Size of arguments before any adjustments (such as rounding). */
3479 struct args_size original_args_size;
3484 struct args_size alignment_pad;
3486 CUMULATIVE_ARGS args_so_far;
3490 enum machine_mode mode;
3493 struct args_size offset;
3494 struct args_size size;
3498 int old_inhibit_defer_pop = inhibit_defer_pop;
3499 rtx call_fusage = 0;
3502 int pcc_struct_value = 0;
3503 int struct_value_size = 0;
3505 int reg_parm_stack_space = 0;
3508 tree tfom; /* type_for_mode (outmode, 0) */
3510 #ifdef REG_PARM_STACK_SPACE
3511 /* Define the boundary of the register parm stack space that needs to be
3513 int low_to_save, high_to_save;
3514 rtx save_area = 0; /* Place that it is saved. */
3517 /* Snapshot of the outgoing argument area usage, restored after the call. */
3518 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3519 char *initial_stack_usage_map = stack_usage_map;
3521 #ifdef REG_PARM_STACK_SPACE
3522 #ifdef MAYBE_REG_PARM_STACK_SPACE
3523 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3525 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3529 /* By default, library functions can not throw. */
3530 flags = ECF_NOTHROW;
3542 case LCT_CONST_MAKE_BLOCK:
3543 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3545 case LCT_PURE_MAKE_BLOCK:
3546 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3549 flags |= ECF_NORETURN;
3552 flags = ECF_NORETURN;
3554 case LCT_ALWAYS_RETURN:
3555 flags = ECF_ALWAYS_RETURN;
3557 case LCT_RETURNS_TWICE:
3558 flags = ECF_RETURNS_TWICE;
3563 /* Ensure current function's preferred stack boundary is at least
3565 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3566 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3568 /* If this kind of value comes back in memory,
3569 decide where in memory it should come back. */
3570 if (outmode != VOIDmode)
3572 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3573 if (aggregate_value_p (tfom))
3575 #ifdef PCC_STATIC_STRUCT_RETURN
3577 = hard_function_value (build_pointer_type (tfom), 0, 0);
3578 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3579 pcc_struct_value = 1;
3581 value = gen_reg_rtx (outmode);
3582 #else /* not PCC_STATIC_STRUCT_RETURN */
3583 struct_value_size = GET_MODE_SIZE (outmode);
3584 if (value != 0 && GET_CODE (value) == MEM)
3587 mem_value = assign_temp (tfom, 0, 1, 1);
3589 /* This call returns a big structure. */
3590 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3594 tfom = void_type_node;
3596 /* ??? Unfinished: must pass the memory address as an argument. */
3598 /* Copy all the libcall-arguments out of the varargs data
3599 and into a vector ARGVEC.
3601 Compute how to pass each argument. We only support a very small subset
3602 of the full argument passing conventions to limit complexity here since
3603 library functions shouldn't have many args. */
3605 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3606 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3608 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3609 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3611 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3614 args_size.constant = 0;
3619 /* Now we are about to start emitting insns that can be deleted
3620 if a libcall is deleted. */
3621 if (flags & ECF_LIBCALL_BLOCK)
3626 /* If there's a structure value address to be passed,
3627 either pass it in the special place, or pass it as an extra argument. */
3628 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3630 rtx addr = XEXP (mem_value, 0);
3633 /* Make sure it is a reasonable operand for a move or push insn. */
3634 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3635 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3636 addr = force_operand (addr, NULL_RTX);
3638 argvec[count].value = addr;
3639 argvec[count].mode = Pmode;
3640 argvec[count].partial = 0;
3642 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3643 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3644 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3648 locate_and_pad_parm (Pmode, NULL_TREE,
3649 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3652 argvec[count].reg != 0,
3654 NULL_TREE, &args_size, &argvec[count].offset,
3655 &argvec[count].size, &alignment_pad);
3657 if (argvec[count].reg == 0 || argvec[count].partial != 0
3658 || reg_parm_stack_space > 0)
3659 args_size.constant += argvec[count].size.constant;
3661 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3666 for (; count < nargs; count++)
3668 rtx val = va_arg (p, rtx);
3669 enum machine_mode mode = va_arg (p, enum machine_mode);
3671 /* We cannot convert the arg value to the mode the library wants here;
3672 must do it earlier where we know the signedness of the arg. */
3674 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3677 /* On some machines, there's no way to pass a float to a library fcn.
3678 Pass it as a double instead. */
3679 #ifdef LIBGCC_NEEDS_DOUBLE
3680 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3681 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3684 /* There's no need to call protect_from_queue, because
3685 either emit_move_insn or emit_push_insn will do that. */
3687 /* Make sure it is a reasonable operand for a move or push insn. */
3688 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3689 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3690 val = force_operand (val, NULL_RTX);
3692 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3693 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3697 #ifdef FUNCTION_ARG_CALLEE_COPIES
3698 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3703 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3704 functions, so we have to pretend this isn't such a function. */
3705 if (flags & ECF_LIBCALL_BLOCK)
3707 rtx insns = get_insns ();
3711 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3713 /* If this was a CONST function, it is now PURE since
3714 it now reads memory. */
3715 if (flags & ECF_CONST)
3717 flags &= ~ECF_CONST;
3721 if (GET_MODE (val) == MEM && ! must_copy)
3725 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3727 emit_move_insn (slot, val);
3731 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3734 = gen_rtx_MEM (mode,
3735 expand_expr (build1 (ADDR_EXPR,
3736 build_pointer_type (type),
3737 make_tree (type, val)),
3738 NULL_RTX, VOIDmode, 0));
3741 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3742 gen_rtx_USE (VOIDmode, slot),
3745 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3746 gen_rtx_CLOBBER (VOIDmode,
3751 val = force_operand (XEXP (slot, 0), NULL_RTX);
3755 argvec[count].value = val;
3756 argvec[count].mode = mode;
3758 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3760 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3761 argvec[count].partial
3762 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3764 argvec[count].partial = 0;
3767 locate_and_pad_parm (mode, NULL_TREE,
3768 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3771 argvec[count].reg != 0,
3773 NULL_TREE, &args_size, &argvec[count].offset,
3774 &argvec[count].size, &alignment_pad);
3776 if (argvec[count].size.var)
3779 if (reg_parm_stack_space == 0 && argvec[count].partial)
3780 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3782 if (argvec[count].reg == 0 || argvec[count].partial != 0
3783 || reg_parm_stack_space > 0)
3784 args_size.constant += argvec[count].size.constant;
3786 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3789 #ifdef FINAL_REG_PARM_STACK_SPACE
3790 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3793 /* If this machine requires an external definition for library
3794 functions, write one out. */
3795 assemble_external_libcall (fun);
3797 original_args_size = args_size;
3798 args_size.constant = (((args_size.constant
3799 + stack_pointer_delta
3803 - stack_pointer_delta);
3805 args_size.constant = MAX (args_size.constant,
3806 reg_parm_stack_space);
3808 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3809 args_size.constant -= reg_parm_stack_space;
3812 if (args_size.constant > current_function_outgoing_args_size)
3813 current_function_outgoing_args_size = args_size.constant;
3815 if (ACCUMULATE_OUTGOING_ARGS)
3817 /* Since the stack pointer will never be pushed, it is possible for
3818 the evaluation of a parm to clobber something we have already
3819 written to the stack. Since most function calls on RISC machines
3820 do not use the stack, this is uncommon, but must work correctly.
3822 Therefore, we save any area of the stack that was already written
3823 and that we are using. Here we set up to do this by making a new
3824 stack usage map from the old one.
3826 Another approach might be to try to reorder the argument
3827 evaluations to avoid this conflicting stack usage. */
3829 needed = args_size.constant;
3831 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3832 /* Since we will be writing into the entire argument area, the
3833 map must be allocated for its entire size, not just the part that
3834 is the responsibility of the caller. */
3835 needed += reg_parm_stack_space;
3838 #ifdef ARGS_GROW_DOWNWARD
3839 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3842 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3845 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3847 if (initial_highest_arg_in_use)
3848 memcpy (stack_usage_map, initial_stack_usage_map,
3849 initial_highest_arg_in_use);
3851 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3852 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3853 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
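/* Illustration (hypothetical values, not from the original): if an earlier
   argument already occupies map bytes 16..23 and a later argument wants the
   same slot, the scan further below finds a nonzero stack_usage_map entry
   in that range, saves the old contents into argvec[argnum].save_area
   before the push, and restores them after the call.  */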
3856 /* We must be careful to use virtual regs before they're instantiated,
3857 and real regs afterwards. Loop optimization, for example, can create
3858 new libcalls after we've instantiated the virtual regs, and if we
3859 use virtuals anyway, they won't match the rtl patterns. */
3861 if (virtuals_instantiated)
3862 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3864 argblock = virtual_outgoing_args_rtx;
3869 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3872 /* If we push args individually in reverse order, perform stack alignment
3873 before the first push (the last arg). */
3874 if (argblock == 0 && PUSH_ARGS_REVERSED)
3875 anti_adjust_stack (GEN_INT (args_size.constant
3876 - original_args_size.constant));
3878 if (PUSH_ARGS_REVERSED)
3889 #ifdef REG_PARM_STACK_SPACE
3890 if (ACCUMULATE_OUTGOING_ARGS)
3892 /* The argument list is the property of the called routine and it
3893 may clobber it. If the fixed area has been used for previous
3894 parameters, we must save and restore it. */
3895 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3896 &low_to_save, &high_to_save);
3900 /* Push the args that need to be pushed. */
3902 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3903 are to be pushed. */
3904 for (count = 0; count < nargs; count++, argnum += inc)
3906 enum machine_mode mode = argvec[argnum].mode;
3907 rtx val = argvec[argnum].value;
3908 rtx reg = argvec[argnum].reg;
3909 int partial = argvec[argnum].partial;
3910 int lower_bound = 0, upper_bound = 0, i;
3912 if (! (reg != 0 && partial == 0))
3914 if (ACCUMULATE_OUTGOING_ARGS)
3916 /* If this is being stored into a pre-allocated, fixed-size,
3917 stack area, save any previous data at that location. */
3919 #ifdef ARGS_GROW_DOWNWARD
3920 /* The argument's stack offset is negative when ARGS_GROW_DOWNWARD,
3921 but we want to index stack_usage_map with positive values. */
3922 upper_bound = -argvec[argnum].offset.constant + 1;
3923 lower_bound = upper_bound - argvec[argnum].size.constant;
3925 lower_bound = argvec[argnum].offset.constant;
3926 upper_bound = lower_bound + argvec[argnum].size.constant;
3930 /* Don't worry about things in the fixed argument area;
3931 it has already been saved. */
3932 if (i < reg_parm_stack_space)
3933 i = reg_parm_stack_space;
3934 while (i < upper_bound && stack_usage_map[i] == 0)
3937 if (i < upper_bound)
3939 /* We need to make a save area. See what mode we can make
3941 enum machine_mode save_mode
3942 = mode_for_size (argvec[argnum].size.constant
3950 plus_constant (argblock,
3951 argvec[argnum].offset.constant)));
3952 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3954 emit_move_insn (argvec[argnum].save_area, stack_area);
3958 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3959 partial, reg, 0, argblock,
3960 GEN_INT (argvec[argnum].offset.constant),
3961 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3963 /* Now mark the segment we just used. */
3964 if (ACCUMULATE_OUTGOING_ARGS)
3965 for (i = lower_bound; i < upper_bound; i++)
3966 stack_usage_map[i] = 1;
3972 /* If we pushed args in forward order, perform stack alignment
3973 after pushing the last arg. */
3974 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3975 anti_adjust_stack (GEN_INT (args_size.constant
3976 - original_args_size.constant));
3978 if (PUSH_ARGS_REVERSED)
3983 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
3985 /* Now load any reg parms into their regs. */
3987 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3988 are to be pushed. */
3989 for (count = 0; count < nargs; count++, argnum += inc)
3991 rtx val = argvec[argnum].value;
3992 rtx reg = argvec[argnum].reg;
3993 int partial = argvec[argnum].partial;
3995 /* Handle calls that pass values in multiple non-contiguous
3996 locations. The PA64 has examples of this for library calls. */
3997 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3998 emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
3999 else if (reg != 0 && partial == 0)
4000 emit_move_insn (reg, val);
4005 /* Any regs containing parms remain in use through the call. */
4006 for (count = 0; count < nargs; count++)
4008 rtx reg = argvec[count].reg;
4009 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4010 use_group_regs (&call_fusage, reg);
4012 use_reg (&call_fusage, reg);
4015 /* Pass the function the address in which to return a structure value. */
4016 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4018 emit_move_insn (struct_value_rtx,
4020 force_operand (XEXP (mem_value, 0),
4022 if (GET_CODE (struct_value_rtx) == REG)
4023 use_reg (&call_fusage, struct_value_rtx);
4026 /* Don't allow popping to be deferred, since then
4027 cse'ing of library calls could delete a call and leave the pop. */
4029 valreg = (mem_value == 0 && outmode != VOIDmode
4030 ? hard_libcall_value (outmode) : NULL_RTX);
4032 /* Stack must be properly aligned now. */
4033 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4036 before_call = get_last_insn ();
4038 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4039 will set inhibit_defer_pop to that value. */
4040 /* The return type is needed to decide how many bytes the function pops.
4041 Signedness plays no role in that, so for simplicity, we pretend it's
4042 always signed. We also assume that the list of arguments passed has
4043 no impact, so we pretend it is unknown. */
4046 get_identifier (XSTR (orgfun, 0)),
4047 build_function_type (tfom, NULL_TREE),
4048 original_args_size.constant, args_size.constant,
4050 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4052 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4054 /* For calls to `setjmp', etc., inform flow.c it should complain
4055 if nonvolatile values are live. For functions that cannot return,
4056 inform flow that control does not fall through. */
4058 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4060 /* The barrier note must be emitted
4061 immediately after the CALL_INSN. Some ports emit more than
4062 just a CALL_INSN above, so we must search for it here. */
4064 rtx last = get_last_insn ();
4065 while (GET_CODE (last) != CALL_INSN)
4067 last = PREV_INSN (last);
4068 /* There was no CALL_INSN? */
4069 if (last == before_call)
4073 emit_barrier_after (last);
4076 /* Now restore inhibit_defer_pop to its actual original value. */
4079 /* If call is cse'able, make appropriate pair of reg-notes around it.
4080 Test valreg so we don't crash; may safely ignore `const'
4081 if return type is void. Disable for PARALLEL return values, because
4082 we have no way to move such values into a pseudo register. */
4083 if (flags & ECF_LIBCALL_BLOCK)
4089 insns = get_insns ();
4099 if (GET_CODE (valreg) == PARALLEL)
4101 temp = gen_reg_rtx (outmode);
4102 emit_group_store (temp, valreg, outmode);
4106 temp = gen_reg_rtx (GET_MODE (valreg));
4108 /* Construct an "equal form" for the value which mentions all the
4109 arguments in order as well as the function name. */
4110 for (i = 0; i < nargs; i++)
4111 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4112 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4114 insns = get_insns ();
4117 if (flags & ECF_PURE)
4118 note = gen_rtx_EXPR_LIST (VOIDmode,
4119 gen_rtx_USE (VOIDmode,
4120 gen_rtx_MEM (BLKmode,
4121 gen_rtx_SCRATCH (VOIDmode))),
4124 emit_libcall_block (insns, temp, valreg, note);
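/* Sketch of the expected effect (assumed behaviour, not spelled out at
   this point in the file): emit_libcall_block re-emits INSNS bracketed by
   REG_LIBCALL and REG_RETVAL notes and copies VALREG into TEMP with NOTE
   attached as a REG_EQUAL note, which is what lets CSE treat the whole
   libcall sequence as a single value.  */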
4131 /* Copy the value to the right place. */
4132 if (outmode != VOIDmode && retval)
4138 if (value != mem_value)
4139 emit_move_insn (value, mem_value);
4141 else if (GET_CODE (valreg) == PARALLEL)
4144 value = gen_reg_rtx (outmode);
4145 emit_group_store (value, valreg, outmode);
4147 else if (value != 0)
4148 emit_move_insn (value, valreg);
4153 if (ACCUMULATE_OUTGOING_ARGS)
4155 #ifdef REG_PARM_STACK_SPACE
4157 restore_fixed_argument_area (save_area, argblock,
4158 high_to_save, low_to_save);
4161 /* If we saved any argument areas, restore them. */
4162 for (count = 0; count < nargs; count++)
4163 if (argvec[count].save_area)
4165 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4167 = gen_rtx_MEM (save_mode,
4170 plus_constant (argblock,
4171 argvec[count].offset.constant)));
4173 emit_move_insn (stack_area, argvec[count].save_area);
4176 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4177 stack_usage_map = initial_stack_usage_map;
4184 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4186 for a value of mode OUTMODE,
4187 with NARGS different arguments, passed as alternating rtx values
4188 and machine_modes to convert them to.
4189 The rtx values should have been passed through protect_from_queue already.
4191 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4192 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4193 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4194 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4195 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4196 or other LCT_ value for other types of library calls. */
4199 emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4200 enum machine_mode outmode, int nargs, ...))
4203 VA_FIXEDARG (p, rtx, orgfun);
4204 VA_FIXEDARG (p, int, fn_type);
4205 VA_FIXEDARG (p, enum machine_mode, outmode);
4206 VA_FIXEDARG (p, int, nargs);
4208 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
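/* Illustrative usage (hypothetical caller, not part of this file): a
   back end wanting to call a two-operand SImode helper would write
   something like

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_helper"),
                        LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);

   where "__example_helper", op0 and op1 are assumed names, and each
   argument rtx is followed by the machine mode it should be converted
   to, as described above.  */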
4213 /* Like emit_library_call except that an extra argument, VALUE,
4214 comes second and says where to store the result.
4215 (If VALUE is zero, this function chooses a convenient way
4216 to return the value.
4218 This function returns an rtx for where the value is to be found.
4219 If VALUE is nonzero, VALUE is returned. */
4222 emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4223 enum libcall_type fn_type,
4224 enum machine_mode outmode, int nargs, ...))
4229 VA_FIXEDARG (p, rtx, orgfun);
4230 VA_FIXEDARG (p, rtx, value);
4231 VA_FIXEDARG (p, int, fn_type);
4232 VA_FIXEDARG (p, enum machine_mode, outmode);
4233 VA_FIXEDARG (p, int, nargs);
4235 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4243 /* Store a single argument for a function call
4244 into the register or memory area where it must be passed.
4245 *ARG describes the argument value and where to pass it.
4247 ARGBLOCK is the address of the stack-block for all the arguments,
4248 or 0 on a machine where arguments are pushed individually.
4250 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4251 so must be careful about how the stack is used.
4253 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4254 argument area. This is used when ACCUMULATE_OUTGOING_ARGS is in effect
4255 to indicate that we need not worry about saving and restoring the stack.
4257 FNDECL is the declaration of the function we are calling.
4259 Return nonzero if this arg should cause sibcall failure,
4263 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4264 struct arg_data *arg;
4267 int variable_size ATTRIBUTE_UNUSED;
4268 int reg_parm_stack_space;
4270 tree pval = arg->tree_value;
4274 int i, lower_bound = 0, upper_bound = 0;
4275 int sibcall_failure = 0;
4277 if (TREE_CODE (pval) == ERROR_MARK)
4280 /* Push a new temporary level for any temporaries we make for
4284 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4286 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4287 save any previous data at that location. */
4288 if (argblock && ! variable_size && arg->stack)
4290 #ifdef ARGS_GROW_DOWNWARD
4291 /* The stack slot's offset is negative, but we want to index
4292 stack_usage_map with positive values. */
4293 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4294 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4298 lower_bound = upper_bound - arg->size.constant;
4300 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4301 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4305 upper_bound = lower_bound + arg->size.constant;
4309 /* Don't worry about things in the fixed argument area;
4310 it has already been saved. */
4311 if (i < reg_parm_stack_space)
4312 i = reg_parm_stack_space;
4313 while (i < upper_bound && stack_usage_map[i] == 0)
4316 if (i < upper_bound)
4318 /* We need to make a save area. See what mode we can make it. */
4319 enum machine_mode save_mode
4320 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4322 = gen_rtx_MEM (save_mode,
4323 memory_address (save_mode,
4324 XEXP (arg->stack_slot, 0)));
4326 if (save_mode == BLKmode)
4328 tree ot = TREE_TYPE (arg->tree_value);
4329 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4330 | TYPE_QUAL_CONST));
4332 arg->save_area = assign_temp (nt, 0, 1, 1);
4333 preserve_temp_slots (arg->save_area);
4334 emit_block_move (validize_mem (arg->save_area), stack_area,
4335 expr_size (arg->tree_value),
4336 BLOCK_OP_CALL_PARM);
4340 arg->save_area = gen_reg_rtx (save_mode);
4341 emit_move_insn (arg->save_area, stack_area);
4347 /* If this isn't going to be placed on both the stack and in registers,
4348 set up the register and number of words. */
4349 if (! arg->pass_on_stack)
4351 if (flags & ECF_SIBCALL)
4352 reg = arg->tail_call_reg;
4355 partial = arg->partial;
4358 if (reg != 0 && partial == 0)
4359 /* Being passed entirely in a register. We shouldn't be called in
4363 /* If this arg needs special alignment, don't load the registers
4365 if (arg->n_aligned_regs != 0)
4368 /* If this is being passed partially in a register, we can't evaluate
4369 it directly into its stack slot. Otherwise, we can. */
4370 if (arg->value == 0)
4372 /* stack_arg_under_construction is nonzero if a function argument is
4373 being evaluated directly into the outgoing argument list and
4374 expand_call must take special action to preserve the argument list
4375 if it is called recursively.
4377 For scalar function arguments stack_usage_map is sufficient to
4378 determine which stack slots must be saved and restored. Scalar
4379 arguments in general have pass_on_stack == 0.
4381 If this argument is initialized by a function which takes the
4382 address of the argument (a C++ constructor or a C function
4383 returning a BLKmode structure), then stack_usage_map is
4384 insufficient and expand_call must push the stack around the
4385 function call. Such arguments have pass_on_stack == 1.
4387 Note that it is always safe to set stack_arg_under_construction,
4388 but this generates suboptimal code if set when not needed. */
4390 if (arg->pass_on_stack)
4391 stack_arg_under_construction++;
4393 arg->value = expand_expr (pval,
4395 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4396 ? NULL_RTX : arg->stack,
4397 VOIDmode, EXPAND_STACK_PARM);
4399 /* If the mode doesn't agree (e.g. because we are promoting the
4400 object, or for any other reason), convert to the expected mode. */
4402 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4403 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4404 arg->value, arg->unsignedp);
4406 if (arg->pass_on_stack)
4407 stack_arg_under_construction--;
4410 /* Don't allow anything left on stack from computation
4411 of argument to alloca. */
4412 if (flags & ECF_MAY_BE_ALLOCA)
4413 do_pending_stack_adjust ();
4415 if (arg->value == arg->stack)
4416 /* If the value is already in the stack slot, we are done. */
4418 else if (arg->mode != BLKmode)
4422 /* Argument is a scalar, not entirely passed in registers.
4423 (If part is passed in registers, arg->partial says how much
4424 and emit_push_insn will take care of putting it there.)
4426 Push it, and if its size is less than the
4427 amount of space allocated to it,
4428 also bump stack pointer by the additional space.
4429 Note that in C the default argument promotions
4430 will prevent such mismatches. */
4432 size = GET_MODE_SIZE (arg->mode);
4433 /* Compute how much space the push instruction will push.
4434 On many machines, pushing a byte will advance the stack
4435 pointer by a halfword. */
4436 #ifdef PUSH_ROUNDING
4437 size = PUSH_ROUNDING (size);
4441 /* Compute how much space the argument should get:
4442 round up to a multiple of the alignment for arguments. */
4443 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4444 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4445 / (PARM_BOUNDARY / BITS_PER_UNIT))
4446 * (PARM_BOUNDARY / BITS_PER_UNIT));
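/* Worked example (hypothetical numbers, not from the original): with a
   32-bit PARM_BOUNDARY a QImode argument has SIZE = 1 but gets
   USED = ((1 + 3) / 4) * 4 = 4 bytes, so the emit_push_insn call below is
   told to allocate the extra USED - SIZE = 3 bytes of padding as well.  */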
4448 /* This isn't already where we want it on the stack, so put it there.
4449 This can either be done with push or copy insns. */
4450 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4451 PARM_BOUNDARY, partial, reg, used - size, argblock,
4452 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4453 ARGS_SIZE_RTX (arg->alignment_pad));
4455 /* Unless this is a partially-in-register argument, the argument is now
4458 arg->value = arg->stack;
4462 /* BLKmode, at least partly to be pushed. */
4464 unsigned int parm_align;
4468 /* Pushing a nonscalar.
4469 If part is passed in registers, PARTIAL says how much
4470 and emit_push_insn will take care of putting it there. */
4472 /* Round its size up to a multiple
4473 of the allocation unit for arguments. */
4475 if (arg->size.var != 0)
4478 size_rtx = ARGS_SIZE_RTX (arg->size);
4482 /* PUSH_ROUNDING has no effect on us, because
4483 emit_push_insn for BLKmode is careful to avoid it. */
4484 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4485 + partial * UNITS_PER_WORD);
4486 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4487 NULL_RTX, TYPE_MODE (sizetype), 0);
4490 /* Some types will require stricter alignment, which will be
4491 provided for elsewhere in argument layout. */
4492 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4494 /* When an argument is padded down, the block is aligned to
4495 PARM_BOUNDARY, but the actual argument isn't. */
4496 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4499 parm_align = BITS_PER_UNIT;
4502 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4503 parm_align = MIN (parm_align, excess_align);
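/* Worked example (hypothetical numbers, not from the original):
   (excess & -excess) isolates the lowest set bit of EXCESS, so an excess
   of 6 bytes gives 2, i.e. 16-bit alignment: when the data starts
   EXCESS = 6 bytes into a PARM_BOUNDARY-aligned slot, only 2-byte
   alignment of its start can be guaranteed.  */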
4507 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4509 /* emit_push_insn might not work properly if arg->value and
4510 argblock + arg->offset areas overlap. */
4514 if (XEXP (x, 0) == current_function_internal_arg_pointer
4515 || (GET_CODE (XEXP (x, 0)) == PLUS
4516 && XEXP (XEXP (x, 0), 0) ==
4517 current_function_internal_arg_pointer
4518 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4520 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4521 i = INTVAL (XEXP (XEXP (x, 0), 1));
4523 /* expand_call should ensure this */
4524 if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4527 if (arg->offset.constant > i)
4529 if (arg->offset.constant < i + INTVAL (size_rtx))
4530 sibcall_failure = 1;
4532 else if (arg->offset.constant < i)
4534 if (i < arg->offset.constant + INTVAL (size_rtx))
4535 sibcall_failure = 1;
4540 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4541 parm_align, partial, reg, excess, argblock,
4542 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4543 ARGS_SIZE_RTX (arg->alignment_pad));
4545 /* Unless this is a partially-in-register argument, the argument is now
4548 ??? Unlike the case above, in which we want the actual
4549 address of the data, so that we can load it directly into a
4550 register, here we want the address of the stack slot, so that
4551 it's properly aligned for word-by-word copying or something
4552 like that. It's not clear that this is always correct. */
4554 arg->value = arg->stack_slot;
4557 /* Mark all slots this store used. */
4558 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4559 && argblock && ! variable_size && arg->stack)
4560 for (i = lower_bound; i < upper_bound; i++)
4561 stack_usage_map[i] = 1;
4563 /* Once we have pushed something, pops can't safely
4564 be deferred during the rest of the arguments. */
4567 /* ANSI doesn't require a sequence point here,
4568 but PCC has one, so this will avoid some problems. */
4571 /* Free any temporary slots made in processing this argument. Show
4572 that we might have taken the address of something and pushed that
4574 preserve_temp_slots (NULL_RTX);
4578 return sibcall_failure;
4582 /* Nonzero if we do not know how to pass TYPE solely in registers.
4583 We cannot do so in the following cases:
4585 - if the type has variable size
4586 - if the type is marked as addressable (it is required to be constructed
4588 - if the padding and mode of the type is such that a copy into a register
4589 would put it into the wrong part of the register.
4591 Which padding can't be supported depends on the byte endianness.
4593 A value in a register is implicitly padded at the most significant end.
4594 On a big-endian machine, that is the lower end in memory.
4595 So a value padded in memory at the upper end can't go in a register.
4596 For a little-endian machine, the reverse is true. */
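/* Example (assumed target parameters, not from the original): on a
   big-endian machine with a 32-bit PARM_BOUNDARY, a 3-byte type that
   FUNCTION_ARG_PADDING pads upward keeps its padding at the high-address
   end in memory, while a register copy would be padded at the low-address
   end, so the bytes would land in the wrong part of the register and the
   value must be passed on the stack.  */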
4599 default_must_pass_in_stack (mode, type)
4600 enum machine_mode mode;
4606 /* If the type has variable size... */
4607 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4610 /* If the type is marked as addressable (it is required
4611 to be constructed into the stack)... */
4612 if (TREE_ADDRESSABLE (type))
4615 /* If the padding and mode of the type is such that a copy into
4616 a register would put it into the wrong part of the register. */
4618 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4619 && (FUNCTION_ARG_PADDING (mode, type)
4620 == (BYTES_BIG_ENDIAN ? upward : downward)))