1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
39 #include "langhooks.h"
44 #ifndef STACK_POINTER_OFFSET
45 #define STACK_POINTER_OFFSET 0
48 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
49 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
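/* For example, on a target whose PREFERRED_STACK_BOUNDARY is 64 bits,
   STACK_BYTES is 64 / 8 = 8, so the argument-block sizes computed below
   get rounded up to multiples of 8 bytes.  (Illustrative value only; the
   real boundary is target-defined.)  */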
51 /* Data structure and subroutines used within expand_call. */
55 /* Tree node for this argument. */
57 /* Mode for value; TYPE_MODE unless promoted. */
58 enum machine_mode mode;
59 /* Current RTL value for argument, or 0 if it isn't precomputed. */
61 /* Initially-computed RTL value for argument; only for const functions. */
63 /* Register to pass this argument in, 0 if passed on stack, or a
64 PARALLEL if the arg is to be copied into multiple non-contiguous
67 /* Register to pass this argument in when generating tail call sequence.
68 This is not the same register as for normal calls on machines with
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
91 /* Place that this stack area has been saved, if needed. */
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
101 /* A vector of one char per byte of stack space. A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
110 /* A bitmap of virtual-incoming stack space. A bit is set if the tail call
111 argument for the corresponding stack location has already been stored into
112 the stack. This bitmap is used to prevent sibling call optimization if the
113 function tries to use its parent's incoming argument slots when they have
114 already been overwritten with tail call arguments. */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 int stack_arg_under_construction;
124 static int calls_function (tree, int);
125 static int calls_function_1 (tree, int);
127 static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
128 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
130 static void precompute_register_parameters (int, struct arg_data *, int *);
131 static int store_one_arg (struct arg_data *, rtx, int, int, int);
132 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
133 static int finalize_must_preallocate (int, int, struct arg_data *,
135 static void precompute_arguments (int, int, struct arg_data *);
136 static int compute_argument_block_size (int, struct args_size *, int);
137 static void initialize_argument_information (int, struct arg_data *,
138 struct args_size *, int, tree,
139 tree, CUMULATIVE_ARGS *, int,
140 rtx *, int *, int *, int *);
141 static void compute_argument_addresses (struct arg_data *, rtx, int);
142 static rtx rtx_for_function_call (tree, tree);
143 static void load_register_parameters (struct arg_data *, int, rtx *, int,
145 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
146 enum machine_mode, int, va_list);
147 static int special_function_p (tree, int);
148 static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
149 static int check_sibcall_argument_overlap_1 (rtx);
150 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
152 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
154 static tree fix_unsafe_tree (tree);
156 #ifdef REG_PARM_STACK_SPACE
157 static rtx save_fixed_argument_area (int, rtx, int *, int *);
158 static void restore_fixed_argument_area (rtx, rtx, int, int);
161 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function `alloca'.
164 If WHICH is 0, return 1 if EXP contains a call to any function.
165 Actually, we need only return 1 if evaluating EXP would require pushing
166 arguments on the stack, but that is too difficult to compute, so we just
167 assume any function call might require the stack. */
169 static tree calls_function_save_exprs;
172 calls_function (tree exp, int which)
176 calls_function_save_exprs = 0;
177 val = calls_function_1 (exp, which);
178 calls_function_save_exprs = 0;
182 /* Recursive function to do the work of the above function. */
185 calls_function_1 (tree exp, int which)
188 enum tree_code code = TREE_CODE (exp);
189 int class = TREE_CODE_CLASS (code);
190 int length = first_rtl_op (code);
192 /* If this code is language-specific, we don't know what it will do. */
193 if ((int) code >= NUM_TREE_CODES)
201 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
203 && (TYPE_RETURNS_STACK_DEPRESSED
204 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
206 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
207 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
209 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
211 & ECF_MAY_BE_ALLOCA))
220 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
221 if (calls_function_1 (TREE_VALUE (tem), which))
228 if (SAVE_EXPR_RTL (exp) != 0)
230 if (value_member (exp, calls_function_save_exprs))
232 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
233 calls_function_save_exprs);
234 return (TREE_OPERAND (exp, 0) != 0
235 && calls_function_1 (TREE_OPERAND (exp, 0), which));
242 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
243 if (DECL_INITIAL (local) != 0
244 && calls_function_1 (DECL_INITIAL (local), which))
247 for (subblock = BLOCK_SUBBLOCKS (exp);
249 subblock = TREE_CHAIN (subblock))
250 if (calls_function_1 (subblock, which))
256 for (; exp != 0; exp = TREE_CHAIN (exp))
257 if (calls_function_1 (TREE_VALUE (exp), which))
265 /* Only expressions and blocks can contain calls. */
266 if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
269 for (i = 0; i < length; i++)
270 if (TREE_OPERAND (exp, i) != 0
271 && calls_function_1 (TREE_OPERAND (exp, i), which))
277 /* Force FUNEXP into a form suitable for the address of a CALL,
278 and return that as an rtx. Also load the static chain register
279 if FNDECL is a nested function.
281 CALL_FUSAGE points to a variable holding the prospective
282 CALL_INSN_FUNCTION_USAGE information. */
285 prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
286 int reg_parm_seen, int sibcallp)
288 rtx static_chain_value = 0;
290 funexp = protect_from_queue (funexp, 0);
293 /* Get possible static chain value for nested function in C. */
294 static_chain_value = lookup_static_chain (fndecl);
296 /* Make a valid memory address and copy constants thru pseudo-regs,
297 but not for a constant address if -fno-function-cse. */
298 if (GET_CODE (funexp) != SYMBOL_REF)
299 /* If we are using registers for parameters, force the
300 function address into a register now. */
301 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
302 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
303 : memory_address (FUNCTION_MODE, funexp));
306 #ifndef NO_FUNCTION_CSE
307 if (optimize && ! flag_no_function_cse)
308 #ifdef NO_RECURSIVE_FUNCTION_CSE
309 if (fndecl != current_function_decl)
311 funexp = force_reg (Pmode, funexp);
315 if (static_chain_value != 0)
317 emit_move_insn (static_chain_rtx, static_chain_value);
319 if (GET_CODE (static_chain_rtx) == REG)
320 use_reg (call_fusage, static_chain_rtx);
326 /* Generate instructions to call function FUNEXP,
327 and optionally pop the results.
328 The CALL_INSN is the first insn generated.
330 FNDECL is the declaration node of the function. This is given to the
331 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
333 FUNTYPE is the data type of the function. This is given to the macro
334 RETURN_POPS_ARGS to determine whether this function pops its own args.
335 We used to allow an identifier for library functions, but that doesn't
336 work when the return type is an aggregate type and the calling convention
337 says that the pointer to this aggregate is to be popped by the callee.
339 STACK_SIZE is the number of bytes of arguments on the stack,
340 ROUNDED_STACK_SIZE is that number rounded up to
341 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
342 both to put into the call insn and to generate explicit popping
345 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
346 It is zero if this call doesn't want a structure value.
348 NEXT_ARG_REG is the rtx that results from executing
349 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
350 just after all the args have had their registers assigned.
351 This could be whatever you like, but normally it is the first
352 arg-register beyond those used for args in this call,
353 or 0 if all the arg-registers are used in this call.
354 It is passed on to `gen_call' so you can put this info in the call insn.
356 VALREG is a hard register in which a value is returned,
357 or 0 if the call does not return a value.
359 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
360 the args to this call were processed.
361 We restore `inhibit_defer_pop' to that value.
363 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
364 denote registers used by the called function. */
367 emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
368 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
369 HOST_WIDE_INT rounded_stack_size,
370 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
371 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
372 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
373 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
375 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
377 int already_popped = 0;
378 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
379 #if defined (HAVE_call) && defined (HAVE_call_value)
380 rtx struct_value_size_rtx;
381 struct_value_size_rtx = GEN_INT (struct_value_size);
384 #ifdef CALL_POPS_ARGS
385 n_popped += CALL_POPS_ARGS (* args_so_far);
388 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
389 and we don't want to load it into a register as an optimization,
390 because prepare_call_address already did it if it should be done. */
391 if (GET_CODE (funexp) != SYMBOL_REF)
392 funexp = memory_address (FUNCTION_MODE, funexp);
394 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
395 if ((ecf_flags & ECF_SIBCALL)
396 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
397 && (n_popped > 0 || stack_size == 0))
399 rtx n_pop = GEN_INT (n_popped);
402 /* If this subroutine pops its own args, record that in the call insn
403 if possible, for the sake of frame pointer elimination. */
406 pat = GEN_SIBCALL_VALUE_POP (valreg,
407 gen_rtx_MEM (FUNCTION_MODE, funexp),
408 rounded_stack_size_rtx, next_arg_reg,
411 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
412 rounded_stack_size_rtx, next_arg_reg, n_pop);
414 emit_call_insn (pat);
420 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
421 /* If the target has "call" or "call_value" insns, then prefer them
422 if no arguments are actually popped. If the target does not have
423 "call" or "call_value" insns, then we must use the popping versions
424 even if the call has no arguments to pop. */
425 #if defined (HAVE_call) && defined (HAVE_call_value)
426 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
427 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
429 if (HAVE_call_pop && HAVE_call_value_pop)
432 rtx n_pop = GEN_INT (n_popped);
435 /* If this subroutine pops its own args, record that in the call insn
436 if possible, for the sake of frame pointer elimination. */
439 pat = GEN_CALL_VALUE_POP (valreg,
440 gen_rtx_MEM (FUNCTION_MODE, funexp),
441 rounded_stack_size_rtx, next_arg_reg, n_pop);
443 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
444 rounded_stack_size_rtx, next_arg_reg, n_pop);
446 emit_call_insn (pat);
452 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
453 if ((ecf_flags & ECF_SIBCALL)
454 && HAVE_sibcall && HAVE_sibcall_value)
457 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
458 gen_rtx_MEM (FUNCTION_MODE, funexp),
459 rounded_stack_size_rtx,
460 next_arg_reg, NULL_RTX));
462 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
463 rounded_stack_size_rtx, next_arg_reg,
464 struct_value_size_rtx));
469 #if defined (HAVE_call) && defined (HAVE_call_value)
470 if (HAVE_call && HAVE_call_value)
473 emit_call_insn (GEN_CALL_VALUE (valreg,
474 gen_rtx_MEM (FUNCTION_MODE, funexp),
475 rounded_stack_size_rtx, next_arg_reg,
478 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
479 rounded_stack_size_rtx, next_arg_reg,
480 struct_value_size_rtx));
486 /* Find the call we just emitted. */
487 call_insn = last_call_insn ();
489 /* Mark memory as used for "pure" function call. */
490 if (ecf_flags & ECF_PURE)
494 gen_rtx_USE (VOIDmode,
495 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
498 /* Put the register usage information there. */
499 add_function_usage_to (call_insn, call_fusage);
501 /* If this is a const call, then set the insn's unchanging bit. */
502 if (ecf_flags & (ECF_CONST | ECF_PURE))
503 CONST_OR_PURE_CALL_P (call_insn) = 1;
505 /* If this call can't throw, attach a REG_EH_REGION reg note to that effect. */
507 if (ecf_flags & ECF_NOTHROW)
508 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
509 REG_NOTES (call_insn));
511 note_eh_region_may_contain_throw ();
513 if (ecf_flags & ECF_NORETURN)
514 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
515 REG_NOTES (call_insn));
516 if (ecf_flags & ECF_ALWAYS_RETURN)
517 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
518 REG_NOTES (call_insn));
520 if (ecf_flags & ECF_RETURNS_TWICE)
522 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
523 REG_NOTES (call_insn));
524 current_function_calls_setjmp = 1;
527 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
529 /* Restore this now, so that we do defer pops for this call's args
530 if the context of the call as a whole permits. */
531 inhibit_defer_pop = old_inhibit_defer_pop;
533 /* Don't bother cleaning up after a noreturn function. */
534 if (ecf_flags & (ECF_NORETURN | ECF_LONGJMP))
540 CALL_INSN_FUNCTION_USAGE (call_insn)
541 = gen_rtx_EXPR_LIST (VOIDmode,
542 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
543 CALL_INSN_FUNCTION_USAGE (call_insn));
544 rounded_stack_size -= n_popped;
545 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
546 stack_pointer_delta -= n_popped;
549 if (!ACCUMULATE_OUTGOING_ARGS)
551 /* If returning from the subroutine does not automatically pop the args,
552 we need an instruction to pop them sooner or later.
553 Perhaps do it now; perhaps just record how much space to pop later.
555 If returning from the subroutine does pop the args, indicate that the
556 stack pointer will be changed. */
558 if (rounded_stack_size != 0)
560 if (ecf_flags & ECF_SP_DEPRESSED)
561 /* Just pretend we did the pop. */
562 stack_pointer_delta -= rounded_stack_size;
563 else if (flag_defer_pop && inhibit_defer_pop == 0
564 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
565 pending_stack_adjust += rounded_stack_size;
567 adjust_stack (rounded_stack_size_rtx);
570 /* When we accumulate outgoing args, we must avoid any stack manipulations.
571 Restore the stack pointer to its original value now. Usually
572 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
573 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
574 popping variants of functions exist as well.
576 ??? We may optimize similar to defer_pop above, but it is
577 probably not worthwhile.
579 ??? It will be worthwhile to enable combine_stack_adjustments even for such machines. */
582 anti_adjust_stack (GEN_INT (n_popped));
585 /* Determine if the function identified by FNDECL is one with
586 special properties we wish to know about.
588 For example, if the function might return more than one time (setjmp), then
589 set RETURNS_TWICE to a nonzero value.
591 Similarly set LONGJMP if the function is in the longjmp family.
593 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
594 space from the stack such as alloca. */
597 special_function_p (tree fndecl, int flags)
599 if (! (flags & ECF_MALLOC)
600 && fndecl && DECL_NAME (fndecl)
601 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
602 /* Exclude functions not at the file scope, or not `extern',
603 since they are not the magic functions we would otherwise think they are.
605 FIXME: this should be handled with attributes, not with this
606 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
607 because you can declare fork() inside a function if you wish. */
609 && (DECL_CONTEXT (fndecl) == NULL_TREE
610 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
611 && TREE_PUBLIC (fndecl))
613 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
614 const char *tname = name;
616 /* We assume that alloca will always be called by name. It
617 makes no sense to pass it as a pointer-to-function to
618 anything that does not understand its behavior. */
619 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
621 && ! strcmp (name, "alloca"))
622 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
624 && ! strcmp (name, "__builtin_alloca"))))
625 flags |= ECF_MAY_BE_ALLOCA;
627 /* Disregard prefix _, __ or __x. */
630 if (name[1] == '_' && name[2] == 'x')
632 else if (name[1] == '_')
641 && (! strcmp (tname, "setjmp")
642 || ! strcmp (tname, "setjmp_syscall")))
644 && ! strcmp (tname, "sigsetjmp"))
646 && ! strcmp (tname, "savectx")))
647 flags |= ECF_RETURNS_TWICE;
650 && ! strcmp (tname, "siglongjmp"))
651 flags |= ECF_LONGJMP;
653 else if ((tname[0] == 'q' && tname[1] == 's'
654 && ! strcmp (tname, "qsetjmp"))
655 || (tname[0] == 'v' && tname[1] == 'f'
656 && ! strcmp (tname, "vfork")))
657 flags |= ECF_RETURNS_TWICE;
659 else if (tname[0] == 'l' && tname[1] == 'o'
660 && ! strcmp (tname, "longjmp"))
661 flags |= ECF_LONGJMP;
663 else if ((tname[0] == 'f' && tname[1] == 'o'
664 && ! strcmp (tname, "fork"))
665 /* Linux specific: __clone. Check NAME to insist on the
666 leading underscores, to avoid polluting the ISO / POSIX namespace. */
668 || (name[0] == '_' && name[1] == '_'
669 && ! strcmp (tname, "clone"))
670 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
671 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
673 || ((tname[5] == 'p' || tname[5] == 'e')
674 && tname[6] == '\0'))))
675 flags |= ECF_FORK_OR_EXEC;
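/* To summarize the name checks above: alloca and __builtin_alloca get
   ECF_MAY_BE_ALLOCA; setjmp, setjmp_syscall, sigsetjmp, savectx, qsetjmp
   and vfork get ECF_RETURNS_TWICE; longjmp and siglongjmp get ECF_LONGJMP;
   fork, __clone and the exec family get ECF_FORK_OR_EXEC.  */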
680 /* Return nonzero when the tree represents a call to a function in the setjmp family, i.e. one that may return more than once. */
683 setjmp_call_p (tree fndecl)
685 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
688 /* Return true when EXP contains an alloca call. */
690 alloca_call_p (tree exp)
692 if (TREE_CODE (exp) == CALL_EXPR
693 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
694 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
696 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
697 0) & ECF_MAY_BE_ALLOCA))
702 /* Detect flags (function attributes) from the function decl or type node. */
705 flags_from_decl_or_type (tree exp)
712 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
713 type = TREE_TYPE (exp);
717 if (i->pure_function)
718 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
719 if (i->const_function)
720 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
723 /* The function exp may have the `malloc' attribute. */
724 if (DECL_IS_MALLOC (exp))
727 /* The function exp may have the `pure' attribute. */
728 if (DECL_IS_PURE (exp))
729 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
731 if (TREE_NOTHROW (exp))
732 flags |= ECF_NOTHROW;
734 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
735 flags |= ECF_LIBCALL_BLOCK;
738 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
741 if (TREE_THIS_VOLATILE (exp))
742 flags |= ECF_NORETURN;
744 /* Mark if the function returns with the stack pointer depressed. We
745 cannot consider it pure or constant in that case. */
746 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
748 flags |= ECF_SP_DEPRESSED;
749 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
755 /* Precompute all register parameters as described by ARGS, storing values
756 into fields within the ARGS array.
758 NUM_ACTUALS indicates the total number of elements in the ARGS array.
760 Set REG_PARM_SEEN if we encounter a register parameter. */
763 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
769 for (i = 0; i < num_actuals; i++)
770 if (args[i].reg != 0 && ! args[i].pass_on_stack)
774 if (args[i].value == 0)
777 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
779 preserve_temp_slots (args[i].value);
782 /* ANSI doesn't require a sequence point here,
783 but PCC has one, so this will avoid some problems. */
787 /* If the value is a non-legitimate constant, force it into a
788 pseudo now. TLS symbols sometimes need a call to resolve. */
789 if (CONSTANT_P (args[i].value)
790 && !LEGITIMATE_CONSTANT_P (args[i].value))
791 args[i].value = force_reg (args[i].mode, args[i].value);
793 /* If we are to promote the function arg to a wider mode,
796 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
798 = convert_modes (args[i].mode,
799 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
800 args[i].value, args[i].unsignedp);
802 /* If the value is expensive, and we are inside an appropriately
804 short loop, put the value into a pseudo and then put the pseudo into the hard reg.
806 For small register classes, also do this if this call uses
807 register parameters. This is to avoid reload conflicts while
808 loading the parameter registers. */
810 if ((! (GET_CODE (args[i].value) == REG
811 || (GET_CODE (args[i].value) == SUBREG
812 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
813 && args[i].mode != BLKmode
814 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
815 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
816 || preserve_subexpressions_p ()))
817 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
821 #ifdef REG_PARM_STACK_SPACE
823 /* The argument list is the property of the called routine and it
824 may clobber it. If the fixed area has been used for previous
825 parameters, we must save and restore it. */
828 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
833 /* Compute the boundary of the area that needs to be saved, if any. */
834 high = reg_parm_stack_space;
835 #ifdef ARGS_GROW_DOWNWARD
838 if (high > highest_outgoing_arg_in_use)
839 high = highest_outgoing_arg_in_use;
841 for (low = 0; low < high; low++)
842 if (stack_usage_map[low] != 0)
845 enum machine_mode save_mode;
850 while (stack_usage_map[--high] == 0)
854 *high_to_save = high;
856 num_to_save = high - low + 1;
857 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
859 /* If we don't have the required alignment, we must do this in BLKmode. */
861 if ((low & (MIN (GET_MODE_SIZE (save_mode),
862 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
865 #ifdef ARGS_GROW_DOWNWARD
870 stack_area = gen_rtx_MEM (save_mode,
871 memory_address (save_mode,
872 plus_constant (argblock,
875 set_mem_align (stack_area, PARM_BOUNDARY);
876 if (save_mode == BLKmode)
878 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
879 emit_block_move (validize_mem (save_area), stack_area,
880 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
884 save_area = gen_reg_rtx (save_mode);
885 emit_move_insn (save_area, stack_area);
895 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
897 enum machine_mode save_mode = GET_MODE (save_area);
901 #ifdef ARGS_GROW_DOWNWARD
902 delta = -high_to_save;
906 stack_area = gen_rtx_MEM (save_mode,
907 memory_address (save_mode,
908 plus_constant (argblock, delta)));
909 set_mem_align (stack_area, PARM_BOUNDARY);
911 if (save_mode != BLKmode)
912 emit_move_insn (stack_area, save_area);
914 emit_block_move (stack_area, validize_mem (save_area),
915 GEN_INT (high_to_save - low_to_save + 1),
918 #endif /* REG_PARM_STACK_SPACE */
920 /* If any elements in ARGS refer to parameters that are to be passed in
921 registers, but not in memory, and whose alignment does not permit a
922 direct copy into registers, copy the values into a group of pseudos
923 which we will later copy into the appropriate hard registers.
925 Pseudos for each unaligned argument will be stored into the array
926 args[argnum].aligned_regs. The caller is responsible for deallocating
927 the aligned_regs array if it is nonzero. */
930 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
934 for (i = 0; i < num_actuals; i++)
935 if (args[i].reg != 0 && ! args[i].pass_on_stack
936 && args[i].mode == BLKmode
937 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
938 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
940 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
941 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
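/* For example (illustrative sizes), a 9-byte structure on a target with
   4-byte words needs nregs = (9 + 4 - 1) / 4 = 3 word-sized pseudos.  */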
942 int endian_correction = 0;
944 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
945 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
947 /* Structures smaller than a word are normally aligned to the
948 least significant byte. On a BYTES_BIG_ENDIAN machine,
949 this means we must skip the empty high order bytes when
950 calculating the bit offset. */
951 if (bytes < UNITS_PER_WORD
952 #ifdef BLOCK_REG_PADDING
953 && (BLOCK_REG_PADDING (args[i].mode,
954 TREE_TYPE (args[i].tree_value), 1)
960 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
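/* E.g. (illustrative sizes) a 3-byte argument with BITS_PER_WORD == 32
   gives endian_correction = 32 - 3 * 8 = 8, i.e. one byte of empty
   high-order padding is skipped on the big-endian target.  */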
962 for (j = 0; j < args[i].n_aligned_regs; j++)
964 rtx reg = gen_reg_rtx (word_mode);
965 rtx word = operand_subword_force (args[i].value, j, BLKmode);
966 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
968 args[i].aligned_regs[j] = reg;
969 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
970 word_mode, word_mode, BITS_PER_WORD);
972 /* There is no need to restrict this code to loading items
973 in TYPE_ALIGN sized hunks. The bitfield instructions can
974 load up entire word sized registers efficiently.
976 ??? This may not be needed anymore.
977 We used to emit a clobber here but that doesn't let later
978 passes optimize the instructions we emit. By storing 0 into
979 the register, later passes know that the first AND to zero out the
980 bitfield being set in the register is unnecessary. The store
981 of 0 will be deleted as will at least the first AND. */
983 emit_move_insn (reg, const0_rtx);
985 bytes -= bitsize / BITS_PER_UNIT;
986 store_bit_field (reg, bitsize, endian_correction, word_mode,
987 word, BITS_PER_WORD);
992 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
995 NUM_ACTUALS is the total number of parameters.
997 N_NAMED_ARGS is the total number of named arguments.
999 FNDECL is the tree code for the target of this call (if known)
1001 ARGS_SO_FAR holds state needed by the target to know where to place
1004 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1005 for arguments which are passed in registers.
1007 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1008 and may be modified by this routine.
1010 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1011 flags which may be modified by this routine. */
1014 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1015 struct arg_data *args,
1016 struct args_size *args_size,
1017 int n_named_args ATTRIBUTE_UNUSED,
1018 tree actparms, tree fndecl,
1019 CUMULATIVE_ARGS *args_so_far,
1020 int reg_parm_stack_space,
1021 rtx *old_stack_level, int *old_pending_adj,
1022 int *must_preallocate, int *ecf_flags)
1024 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1027 /* Count arg position in order args appear. */
1033 args_size->constant = 0;
1036 /* In this loop, we consider args in the order they are written.
1037 We fill up ARGS from the front or from the back if necessary
1038 so that in any case the first arg to be pushed ends up at the front. */
1040 if (PUSH_ARGS_REVERSED)
1042 i = num_actuals - 1, inc = -1;
1043 /* In this case, we must reverse the order of the args
1044 so that we compute and push the last arg first. */
1051 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1052 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1054 tree type = TREE_TYPE (TREE_VALUE (p));
1056 enum machine_mode mode;
1058 args[i].tree_value = TREE_VALUE (p);
1060 /* Replace erroneous argument with constant zero. */
1061 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1062 args[i].tree_value = integer_zero_node, type = integer_type_node;
1064 /* If TYPE is a transparent union, pass things the way we would
1065 pass the first field of the union. We have already verified that
1066 the modes are the same. */
1067 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1068 type = TREE_TYPE (TYPE_FIELDS (type));
1070 /* Decide where to pass this arg.
1072 args[i].reg is nonzero if all or part is passed in registers.
1074 args[i].partial is nonzero if part but not all is passed in registers,
1075 and the exact value says how many words are passed in registers.
1077 args[i].pass_on_stack is nonzero if the argument must at least be
1078 computed on the stack. It may then be loaded back into registers
1079 if args[i].reg is nonzero.
1081 These decisions are driven by the FUNCTION_... macros and must agree
1082 with those made by function.c. */
1084 /* See if this argument should be passed by invisible reference. */
1085 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
1086 || TREE_ADDRESSABLE (type)
1087 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1088 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1089 type, argpos < n_named_args)
1093 /* If we're compiling a thunk, pass through invisible
1094 references instead of making a copy. */
1095 if (current_function_is_thunk
1096 #ifdef FUNCTION_ARG_CALLEE_COPIES
1097 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1098 type, argpos < n_named_args)
1099 /* If it's in a register, we must make a copy of it too. */
1100 /* ??? Is this a sufficient test? Is there a better one? */
1101 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1102 && REG_P (DECL_RTL (args[i].tree_value)))
1103 && ! TREE_ADDRESSABLE (type))
1107 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1108 new object from the argument. If we are passing by
1109 invisible reference, the callee will do that for us, so we
1110 can strip off the TARGET_EXPR. This is not always safe,
1111 but it is safe in the only case where this is a useful
1112 optimization; namely, when the argument is a plain object.
1113 In that case, the frontend is just asking the backend to
1114 make a bitwise copy of the argument. */
1116 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1117 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1118 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1119 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1121 args[i].tree_value = build1 (ADDR_EXPR,
1122 build_pointer_type (type),
1123 args[i].tree_value);
1124 type = build_pointer_type (type);
1126 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1128 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1129 We implement this by passing the address of the temporary
1130 rather than expanding it into another allocated slot. */
1131 args[i].tree_value = build1 (ADDR_EXPR,
1132 build_pointer_type (type),
1133 args[i].tree_value);
1134 type = build_pointer_type (type);
1138 /* We make a copy of the object and pass the address to the
1139 function being called. */
1142 if (!COMPLETE_TYPE_P (type)
1143 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1144 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1145 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1146 STACK_CHECK_MAX_VAR_SIZE))))
1148 /* This is a variable-sized object. Make space on the stack for it. */
1150 rtx size_rtx = expr_size (TREE_VALUE (p));
1152 if (*old_stack_level == 0)
1154 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1155 *old_pending_adj = pending_stack_adjust;
1156 pending_stack_adjust = 0;
1159 copy = gen_rtx_MEM (BLKmode,
1160 allocate_dynamic_stack_space
1161 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1162 set_mem_attributes (copy, type, 1);
1165 copy = assign_temp (type, 0, 1, 0);
1167 store_expr (args[i].tree_value, copy, 0);
1168 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1170 args[i].tree_value = build1 (ADDR_EXPR,
1171 build_pointer_type (type),
1172 make_tree (type, copy));
1173 type = build_pointer_type (type);
1177 mode = TYPE_MODE (type);
1178 unsignedp = TREE_UNSIGNED (type);
1180 #ifdef PROMOTE_FUNCTION_ARGS
1181 mode = promote_mode (type, mode, &unsignedp, 1);
1184 args[i].unsignedp = unsignedp;
1185 args[i].mode = mode;
1187 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1188 argpos < n_named_args);
1189 #ifdef FUNCTION_INCOMING_ARG
1190 /* If this is a sibling call and the machine has register windows, the
1191 register window has to be unwound before calling the routine, so
1192 arguments have to go into the incoming registers. */
1193 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1194 argpos < n_named_args);
1196 args[i].tail_call_reg = args[i].reg;
1199 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1202 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1203 argpos < n_named_args);
1206 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1208 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1209 it means that we are to pass this arg in the register(s) designated
1210 by the PARALLEL, but also to pass it on the stack. */
1211 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1212 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1213 args[i].pass_on_stack = 1;
1215 /* If this is an addressable type, we must preallocate the stack
1216 since we must evaluate the object into its final location.
1218 If this is to be passed in both registers and the stack, it is simpler to preallocate. */
1220 if (TREE_ADDRESSABLE (type)
1221 || (args[i].pass_on_stack && args[i].reg != 0))
1222 *must_preallocate = 1;
1224 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1225 we cannot consider this function call constant. */
1226 if (TREE_ADDRESSABLE (type))
1227 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1229 /* Compute the stack-size of this argument. */
1230 if (args[i].reg == 0 || args[i].partial != 0
1231 || reg_parm_stack_space > 0
1232 || args[i].pass_on_stack)
1233 locate_and_pad_parm (mode, type,
1234 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1239 args[i].pass_on_stack ? 0 : args[i].partial,
1240 fndecl, args_size, &args[i].locate);
1241 #ifdef BLOCK_REG_PADDING
1243 /* The argument is passed entirely in registers. See at which
1244 end it should be padded. */
1245 args[i].locate.where_pad =
1246 BLOCK_REG_PADDING (mode, type,
1247 int_size_in_bytes (type) <= UNITS_PER_WORD);
1250 /* Update ARGS_SIZE, the total stack space for args so far. */
1252 args_size->constant += args[i].locate.size.constant;
1253 if (args[i].locate.size.var)
1254 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1256 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1257 have been used, etc. */
1259 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1260 argpos < n_named_args);
1264 /* Update ARGS_SIZE to contain the total size for the argument block.
1265 Return the original constant component of the argument block's size.
1267 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1268 for arguments passed in registers. */
1271 compute_argument_block_size (int reg_parm_stack_space,
1272 struct args_size *args_size,
1273 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1275 int unadjusted_args_size = args_size->constant;
1277 /* For accumulate outgoing args mode we don't need to align, since the frame
1278 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1279 backends from generating misaligned frame sizes. */
1280 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1281 preferred_stack_boundary = STACK_BOUNDARY;
1283 /* Compute the actual size of the argument block required. The variable
1284 and constant sizes must be combined, the size may have to be rounded,
1285 and there may be a minimum required size. */
1289 args_size->var = ARGS_SIZE_TREE (*args_size);
1290 args_size->constant = 0;
1292 preferred_stack_boundary /= BITS_PER_UNIT;
1293 if (preferred_stack_boundary > 1)
1295 /* We don't handle this case yet. To handle it correctly we have
1296 to add the delta, round and subtract the delta.
1297 Currently no machine description requires this support. */
1298 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1300 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1303 if (reg_parm_stack_space > 0)
1306 = size_binop (MAX_EXPR, args_size->var,
1307 ssize_int (reg_parm_stack_space));
1309 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1310 /* The area corresponding to register parameters is not to count in
1311 the size of the block we need. So make the adjustment. */
1313 = size_binop (MINUS_EXPR, args_size->var,
1314 ssize_int (reg_parm_stack_space));
1320 preferred_stack_boundary /= BITS_PER_UNIT;
1321 if (preferred_stack_boundary < 1)
1322 preferred_stack_boundary = 1;
1323 args_size->constant = (((args_size->constant
1324 + stack_pointer_delta
1325 + preferred_stack_boundary - 1)
1326 / preferred_stack_boundary
1327 * preferred_stack_boundary)
1328 - stack_pointer_delta);
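/* Illustrative trace of the rounding above: with constant == 20,
   stack_pointer_delta == 4 and a 16-byte boundary, we get
   (20 + 4 + 15) / 16 * 16 - 4 == 28, so the stack pointer ends up
   16-byte aligned once the 28 bytes of arguments are pushed.  */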
1330 args_size->constant = MAX (args_size->constant,
1331 reg_parm_stack_space);
1333 #ifdef MAYBE_REG_PARM_STACK_SPACE
1334 if (reg_parm_stack_space == 0)
1335 args_size->constant = 0;
1338 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1339 args_size->constant -= reg_parm_stack_space;
1342 return unadjusted_args_size;
1345 /* Precompute parameters as needed for a function call.
1347 FLAGS is mask of ECF_* constants.
1349 NUM_ACTUALS is the number of arguments.
1351 ARGS is an array containing information for each argument; this
1352 routine fills in the INITIAL_VALUE and VALUE fields for each
1353 precomputed argument. */
1356 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1360 /* If this function call is cse'able, precompute all the parameters.
1361 Note that if the parameter is constructed into a temporary, this will
1362 cause an additional copy because the parameter will be constructed
1363 into a temporary location and then copied into the outgoing arguments.
1364 If a parameter contains a call to alloca and this function uses the
1365 stack, precompute the parameter. */
1367 /* If we preallocated the stack space, and some arguments must be passed
1368 on the stack, then we must precompute any parameter which contains a
1369 function call which will store arguments on the stack.
1370 Otherwise, evaluating the parameter may clobber previous parameters
1371 which have already been stored into the stack. (We have code to avoid
1372 such a case by saving the outgoing stack arguments, but it results in worse code.) */
1375 for (i = 0; i < num_actuals; i++)
1376 if ((flags & ECF_LIBCALL_BLOCK)
1377 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1379 enum machine_mode mode;
1381 /* If this is an addressable type, we cannot pre-evaluate it. */
1382 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1386 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1388 /* ANSI doesn't require a sequence point here,
1389 but PCC has one, so this will avoid some problems. */
1392 args[i].initial_value = args[i].value
1393 = protect_from_queue (args[i].value, 0);
1395 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1396 if (mode != args[i].mode)
1399 = convert_modes (args[i].mode, mode,
1400 args[i].value, args[i].unsignedp);
1401 #ifdef PROMOTE_FOR_CALL_ONLY
1402 /* CSE will replace this only if it contains args[i].value
1403 pseudo, so convert it down to the declared mode using a SUBREG. */
1405 if (GET_CODE (args[i].value) == REG
1406 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1408 args[i].initial_value
1409 = gen_lowpart_SUBREG (mode, args[i].value);
1410 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1411 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1419 /* Given the current state of MUST_PREALLOCATE and information about
1420 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1421 compute and return the final value for MUST_PREALLOCATE. */
1424 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1426 /* See if we have or want to preallocate stack space.
1428 If we would have to push a partially-in-regs parm
1429 before other stack parms, preallocate stack space instead.
1431 If the size of some parm is not a multiple of the required stack
1432 alignment, we must preallocate.
1434 If the total size of arguments that would otherwise create a copy in
1435 a temporary (such as a CALL) is more than half the total argument list
1436 size, preallocation is faster.
1438 Another reason to preallocate is if we have a machine (like the m88k)
1439 where stack alignment is required to be maintained between every
1440 pair of insns, not just when the call is made. However, we assume here
1441 that such machines either do not have push insns (and hence preallocation
1442 would occur anyway) or the problem is taken care of with PUSH_ROUNDING. */
1445 if (! must_preallocate)
1447 int partial_seen = 0;
1448 int copy_to_evaluate_size = 0;
1451 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1453 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1455 else if (partial_seen && args[i].reg == 0)
1456 must_preallocate = 1;
1458 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1459 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1460 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1461 || TREE_CODE (args[i].tree_value) == COND_EXPR
1462 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1463 copy_to_evaluate_size
1464 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1467 if (copy_to_evaluate_size * 2 >= args_size->constant
1468 && args_size->constant > 0)
1469 must_preallocate = 1;
1471 return must_preallocate;
1474 /* If we preallocated stack space, compute the address of each argument
1475 and store it into the ARGS array.
1477 We need not ensure it is a valid memory address here; it will be
1478 validized when it is used.
1480 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1483 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1487 rtx arg_reg = argblock;
1488 int i, arg_offset = 0;
1490 if (GET_CODE (argblock) == PLUS)
1491 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1493 for (i = 0; i < num_actuals; i++)
1495 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1496 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1499 /* Skip this parm if it will not be passed on the stack. */
1500 if (! args[i].pass_on_stack && args[i].reg != 0)
1503 if (GET_CODE (offset) == CONST_INT)
1504 addr = plus_constant (arg_reg, INTVAL (offset));
1506 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1508 addr = plus_constant (addr, arg_offset);
1509 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1510 set_mem_align (args[i].stack, PARM_BOUNDARY);
1511 set_mem_attributes (args[i].stack,
1512 TREE_TYPE (args[i].tree_value), 1);
1514 if (GET_CODE (slot_offset) == CONST_INT)
1515 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1517 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1519 addr = plus_constant (addr, arg_offset);
1520 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1521 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1522 set_mem_attributes (args[i].stack_slot,
1523 TREE_TYPE (args[i].tree_value), 1);
1525 /* Function incoming arguments may overlap with sibling call
1526 outgoing arguments and we cannot allow reordering of reads
1527 from function arguments with stores to outgoing arguments
1528 of sibling calls. */
1529 set_mem_alias_set (args[i].stack, 0);
1530 set_mem_alias_set (args[i].stack_slot, 0);
1535 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1536 in a call instruction.
1538 FNDECL is the tree node for the target function. For an indirect call
1539 FNDECL will be NULL_TREE.
1541 ADDR is the operand 0 of CALL_EXPR for this call. */
1544 rtx_for_function_call (tree fndecl, tree addr)
1548 /* Get the function to call, in the form of RTL. */
1551 /* If this is the first use of the function, see if we need to
1552 make an external definition for it. */
1553 if (! TREE_USED (fndecl))
1555 assemble_external (fndecl);
1556 TREE_USED (fndecl) = 1;
1559 /* Get a SYMBOL_REF rtx for the function address. */
1560 funexp = XEXP (DECL_RTL (fndecl), 0);
1563 /* Generate an rtx (probably a pseudo-register) for the address. */
1566 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1567 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1573 /* Do the register loads required for any wholly-register parms or any
1574 parms which are passed both on the stack and in a register. Their
1575 expressions were already evaluated.
1577 Mark all register-parms as living through the call, putting these USE
1578 insns in the CALL_INSN_FUNCTION_USAGE field.
1580 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1581 checking, setting *SIBCALL_FAILURE if appropriate. */
1584 load_register_parameters (struct arg_data *args, int num_actuals,
1585 rtx *call_fusage, int flags, int is_sibcall,
1586 int *sibcall_failure)
1590 #ifdef LOAD_ARGS_REVERSED
1591 for (i = num_actuals - 1; i >= 0; i--)
1593 for (i = 0; i < num_actuals; i++)
1596 rtx reg = ((flags & ECF_SIBCALL)
1597 ? args[i].tail_call_reg : args[i].reg);
1600 int partial = args[i].partial;
1603 rtx before_arg = get_last_insn ();
1604 /* Set to non-negative if we must move a word at a time, even if just
1605 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1606 we just use a normal move insn. This value can be zero if the
1607 argument is a zero size structure with no fields. */
1611 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1613 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1614 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1617 size = GET_MODE_SIZE (args[i].mode);
1619 /* Handle calls that pass values in multiple non-contiguous
1620 locations. The Irix 6 ABI has examples of this. */
1622 if (GET_CODE (reg) == PARALLEL)
1624 tree type = TREE_TYPE (args[i].tree_value);
1625 emit_group_load (reg, args[i].value, type,
1626 int_size_in_bytes (type));
1629 /* If simple case, just do move. If normal partial, store_one_arg
1630 has already loaded the register for us. In all other cases,
1631 load the register(s) from memory. */
1633 else if (nregs == -1)
1635 emit_move_insn (reg, args[i].value);
1636 #ifdef BLOCK_REG_PADDING
1637 /* Handle case where we have a value that needs shifting
1638 up to the msb, e.g. a QImode value and we're padding
1639 upward on a BYTES_BIG_ENDIAN machine. */
1640 if (size < UNITS_PER_WORD
1641 && (args[i].locate.where_pad
1642 == (BYTES_BIG_ENDIAN ? upward : downward)))
1645 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
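/* E.g. (illustrative sizes) a 1-byte value with 4-byte words gives
   shift == (4 - 1) * 8 == 24, which moves the value up to the most
   significant byte of the word as described above.  */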
1647 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1648 report the whole reg as used. Strictly speaking, the
1649 call only uses SIZE bytes at the msb end, but it doesn't
1650 seem worth generating rtl to say that. */
1651 reg = gen_rtx_REG (word_mode, REGNO (reg));
1652 x = expand_binop (word_mode, ashl_optab, reg,
1653 GEN_INT (shift), reg, 1, OPTAB_WIDEN);
1655 emit_move_insn (reg, x);
1660 /* If we have pre-computed the values to put in the registers in
1661 the case of non-aligned structures, copy them in now. */
1663 else if (args[i].n_aligned_regs != 0)
1664 for (j = 0; j < args[i].n_aligned_regs; j++)
1665 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1666 args[i].aligned_regs[j]);
1668 else if (partial == 0 || args[i].pass_on_stack)
1670 rtx mem = validize_mem (args[i].value);
1672 #ifdef BLOCK_REG_PADDING
1673 /* Handle a BLKmode that needs shifting. */
1674 if (nregs == 1 && size < UNITS_PER_WORD
1675 && args[i].locate.where_pad == downward)
1677 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1678 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1679 rtx x = gen_reg_rtx (word_mode);
1680 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1681 optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;
1683 emit_move_insn (x, tem);
1684 x = expand_binop (word_mode, dir, x, GEN_INT (shift),
1685 ri, 1, OPTAB_WIDEN);
1687 emit_move_insn (ri, x);
1691 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1694 /* When a parameter is a block, and perhaps in other cases, it is
1695 possible that it did a load from an argument slot that was
1696 already clobbered. */
1698 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1699 *sibcall_failure = 1;
1701 /* Handle calls that pass values in multiple non-contiguous
1702 locations. The Irix 6 ABI has examples of this. */
1703 if (GET_CODE (reg) == PARALLEL)
1704 use_group_regs (call_fusage, reg);
1705 else if (nregs == -1)
1706 use_reg (call_fusage, reg);
1708 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1713 /* Try to integrate function. See expand_inline_function for documentation
1714 about the parameters. */
1717 try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
1718 tree type, rtx structure_value_addr)
1723 rtx old_stack_level = 0;
1724 int reg_parm_stack_space = 0;
1726 #ifdef REG_PARM_STACK_SPACE
1727 #ifdef MAYBE_REG_PARM_STACK_SPACE
1728 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1730 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1734 before_call = get_last_insn ();
1736 timevar_push (TV_INTEGRATION);
1738 temp = expand_inline_function (fndecl, actparms, target,
1740 structure_value_addr);
1742 timevar_pop (TV_INTEGRATION);
1744 /* If inlining succeeded, return. */
1745 if (temp != (rtx) (size_t) - 1)
1747 if (ACCUMULATE_OUTGOING_ARGS)
1749 /* If the outgoing argument list must be preserved, push
1750 the stack before executing the inlined function if it makes any calls. */
1753 i = reg_parm_stack_space;
1754 if (i > highest_outgoing_arg_in_use)
1755 i = highest_outgoing_arg_in_use;
1756 while (--i >= 0 && stack_usage_map[i] == 0)
1759 if (stack_arg_under_construction || i >= 0)
1762 = before_call ? NEXT_INSN (before_call) : get_insns ();
1763 rtx insn = NULL_RTX, seq;
1765 /* Look for a call in the inline function code.
1766 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1767 nonzero then there is a call and it is not necessary
1768 to scan the insns. */
1770 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1771 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1772 if (GET_CODE (insn) == CALL_INSN)
1777 /* Reserve enough stack space so that the largest
1778 argument list of any function call in the inline
1779 function does not overlap the argument list being
1780 evaluated. This is usually an overestimate because
1781 allocate_dynamic_stack_space reserves space for an
1782 outgoing argument list in addition to the requested
1783 space, but there is no way to ask for stack space such
1784 that an argument list of a certain length can be
1787 Add the stack space reserved for register arguments, if
1788 any, in the inline function. What is really needed is the
1789 largest value of reg_parm_stack_space in the inline
1790 function, but that is not available. Using the current
1791 value of reg_parm_stack_space is wrong, but gives
1792 correct results on all supported machines. */
1794 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1795 + reg_parm_stack_space);
1798 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1799 allocate_dynamic_stack_space (GEN_INT (adjust),
1800 NULL_RTX, BITS_PER_UNIT);
1803 emit_insn_before (seq, first_insn);
1804 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1809 /* If the result is equivalent to TARGET, return TARGET to simplify
1810 checks in store_expr. They can be equivalent but not equal in the
1811 case of a function that returns BLKmode. */
1812 if (temp != target && rtx_equal_p (temp, target))
1817 /* If inlining failed, mark FNDECL as needing to be compiled
1818 separately after all. If function was declared inline, give a warning. */
1820 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1821 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1823 warning ("%Hinlining failed in call to '%F'",
1824 &DECL_SOURCE_LOCATION (fndecl), fndecl);
1825 warning ("called from here");
1827 (*lang_hooks.mark_addressable) (fndecl);
1828 return (rtx) (size_t) - 1;
1831 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1832 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1833 bytes, then we would need to push some additional bytes to pad the
1834 arguments. So, we compute an adjust to the stack pointer for an
1835 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1836 bytes. Then, when the arguments are pushed the stack will be perfectly
1837 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1838 be popped after the call. Returns the adjustment. */
1841 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1842 struct args_size *args_size,
1843 int preferred_unit_stack_boundary)
1845 /* The number of bytes to pop so that the stack will be
1846 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1847 HOST_WIDE_INT adjustment;
1848 /* The alignment of the stack after the arguments are pushed, if we
1849 just pushed the arguments without adjusting the stack here. */
1850 HOST_WIDE_INT unadjusted_alignment;
1852 unadjusted_alignment
1853 = ((stack_pointer_delta + unadjusted_args_size)
1854 % preferred_unit_stack_boundary);
1856 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1857 as possible -- leaving just enough left to cancel out the
1858 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1859 PENDING_STACK_ADJUST is non-negative, and congruent to
1860 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1862 /* Begin by trying to pop all the bytes. */
1863 unadjusted_alignment
1864 = (unadjusted_alignment
1865 - (pending_stack_adjust % preferred_unit_stack_boundary));
1866 adjustment = pending_stack_adjust;
1867 /* Push enough additional bytes that the stack will be aligned
1868 after the arguments are pushed. */
1869 if (preferred_unit_stack_boundary > 1)
1871 if (unadjusted_alignment > 0)
1872 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1874 adjustment += unadjusted_alignment;
1877 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1878 bytes after the call. The right number is the entire
1879 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1880 by the arguments in the first place. */
1882 = pending_stack_adjust - adjustment + unadjusted_args_size;
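/* As a concrete (hypothetical) example of the computation above: with
   stack_pointer_delta == 0, unadjusted_args_size == 12,
   preferred_unit_stack_boundary == 16 and pending_stack_adjust == 20,
   UNADJUSTED_ALIGNMENT starts at 12, becomes 12 - (20 % 16) == 8, and the
   initial ADJUSTMENT of 20 is reduced by 16 - 8 to 12.  Popping 12 bytes
   and then pushing the 12 bytes of arguments leaves the stack 16-byte
   aligned, and ARGS_SIZE->CONSTANT becomes 20 - 12 + 12 == 20 bytes to be
   popped after the call.  */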
1887 /* Scan expression X to see whether it dereferences any argument slots
1888 we already clobbered by tail call arguments (as noted in the stored_args_map
1890 bitmap).  Return nonzero if X dereferences such an argument slot, zero otherwise.  */
1894 check_sibcall_argument_overlap_1 (rtx x)
1904 code = GET_CODE (x);
1908 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1910 else if (GET_CODE (XEXP (x, 0)) == PLUS
1911 && XEXP (XEXP (x, 0), 0) ==
1912 current_function_internal_arg_pointer
1913 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1914 i = INTVAL (XEXP (XEXP (x, 0), 1));
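/* For instance (hypothetical RTL), a reference such as
   (mem:SI (plus (reg) (const_int 8))) -- where the register is
   current_function_internal_arg_pointer -- reads incoming-argument
   bytes 8..11, so it overlaps when any of those bits is already set in
   STORED_ARGS_MAP (assuming arguments do not grow downward).  */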
1918 #ifdef ARGS_GROW_DOWNWARD
1919 i = -i - GET_MODE_SIZE (GET_MODE (x));
1922 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1923 if (i + k < stored_args_map->n_bits
1924 && TEST_BIT (stored_args_map, i + k))
1930 /* Scan all subexpressions. */
1931 fmt = GET_RTX_FORMAT (code);
1932 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1936 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1939 else if (*fmt == 'E')
1941 for (j = 0; j < XVECLEN (x, i); j++)
1942 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1949 /* Scan the sequence after INSN to see whether it dereferences any argument
1950 slots we already clobbered by tail call arguments (as noted in the
1951 stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, afterwards add
1952 the stack slots for ARG to the stored_args_map bitmap (when ARG is a
1953 register, MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
1954 sequence after INSN dereferences such argument slots, zero otherwise.  */
1957 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1961 if (insn == NULL_RTX)
1962 insn = get_insns ();
1964 insn = NEXT_INSN (insn);
1966 for (; insn; insn = NEXT_INSN (insn))
1968 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1971 if (mark_stored_args_map)
1973 #ifdef ARGS_GROW_DOWNWARD
1974 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1976 low = arg->locate.slot_offset.constant;
1979 for (high = low + arg->locate.size.constant; low < high; low++)
1980 SET_BIT (stored_args_map, low);
1982 return insn != NULL_RTX;
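/* Continuing that (hypothetical) example: once an argument whose slot
   starts at offset 8 and occupies 4 bytes has been stored, bits 8..11 of
   STORED_ARGS_MAP are set, and any later argument whose computation reads
   those bytes makes this function return nonzero, which the caller treats
   as a sibcall failure.  */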
1986 fix_unsafe_tree (tree t)
1988 switch (unsafe_for_reeval (t))
1993 case 1: /* Mildly unsafe. */
1994 t = unsave_expr (t);
1997 case 2: /* Wildly unsafe. */
1999 tree var = build_decl (VAR_DECL, NULL_TREE,
2002 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
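/* So a mildly unsafe argument is merely wrapped in an UNSAVE_EXPR, while
   a wildly unsafe one is expanded right here, once, and replaced by a
   temporary VAR_DECL whose DECL_RTL holds the result.  */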
2013 /* Generate all the code for a function call
2014 and return an rtx for its value.
2015 Store the value in TARGET (specified as an rtx) if convenient.
2016 If the value is stored in TARGET then TARGET is returned.
2017 If IGNORE is nonzero, then we ignore the value of the function call. */
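/* A minimal sketch of how this is typically reached (assuming the usual
   expander path through expr.c): when expand_expr meets a CALL_EXPR it
   ends up doing roughly

       rtx result = expand_call (exp, target, ignore);

   with TARGET possibly 0 when the caller expresses no preference.  */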
2020 expand_call (tree exp, rtx target, int ignore)
2022 /* Nonzero if we are currently expanding a call. */
2023 static int currently_expanding_call = 0;
2025 /* List of actual parameters. */
2026 tree actparms = TREE_OPERAND (exp, 1);
2027 /* RTX for the function to be called. */
2029 /* Sequence of insns to perform a tail recursive "call". */
2030 rtx tail_recursion_insns = NULL_RTX;
2031 /* Sequence of insns to perform a normal "call". */
2032 rtx normal_call_insns = NULL_RTX;
2033 /* Sequence of insns to perform a tail recursive "call". */
2034 rtx tail_call_insns = NULL_RTX;
2035 /* Data type of the function. */
2037 tree type_arg_types;
2038 /* Declaration of the function being called,
2039 or 0 if the function is computed (not known by name). */
2042 int try_tail_call = 1;
2043 int try_tail_recursion = 1;
2046 /* Register in which non-BLKmode value will be returned,
2047 or 0 if no value or if value is BLKmode. */
2049 /* Address where we should return a BLKmode value;
2050 0 if value not BLKmode. */
2051 rtx structure_value_addr = 0;
2052 /* Nonzero if that address is being passed by treating it as
2053 an extra, implicit first parameter. Otherwise,
2054 it is passed by being copied directly into struct_value_rtx. */
2055 int structure_value_addr_parm = 0;
2056 /* Size of aggregate value wanted, or zero if none wanted
2057 or if we are using the non-reentrant PCC calling convention
2058 or expecting the value in registers. */
2059 HOST_WIDE_INT struct_value_size = 0;
2060 /* Nonzero if called function returns an aggregate in memory PCC style,
2061 by returning the address of where to find it. */
2062 int pcc_struct_value = 0;
2064 /* Number of actual parameters in this call, including struct value addr. */
2066 /* Number of named args. Args after this are anonymous ones
2067 and they must all go on the stack. */
2070 /* Vector of information about each argument.
2071 Arguments are numbered in the order they will be pushed,
2072 not the order they are written. */
2073 struct arg_data *args;
2075 /* Total size in bytes of all the stack-parms scanned so far. */
2076 struct args_size args_size;
2077 struct args_size adjusted_args_size;
2078 /* Size of arguments before any adjustments (such as rounding). */
2079 int unadjusted_args_size;
2080 /* Data on reg parms scanned so far. */
2081 CUMULATIVE_ARGS args_so_far;
2082 /* Nonzero if a reg parm has been scanned. */
2084 /* Nonzero if this is an indirect function call. */
2086 /* Nonzero if we must avoid push-insns in the args for this call.
2087 If stack space is allocated for register parameters, but not by the
2088 caller, then it is preallocated in the fixed part of the stack frame.
2089 So the entire argument block must then be preallocated (i.e., we
2090 ignore PUSH_ROUNDING in that case). */
2092 int must_preallocate = !PUSH_ARGS;
2094 /* Size of the stack reserved for parameter registers. */
2095 int reg_parm_stack_space = 0;
2097 /* Address of space preallocated for stack parms
2098 (on machines that lack push insns), or 0 if space not preallocated. */
2101 /* Mask of ECF_ flags. */
2103 /* Nonzero if this is a call to an inline function. */
2104 int is_integrable = 0;
2105 #ifdef REG_PARM_STACK_SPACE
2106 /* Define the boundary of the register parm stack space that needs to be
2108 int low_to_save, high_to_save;
2109 rtx save_area = 0; /* Place that it is saved */
2112 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2113 char *initial_stack_usage_map = stack_usage_map;
2115 int old_stack_allocated;
2117 /* State variables to track stack modifications. */
2118 rtx old_stack_level = 0;
2119 int old_stack_arg_under_construction = 0;
2120 int old_pending_adj = 0;
2121 int old_inhibit_defer_pop = inhibit_defer_pop;
2123 /* Some stack pointer alterations we make are performed via
2124 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2125 which we then also need to save/restore along the way. */
2126 int old_stack_pointer_delta = 0;
2129 tree p = TREE_OPERAND (exp, 0);
2130 tree addr = TREE_OPERAND (exp, 0);
2132 /* The alignment of the stack, in bits. */
2133 HOST_WIDE_INT preferred_stack_boundary;
2134 /* The alignment of the stack, in bytes. */
2135 HOST_WIDE_INT preferred_unit_stack_boundary;
2137 /* See if this is "nothrow" function call. */
2138 if (TREE_NOTHROW (exp))
2139 flags |= ECF_NOTHROW;
2141 /* See if we can find a DECL-node for the actual function.
2142 As a result, decide whether this is a call to an integrable function. */
2144 fndecl = get_callee_fndecl (exp);
2148 && fndecl != current_function_decl
2149 && DECL_INLINE (fndecl)
2150 && DECL_SAVED_INSNS (fndecl)
2151 && DECL_SAVED_INSNS (fndecl)->inlinable)
2153 else if (! TREE_ADDRESSABLE (fndecl))
2155 /* In case this function later becomes inlinable,
2156 record that there was already a non-inline call to it.
2158 Use abstraction instead of setting TREE_ADDRESSABLE
2160 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2163 warning ("%Hcan't inline call to '%F'",
2164 &DECL_SOURCE_LOCATION (fndecl), fndecl);
2165 warning ("called from here");
2167 (*lang_hooks.mark_addressable) (fndecl);
2170 flags |= flags_from_decl_or_type (fndecl);
2173 /* If we don't have a specific function to call, see if we have any
2174 attributes set in the function type.  */
2176 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2178 /* Warn if this value is an aggregate type,
2179 regardless of which calling convention we are using for it. */
2180 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2181 warning ("function call has aggregate value");
2183 /* If the result of a pure or const function call is ignored (or void),
2184 and none of its arguments are volatile, we can avoid expanding the
2185 call and just evaluate the arguments for side-effects. */
2186 if ((flags & (ECF_CONST | ECF_PURE))
2187 && (ignore || target == const0_rtx
2188 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2190 bool volatilep = false;
2193 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2194 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2202 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2203 expand_expr (TREE_VALUE (arg), const0_rtx,
2204 VOIDmode, EXPAND_NORMAL);
2209 #ifdef REG_PARM_STACK_SPACE
2210 #ifdef MAYBE_REG_PARM_STACK_SPACE
2211 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2213 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2217 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2218 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2219 must_preallocate = 1;
2222 /* Set up a place to return a structure. */
2224 /* Cater to broken compilers. */
2225 if (aggregate_value_p (exp))
2227 /* This call returns a big structure. */
2228 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2230 #ifdef PCC_STATIC_STRUCT_RETURN
2232 pcc_struct_value = 1;
2233 /* Easier than making that case work right. */
2236 /* In case this is a static function, note that it has been used.  */
2238 if (! TREE_ADDRESSABLE (fndecl))
2239 (*lang_hooks.mark_addressable) (fndecl);
2243 #else /* not PCC_STATIC_STRUCT_RETURN */
2245 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2247 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2249 /* The structure value address arg is already in actparms.
2250 Pull it out. It might be nice to just leave it there, but
2251 we need to set structure_value_addr. */
2252 tree return_arg = TREE_VALUE (actparms);
2253 actparms = TREE_CHAIN (actparms);
2254 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2255 VOIDmode, EXPAND_NORMAL);
2257 else if (target && GET_CODE (target) == MEM)
2258 structure_value_addr = XEXP (target, 0);
2261 /* For variable-sized objects, we must be called with a target
2262 specified. If we were to allocate space on the stack here,
2263 we would have no way of knowing when to free it. */
2264 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2266 mark_temp_addr_taken (d);
2267 structure_value_addr = XEXP (d, 0);
2271 #endif /* not PCC_STATIC_STRUCT_RETURN */
2274 /* If called function is inline, try to integrate it. */
2278 rtx temp = try_to_integrate (fndecl, actparms, target,
2279 ignore, TREE_TYPE (exp),
2280 structure_value_addr);
2281 if (temp != (rtx) (size_t) - 1)
2285 /* Figure out the amount to which the stack should be aligned. */
2286 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2289 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2290 if (i && i->preferred_incoming_stack_boundary)
2291 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2294 /* Operand 0 is a pointer-to-function; get the type of the function. */
2295 funtype = TREE_TYPE (addr);
2296 if (! POINTER_TYPE_P (funtype))
2298 funtype = TREE_TYPE (funtype);
2300 /* Munge the tree to split complex arguments into their imaginary
2302 if (SPLIT_COMPLEX_ARGS)
2304 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2305 actparms = split_complex_values (actparms);
2308 type_arg_types = TYPE_ARG_TYPES (funtype);
2310 /* See if this is a call to a function that can return more than once
2311 or a call to longjmp or malloc. */
2312 flags |= special_function_p (fndecl, flags);
2314 if (flags & ECF_MAY_BE_ALLOCA)
2315 current_function_calls_alloca = 1;
2317 /* If struct_value_rtx is 0, it means pass the address
2318 as if it were an extra parameter. */
2319 if (structure_value_addr && struct_value_rtx == 0)
2321 /* If structure_value_addr is a REG other than
2322 virtual_outgoing_args_rtx, we can always use it.  If it
2323 is not a REG, we must always copy it into a register.
2324 If it is virtual_outgoing_args_rtx, we must copy it to another
2325 register in some cases. */
2326 rtx temp = (GET_CODE (structure_value_addr) != REG
2327 || (ACCUMULATE_OUTGOING_ARGS
2328 && stack_arg_under_construction
2329 && structure_value_addr == virtual_outgoing_args_rtx)
2330 ? copy_addr_to_reg (structure_value_addr)
2331 : structure_value_addr);
2334 = tree_cons (error_mark_node,
2335 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2338 structure_value_addr_parm = 1;
2341 /* Count the arguments and set NUM_ACTUALS. */
2342 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2345 /* Compute number of named args.
2346 Normally, don't include the last named arg if anonymous args follow.
2347 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2348 (If no anonymous args follow, the result of list_length is actually
2349 one too large. This is harmless.)
2351 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2352 zero, this machine will be able to place unnamed args that were
2353 passed in registers into the stack. So treat all args as named.
2354 This allows the insns emitted for a specific argument list to be
2355 independent of the function declaration.
2357 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2358 reliable way to pass unnamed args in registers, so we must force
2359 them into memory. */
2361 if ((STRICT_ARGUMENT_NAMING
2362 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2363 && type_arg_types != 0)
2365 = (list_length (type_arg_types)
2366 /* Don't include the last named arg. */
2367 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2368 /* Count the struct value address, if it is passed as a parm. */
2369 + structure_value_addr_parm);
2371 /* If we know nothing, treat all args as named. */
2372 n_named_args = num_actuals;
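/* A worked (hypothetical) example, assuming the first branch above is
   used and STRICT_ARGUMENT_NAMING is zero: for a variadic prototype
   int f (const char *, ...) called with three actual arguments,
   TYPE_ARG_TYPES lists only the const char *, so n_named_args is
   1 - 1 == 0 (plus the struct value parm, if any); for a fully prototyped
   int g (int, int), the list ends with void_list_node, giving
   3 - 1 == 2.  */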
2374 /* Start updating where the next arg would go.
2376 On some machines (such as the PA) indirect calls have a different
2377 calling convention than normal calls. The last argument in
2378 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not.  */
2380 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2382 /* Make a vector to hold all the information about each arg. */
2383 args = alloca (num_actuals * sizeof (struct arg_data));
2384 memset (args, 0, num_actuals * sizeof (struct arg_data));
2386 /* Build up entries in the ARGS array, compute the size of the
2387 arguments into ARGS_SIZE, etc. */
2388 initialize_argument_information (num_actuals, args, &args_size,
2389 n_named_args, actparms, fndecl,
2390 &args_so_far, reg_parm_stack_space,
2391 &old_stack_level, &old_pending_adj,
2392 &must_preallocate, &flags);
2396 /* If this function requires a variable-sized argument list, don't
2397 try to make a cse'able block for this call. We may be able to
2398 do this eventually, but it is too complicated to keep track of
2399 what insns go in the cse'able block and which don't. */
2401 flags &= ~ECF_LIBCALL_BLOCK;
2402 must_preallocate = 1;
2405 /* Now make final decision about preallocating stack space. */
2406 must_preallocate = finalize_must_preallocate (must_preallocate,
2410 /* If the structure value address will reference the stack pointer, we
2411 must stabilize it. We don't need to do this if we know that we are
2412 not going to adjust the stack pointer in processing this call. */
2414 if (structure_value_addr
2415 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2416 || reg_mentioned_p (virtual_outgoing_args_rtx,
2417 structure_value_addr))
2419 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2420 structure_value_addr = copy_to_reg (structure_value_addr);
2422 /* Tail calls can make things harder to debug, and we've traditionally
2423 pushed these optimizations into -O2.  Don't try if we're already
2424 expanding a call, as that means we're an argument.  Don't try if
2425 there are cleanups, as we know there's code to follow the call.
2427 If rtx_equal_function_value_matters is false, that means we've
2428 finished with regular parsing. Which means that some of the
2429 machinery we use to generate tail-calls is no longer in place.
2430 This is most often true of sjlj-exceptions, which we couldn't
2431 tail-call to anyway. */
2433 if (currently_expanding_call++ != 0
2434 || !flag_optimize_sibling_calls
2435 || !rtx_equal_function_value_matters
2436 || any_pending_cleanups ()
2438 try_tail_call = try_tail_recursion = 0;
2440 /* Tail recursion fails when we are not dealing with recursive calls.  */
2441 if (!try_tail_recursion
2442 || TREE_CODE (addr) != ADDR_EXPR
2443 || TREE_OPERAND (addr, 0) != current_function_decl)
2444 try_tail_recursion = 0;
2446 /* Other reasons for tail call optimization to fail.  */
2448 #ifdef HAVE_sibcall_epilogue
2449 !HAVE_sibcall_epilogue
2454 /* Doing sibling call optimization needs some work, since
2455 structure_value_addr can be allocated on the stack.
2456 It does not seem worth the effort since few optimizable
2457 sibling calls will return a structure. */
2458 || structure_value_addr != NULL_RTX
2459 /* Check whether the target is able to optimize the call
2461 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2462 /* Functions that do not return exactly once may not be sibcall
2464 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2465 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2466 /* If the called function is nested in the current one, it might access
2467 some of the caller's arguments, but could clobber them beforehand if
2468 the argument areas are shared. */
2469 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2470 /* If this function requires more stack slots than the current
2471 function, we cannot change it into a sibling call. */
2472 || args_size.constant > current_function_args_size
2473 /* If the callee pops its own arguments, then it must pop exactly
2474 the same number of arguments as the current function. */
2475 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2476 != RETURN_POPS_ARGS (current_function_decl,
2477 TREE_TYPE (current_function_decl),
2478 current_function_args_size))
2479 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2482 if (try_tail_call || try_tail_recursion)
2485 actparms = NULL_TREE;
2486 /* Ok, we're going to give the tail call the old college try.
2487 This means we're going to evaluate the function arguments
2488 up to three times. There are two degrees of badness we can
2489 encounter, those that can be unsaved and those that can't.
2490 (See unsafe_for_reeval commentary for details.)
2492 Generate a new argument list. Pass safe arguments through
2493 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2494 For hard badness, evaluate them now and put their resulting
2495 rtx in a temporary VAR_DECL.
2497 initialize_argument_information has ordered the array for the
2498 order to be pushed, and we must remember this when reconstructing
2499 the original argument order. */
2501 if (PUSH_ARGS_REVERSED)
2510 i = num_actuals - 1;
2514 for (; i != end; i += inc)
2516 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2517 /* We need to build actparms for optimize_tail_recursion. We can
2518 safely trash away TREE_PURPOSE, since it is unused by this function.  */
2520 if (try_tail_recursion)
2521 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2523 /* Do the same for the function address if it is an expression. */
2525 addr = fix_unsafe_tree (addr);
2526 /* Expanding one of those dangerous arguments could have added
2527 cleanups, but otherwise give it a whirl. */
2528 if (any_pending_cleanups ())
2529 try_tail_call = try_tail_recursion = 0;
2532 /* Generate a tail recursion sequence when calling ourselves. */
2534 if (try_tail_recursion)
2536 /* We want to emit any pending stack adjustments before the tail
2537 recursion "call". That way we know any adjustment after the tail
2538 recursion call can be ignored if we indeed use the tail recursion
2540 int save_pending_stack_adjust = pending_stack_adjust;
2541 int save_stack_pointer_delta = stack_pointer_delta;
2543 /* Emit any queued insns now; otherwise they would end up in
2544 only one of the alternates. */
2547 /* Use a new sequence to hold any RTL we generate. We do not even
2548 know if we will use this RTL yet. The final decision can not be
2549 made until after RTL generation for the entire function is
2552 /* If expanding any of the arguments creates cleanups, we can't
2553 do a tailcall. So, we'll need to pop the pending cleanups
2554 list. If, however, all goes well, and there are no cleanups
2555 then the call to expand_start_target_temps will have no
2557 expand_start_target_temps ();
2558 if (optimize_tail_recursion (actparms, get_last_insn ()))
2560 if (any_pending_cleanups ())
2561 try_tail_call = try_tail_recursion = 0;
2563 tail_recursion_insns = get_insns ();
2565 expand_end_target_temps ();
2568 /* Restore the original pending stack adjustment for the sibling and
2569 normal call cases below. */
2570 pending_stack_adjust = save_pending_stack_adjust;
2571 stack_pointer_delta = save_stack_pointer_delta;
2574 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2576 /* A fork duplicates the profile information, and an exec discards
2577 it. We can't rely on fork/exec to be paired. So write out the
2578 profile information we have gathered so far, and clear it. */
2579 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2580 is subject to race conditions, just as with multithreaded
2583 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2586 /* Ensure current function's preferred stack boundary is at least
2587 what we need. We don't have to increase alignment for recursive
2589 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2590 && fndecl != current_function_decl)
2591 cfun->preferred_stack_boundary = preferred_stack_boundary;
2592 if (fndecl == current_function_decl)
2593 cfun->recursive_call_emit = true;
2595 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2597 function_call_count++;
2599 /* We want to make two insn chains; one for a sibling call, the other
2600 for a normal call. We will select one of the two chains after
2601 initial RTL generation is complete. */
2602 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2604 int sibcall_failure = 0;
2605 /* We want to emit any pending stack adjustments before the tail
2606 recursion "call". That way we know any adjustment after the tail
2607 recursion call can be ignored if we indeed use the tail recursion
2609 int save_pending_stack_adjust = 0;
2610 int save_stack_pointer_delta = 0;
2612 rtx before_call, next_arg_reg;
2616 /* Emit any queued insns now; otherwise they would end up in
2617 only one of the alternates. */
2620 /* State variables we need to save and restore between
2622 save_pending_stack_adjust = pending_stack_adjust;
2623 save_stack_pointer_delta = stack_pointer_delta;
2626 flags &= ~ECF_SIBCALL;
2628 flags |= ECF_SIBCALL;
2630 /* Other state variables that we must reinitialize each time
2631 through the loop (that are not initialized by the loop itself). */
2635 /* Start a new sequence for the normal call case.
2637 From this point on, if the sibling call fails, we want to set
2638 sibcall_failure instead of continuing the loop. */
2643 /* We know at this point that there are not currently any
2644 pending cleanups. If, however, in the process of evaluating
2645 the arguments we were to create some, we'll need to be
2646 able to get rid of them. */
2647 expand_start_target_temps ();
2650 /* Don't let pending stack adjusts add up to too much.
2651 Also, do all pending adjustments now if there is any chance
2652 this might be a call to alloca or if we are expanding a sibling
2653 call sequence or if we are calling a function that is to return
2654 with stack pointer depressed. */
2655 if (pending_stack_adjust >= 32
2656 || (pending_stack_adjust > 0
2657 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2659 do_pending_stack_adjust ();
2661 /* When calling a const function, we must pop the stack args right away,
2662 so that the pop is deleted or moved with the call. */
2663 if (pass && (flags & ECF_LIBCALL_BLOCK))
2666 #ifdef FINAL_REG_PARM_STACK_SPACE
2667 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2670 /* Precompute any arguments as needed. */
2672 precompute_arguments (flags, num_actuals, args);
2674 /* Now we are about to start emitting insns that can be deleted
2675 if a libcall is deleted. */
2676 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2679 adjusted_args_size = args_size;
2680 /* Compute the actual size of the argument block required. The variable
2681 and constant sizes must be combined, the size may have to be rounded,
2682 and there may be a minimum required size. When generating a sibcall
2683 pattern, do not round up, since we'll be re-using whatever space our
2685 unadjusted_args_size
2686 = compute_argument_block_size (reg_parm_stack_space,
2687 &adjusted_args_size,
2689 : preferred_stack_boundary));
2691 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2693 /* The argument block when performing a sibling call is the
2694 incoming argument block. */
2697 argblock = virtual_incoming_args_rtx;
2699 #ifdef STACK_GROWS_DOWNWARD
2700 = plus_constant (argblock, current_function_pretend_args_size);
2702 = plus_constant (argblock, -current_function_pretend_args_size);
2704 stored_args_map = sbitmap_alloc (args_size.constant);
2705 sbitmap_zero (stored_args_map);
2708 /* If we have no actual push instructions, or shouldn't use them,
2709 make space for all args right now. */
2710 else if (adjusted_args_size.var != 0)
2712 if (old_stack_level == 0)
2714 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2715 old_stack_pointer_delta = stack_pointer_delta;
2716 old_pending_adj = pending_stack_adjust;
2717 pending_stack_adjust = 0;
2718 /* stack_arg_under_construction says whether a stack arg is
2719 being constructed at the old stack level. Pushing the stack
2720 gets a clean outgoing argument block. */
2721 old_stack_arg_under_construction = stack_arg_under_construction;
2722 stack_arg_under_construction = 0;
2724 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2728 /* Note that we must go through the motions of allocating an argument
2729 block even if the size is zero because we may be storing args
2730 in the area reserved for register arguments, which may be part of
2733 int needed = adjusted_args_size.constant;
2735 /* Store the maximum argument space used. It will be pushed by
2736 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2739 if (needed > current_function_outgoing_args_size)
2740 current_function_outgoing_args_size = needed;
2742 if (must_preallocate)
2744 if (ACCUMULATE_OUTGOING_ARGS)
2746 /* Since the stack pointer will never be pushed, it is
2747 possible for the evaluation of a parm to clobber
2748 something we have already written to the stack.
2749 Since most function calls on RISC machines do not use
2750 the stack, this is uncommon, but must work correctly.
2752 Therefore, we save any area of the stack that was already
2753 written and that we are using. Here we set up to do this
2754 by making a new stack usage map from the old one. The
2755 actual save will be done by store_one_arg.
2757 Another approach might be to try to reorder the argument
2758 evaluations to avoid this conflicting stack usage. */
2760 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2761 /* Since we will be writing into the entire argument area,
2762 the map must be allocated for its entire size, not just
2763 the part that is the responsibility of the caller. */
2764 needed += reg_parm_stack_space;
2767 #ifdef ARGS_GROW_DOWNWARD
2768 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2771 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2774 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2776 if (initial_highest_arg_in_use)
2777 memcpy (stack_usage_map, initial_stack_usage_map,
2778 initial_highest_arg_in_use);
2780 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2781 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2782 (highest_outgoing_arg_in_use
2783 - initial_highest_arg_in_use));
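/* A concrete (hypothetical) example: if 16 bytes of outgoing argument
   space were already in use (initial_highest_arg_in_use == 16) and this
   call needs 32, the fresh map covers 32 bytes, its first 16 bytes are
   copied from the old map and bytes 16..31 are cleared; store_one_arg
   later saves any marked bytes it is about to overwrite.  */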
2786 /* The address of the outgoing argument list must not be
2787 copied to a register here, because argblock would be left
2788 pointing to the wrong place after the call to
2789 allocate_dynamic_stack_space below. */
2791 argblock = virtual_outgoing_args_rtx;
2795 if (inhibit_defer_pop == 0)
2797 /* Try to reuse some or all of the pending_stack_adjust
2798 to get this space. */
2800 = (combine_pending_stack_adjustment_and_call
2801 (unadjusted_args_size,
2802 &adjusted_args_size,
2803 preferred_unit_stack_boundary));
2805 /* combine_pending_stack_adjustment_and_call computes
2806 an adjustment before the arguments are allocated.
2807 Account for them and see whether or not the stack
2808 needs to go up or down. */
2809 needed = unadjusted_args_size - needed;
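/* E.g. (hypothetical numbers, 16-byte boundary): with 12 bytes of
   arguments and no pending adjustment the call above returns -4 (push
   4 bytes of padding), so NEEDED becomes 16 and push_block allocates an
   aligned 16-byte block; with 28 bytes pending it returns 28, NEEDED
   becomes -16, and we simply release 16 bytes instead.  */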
2813 /* We're releasing stack space. */
2814 /* ??? We can avoid any adjustment at all if we're
2815 already aligned. FIXME. */
2816 pending_stack_adjust = -needed;
2817 do_pending_stack_adjust ();
2821 /* We need to allocate space. We'll do that in
2822 push_block below. */
2823 pending_stack_adjust = 0;
2826 /* Special case this because overhead of `push_block' in
2827 this case is non-trivial. */
2829 argblock = virtual_outgoing_args_rtx;
2832 argblock = push_block (GEN_INT (needed), 0, 0);
2833 #ifdef ARGS_GROW_DOWNWARD
2834 argblock = plus_constant (argblock, needed);
2838 /* We only really need to call `copy_to_reg' in the case
2839 where push insns are going to be used to pass ARGBLOCK
2840 to a function call in ARGS. In that case, the stack
2841 pointer changes value from the allocation point to the
2842 call point, and hence the value of
2843 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2844 as well always do it. */
2845 argblock = copy_to_reg (argblock);
2850 if (ACCUMULATE_OUTGOING_ARGS)
2852 /* The save/restore code in store_one_arg handles all
2853 cases except one: a constructor call (including a C
2854 function returning a BLKmode struct) to initialize
2856 if (stack_arg_under_construction)
2858 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2859 rtx push_size = GEN_INT (reg_parm_stack_space
2860 + adjusted_args_size.constant);
2862 rtx push_size = GEN_INT (adjusted_args_size.constant);
2864 if (old_stack_level == 0)
2866 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2868 old_stack_pointer_delta = stack_pointer_delta;
2869 old_pending_adj = pending_stack_adjust;
2870 pending_stack_adjust = 0;
2871 /* stack_arg_under_construction says whether a stack
2872 arg is being constructed at the old stack level.
2873 Pushing the stack gets a clean outgoing argument
2875 old_stack_arg_under_construction
2876 = stack_arg_under_construction;
2877 stack_arg_under_construction = 0;
2878 /* Make a new map for the new argument list. */
2879 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2880 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2881 highest_outgoing_arg_in_use = 0;
2883 allocate_dynamic_stack_space (push_size, NULL_RTX,
2887 /* If argument evaluation might modify the stack pointer,
2888 copy the address of the argument list to a register. */
2889 for (i = 0; i < num_actuals; i++)
2890 if (args[i].pass_on_stack)
2892 argblock = copy_addr_to_reg (argblock);
2897 compute_argument_addresses (args, argblock, num_actuals);
2899 /* If we push args individually in reverse order, perform stack alignment
2900 before the first push (the last arg). */
2901 if (PUSH_ARGS_REVERSED && argblock == 0
2902 && adjusted_args_size.constant != unadjusted_args_size)
2904 /* When the stack adjustment is pending, we get better code
2905 by combining the adjustments. */
2906 if (pending_stack_adjust
2907 && ! (flags & ECF_LIBCALL_BLOCK)
2908 && ! inhibit_defer_pop)
2910 pending_stack_adjust
2911 = (combine_pending_stack_adjustment_and_call
2912 (unadjusted_args_size,
2913 &adjusted_args_size,
2914 preferred_unit_stack_boundary));
2915 do_pending_stack_adjust ();
2917 else if (argblock == 0)
2918 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2919 - unadjusted_args_size));
2921 /* Now that the stack is properly aligned, pops can't safely
2922 be deferred during the evaluation of the arguments. */
2925 funexp = rtx_for_function_call (fndecl, addr);
2927 /* Figure out the register where the value, if any, will come back. */
2929 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2930 && ! structure_value_addr)
2932 if (pcc_struct_value)
2933 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2934 fndecl, (pass == 0));
2936 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2939 /* Precompute all register parameters. It isn't safe to compute anything
2940 once we have started filling any specific hard regs. */
2941 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2943 #ifdef REG_PARM_STACK_SPACE
2944 /* Save the fixed argument area if it's part of the caller's frame and
2945 is clobbered by argument setup for this call. */
2946 if (ACCUMULATE_OUTGOING_ARGS && pass)
2947 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2948 &low_to_save, &high_to_save);
2951 /* Now store (and compute if necessary) all non-register parms.
2952 These come before register parms, since they can require block-moves,
2953 which could clobber the registers used for register parms.
2954 Parms which have partial registers are not stored here,
2955 but we do preallocate space here if they want that. */
2957 for (i = 0; i < num_actuals; i++)
2958 if (args[i].reg == 0 || args[i].pass_on_stack)
2960 rtx before_arg = get_last_insn ();
2962 if (store_one_arg (&args[i], argblock, flags,
2963 adjusted_args_size.var != 0,
2964 reg_parm_stack_space)
2966 && check_sibcall_argument_overlap (before_arg,
2968 sibcall_failure = 1;
2971 /* If we have a parm that is passed in registers but not in memory
2972 and whose alignment does not permit a direct copy into registers,
2973 make a group of pseudos that correspond to each register that we
2975 if (STRICT_ALIGNMENT)
2976 store_unaligned_arguments_into_pseudos (args, num_actuals);
2978 /* Now store any partially-in-registers parm.
2979 This is the last place a block-move can happen. */
2981 for (i = 0; i < num_actuals; i++)
2982 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2984 rtx before_arg = get_last_insn ();
2986 if (store_one_arg (&args[i], argblock, flags,
2987 adjusted_args_size.var != 0,
2988 reg_parm_stack_space)
2990 && check_sibcall_argument_overlap (before_arg,
2992 sibcall_failure = 1;
2995 /* If we pushed args in forward order, perform stack alignment
2996 after pushing the last arg. */
2997 if (!PUSH_ARGS_REVERSED && argblock == 0)
2998 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2999 - unadjusted_args_size));
3001 /* If register arguments require space on the stack and stack space
3002 was not preallocated, allocate stack space here for arguments
3003 passed in registers. */
3004 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3005 if (!ACCUMULATE_OUTGOING_ARGS
3006 && must_preallocate == 0 && reg_parm_stack_space > 0)
3007 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3010 /* Pass the function the address in which to return a
3012 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3014 #ifdef POINTERS_EXTEND_UNSIGNED
3015 if (GET_MODE (structure_value_addr) != Pmode)
3016 structure_value_addr = convert_memory_address
3017 (Pmode, structure_value_addr);
3019 emit_move_insn (struct_value_rtx,
3021 force_operand (structure_value_addr,
3024 if (GET_CODE (struct_value_rtx) == REG)
3025 use_reg (&call_fusage, struct_value_rtx);
3028 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3029 reg_parm_seen, pass == 0);
3031 load_register_parameters (args, num_actuals, &call_fusage, flags,
3032 pass == 0, &sibcall_failure);
3034 /* Perform postincrements before actually calling the function. */
3037 /* Save a pointer to the last insn before the call, so that we can
3038 later safely search backwards to find the CALL_INSN. */
3039 before_call = get_last_insn ();
3041 /* Set up next argument register. For sibling calls on machines
3042 with register windows this should be the incoming register. */
3043 #ifdef FUNCTION_INCOMING_ARG
3045 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3049 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3052 /* All arguments and registers used for the call must be set up by
3055 /* Stack must be properly aligned now. */
3056 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3059 /* Generate the actual call instruction. */
3060 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3061 adjusted_args_size.constant, struct_value_size,
3062 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3063 flags, & args_so_far);
3065 /* If call is cse'able, make appropriate pair of reg-notes around it.
3066 Test valreg so we don't crash; may safely ignore `const'
3067 if return type is void. Disable for PARALLEL return values, because
3068 we have no way to move such values into a pseudo register. */
3069 if (pass && (flags & ECF_LIBCALL_BLOCK))
3073 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3075 insns = get_insns ();
3082 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3084 /* Mark the return value as a pointer if needed. */
3085 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3086 mark_reg_pointer (temp,
3087 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3089 /* Construct an "equal form" for the value which mentions all the
3090 arguments in order as well as the function name. */
3091 for (i = 0; i < num_actuals; i++)
3092 note = gen_rtx_EXPR_LIST (VOIDmode,
3093 args[i].initial_value, note);
3094 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3096 insns = get_insns ();
3099 if (flags & ECF_PURE)
3100 note = gen_rtx_EXPR_LIST (VOIDmode,
3101 gen_rtx_USE (VOIDmode,
3102 gen_rtx_MEM (BLKmode,
3103 gen_rtx_SCRATCH (VOIDmode))),
3106 emit_libcall_block (insns, temp, valreg, note);
3111 else if (pass && (flags & ECF_MALLOC))
3113 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3116 /* The return value from a malloc-like function is a pointer. */
3117 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3118 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3120 emit_move_insn (temp, valreg);
3122 /* The return value from a malloc-like function can not alias
3124 last = get_last_insn ();
3126 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3128 /* Write out the sequence. */
3129 insns = get_insns ();
3135 /* For calls to `setjmp', etc., inform flow.c it should complain
3136 if nonvolatile values are live. For functions that cannot return,
3137 inform flow that control does not fall through. */
3139 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3141 /* The barrier must be emitted
3142 immediately after the CALL_INSN. Some ports emit more
3143 than just a CALL_INSN above, so we must search for it here. */
3145 rtx last = get_last_insn ();
3146 while (GET_CODE (last) != CALL_INSN)
3148 last = PREV_INSN (last);
3149 /* There was no CALL_INSN? */
3150 if (last == before_call)
3154 emit_barrier_after (last);
3156 /* Stack adjustments after a noreturn call are dead code. */
3157 stack_pointer_delta = old_stack_allocated;
3158 pending_stack_adjust = 0;
3161 if (flags & ECF_LONGJMP)
3162 current_function_calls_longjmp = 1;
3164 /* If value type not void, return an rtx for the value. */
3166 /* If there are cleanups to be called, don't use a hard reg as target.
3167 We need to double check this and see if it matters anymore. */
3168 if (any_pending_cleanups ())
3170 if (target && REG_P (target)
3171 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3173 sibcall_failure = 1;
3176 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3178 target = const0_rtx;
3179 else if (structure_value_addr)
3181 if (target == 0 || GET_CODE (target) != MEM)
3184 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3185 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3186 structure_value_addr));
3187 set_mem_attributes (target, exp, 1);
3190 else if (pcc_struct_value)
3192 /* This is the special C++ case where we need to
3193 know what the true target was. We take care to
3194 never use this value more than once in one expression. */
3195 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3196 copy_to_reg (valreg));
3197 set_mem_attributes (target, exp, 1);
3199 /* Handle calls that return values in multiple non-contiguous locations.
3200 The Irix 6 ABI has examples of this. */
3201 else if (GET_CODE (valreg) == PARALLEL)
3205 /* This will only be assigned once, so it can be readonly. */
3206 tree nt = build_qualified_type (TREE_TYPE (exp),
3207 (TYPE_QUALS (TREE_TYPE (exp))
3208 | TYPE_QUAL_CONST));
3210 target = assign_temp (nt, 0, 1, 1);
3211 preserve_temp_slots (target);
3214 if (! rtx_equal_p (target, valreg))
3215 emit_group_store (target, valreg, TREE_TYPE (exp),
3216 int_size_in_bytes (TREE_TYPE (exp)));
3218 /* We can not support sibling calls for this case. */
3219 sibcall_failure = 1;
3222 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3223 && GET_MODE (target) == GET_MODE (valreg))
3225 /* TARGET and VALREG cannot be equal at this point because the
3226 latter would not have REG_FUNCTION_VALUE_P true, while the
3227 former would if it were referring to the same register.
3229 If they refer to the same register, this move will be a no-op,
3230 except when function inlining is being done. */
3231 emit_move_insn (target, valreg);
3233 /* If we are setting a MEM, this code must be executed. Since it is
3234 emitted after the call insn, sibcall optimization cannot be
3235 performed in that case. */
3236 if (GET_CODE (target) == MEM)
3237 sibcall_failure = 1;
3239 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3241 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3243 /* We can not support sibling calls for this case. */
3244 sibcall_failure = 1;
3247 target = copy_to_reg (valreg);
3249 #ifdef PROMOTE_FUNCTION_RETURN
3250 /* If we promoted this return value, make the proper SUBREG. TARGET
3251 might be const0_rtx here, so be careful. */
3252 if (GET_CODE (target) == REG
3253 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3254 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3256 tree type = TREE_TYPE (exp);
3257 int unsignedp = TREE_UNSIGNED (type);
3260 /* If we don't promote as expected, something is wrong. */
3261 if (GET_MODE (target)
3262 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3265 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3266 && GET_MODE_SIZE (GET_MODE (target))
3267 > GET_MODE_SIZE (TYPE_MODE (type)))
3269 offset = GET_MODE_SIZE (GET_MODE (target))
3270 - GET_MODE_SIZE (TYPE_MODE (type));
3271 if (! BYTES_BIG_ENDIAN)
3272 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3273 else if (! WORDS_BIG_ENDIAN)
3274 offset %= UNITS_PER_WORD;
3276 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3277 SUBREG_PROMOTED_VAR_P (target) = 1;
3278 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
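/* Concretely (hypothetical target): an SImode value promoted to a DImode
   register on a big-endian 64-bit machine gives offset == 8 - 4 == 4, so
   the SUBREG refers to bytes 4..7 of the register, which is where the
   promoted value's low part lives.  */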
3282 /* If size of args is variable or this was a constructor call for a stack
3283 argument, restore saved stack-pointer value. */
3285 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3287 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3288 stack_pointer_delta = old_stack_pointer_delta;
3289 pending_stack_adjust = old_pending_adj;
3290 stack_arg_under_construction = old_stack_arg_under_construction;
3291 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3292 stack_usage_map = initial_stack_usage_map;
3293 sibcall_failure = 1;
3295 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3297 #ifdef REG_PARM_STACK_SPACE
3299 restore_fixed_argument_area (save_area, argblock,
3300 high_to_save, low_to_save);
3303 /* If we saved any argument areas, restore them. */
3304 for (i = 0; i < num_actuals; i++)
3305 if (args[i].save_area)
3307 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3309 = gen_rtx_MEM (save_mode,
3310 memory_address (save_mode,
3311 XEXP (args[i].stack_slot, 0)));
3313 if (save_mode != BLKmode)
3314 emit_move_insn (stack_area, args[i].save_area);
3316 emit_block_move (stack_area, args[i].save_area,
3317 GEN_INT (args[i].locate.size.constant),
3318 BLOCK_OP_CALL_PARM);
3321 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3322 stack_usage_map = initial_stack_usage_map;
3325 /* If this was alloca, record the new stack level for nonlocal gotos.
3326 Check for the handler slots since we might not have a save area
3327 for non-local gotos. */
3329 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3330 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3332 /* Free up storage we no longer need. */
3333 for (i = 0; i < num_actuals; ++i)
3334 if (args[i].aligned_regs)
3335 free (args[i].aligned_regs);
3339 /* Undo the fake expand_start_target_temps we did earlier. If
3340 there had been any cleanups created, we've already set
3342 expand_end_target_temps ();
3345 /* If this function is returning into a memory location marked as
3346 readonly, it means it is initializing that location. We normally treat
3347 functions as not clobbering such locations, so we need to specify that
3348 this one does. We do this by adding the appropriate CLOBBER to the
3349 CALL_INSN function usage list. This cannot be done by emitting a
3350 standalone CLOBBER after the call because the latter would be ignored
3351 by at least the delay slot scheduling pass. We do this now instead of
3352 adding to call_fusage before the call to emit_call_1 because TARGET
3353 may be modified in the meantime. */
3354 if (structure_value_addr != 0 && target != 0
3355 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3356 add_function_usage_to
3358 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3361 insns = get_insns ();
3366 tail_call_insns = insns;
3368 /* Restore the pending stack adjustment now that we have
3369 finished generating the sibling call sequence. */
3371 pending_stack_adjust = save_pending_stack_adjust;
3372 stack_pointer_delta = save_stack_pointer_delta;
3374 /* Prepare arg structure for next iteration. */
3375 for (i = 0; i < num_actuals; i++)
3378 args[i].aligned_regs = 0;
3382 sbitmap_free (stored_args_map);
3386 normal_call_insns = insns;
3388 /* Verify that we've deallocated all the stack we used. */
3389 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3390 && old_stack_allocated != stack_pointer_delta
3391 - pending_stack_adjust)
3395 /* If something prevents making this a sibling call,
3396 zero out the sequence. */
3397 if (sibcall_failure)
3398 tail_call_insns = NULL_RTX;
3401 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3402 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3403 can happen if the arguments to this function call an inline
3404 function whose expansion contains another CALL_PLACEHOLDER.
3406 If there are any C_Ps in any of these sequences, replace them
3407 with their normal call. */
3409 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3410 if (GET_CODE (insn) == CALL_INSN
3411 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3412 replace_call_placeholder (insn, sibcall_use_normal);
3414 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3415 if (GET_CODE (insn) == CALL_INSN
3416 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3417 replace_call_placeholder (insn, sibcall_use_normal);
3419 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3420 if (GET_CODE (insn) == CALL_INSN
3421 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3422 replace_call_placeholder (insn, sibcall_use_normal);
3424 /* If this was a potential tail recursion site, then emit a
3425 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3426 One of them will be selected later. */
3427 if (tail_recursion_insns || tail_call_insns)
3429 /* The tail recursion label must be kept around. We could expose
3430 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3431 and makes determining true tail recursion sites difficult.
3433 So we set LABEL_PRESERVE_P here, then clear it when we select
3434 one of the call sequences after rtl generation is complete. */
3435 if (tail_recursion_insns)
3436 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3437 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3439 tail_recursion_insns,
3440 tail_recursion_label));
3443 emit_insn (normal_call_insns);
3445 currently_expanding_call--;
3447 /* If this function returns with the stack pointer depressed, ensure
3448 this block saves and restores the stack pointer, show it was
3449 changed, and adjust for any outgoing arg space. */
3450 if (flags & ECF_SP_DEPRESSED)
3452 clear_pending_stack_adjust ();
3453 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3454 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3455 save_stack_pointer ();
3461 /* Traverse an argument list in VALUES and expand all complex
3462 arguments into their components. */
3464 split_complex_values (tree values)
3468 values = copy_list (values);
3470 for (p = values; p; p = TREE_CHAIN (p))
3472 tree complex_value = TREE_VALUE (p);
3475 complex_type = TREE_TYPE (complex_value);
3479 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3482 tree real, imag, next;
3484 subtype = TREE_TYPE (complex_type);
3485 complex_value = save_expr (complex_value);
3486 real = build1 (REALPART_EXPR, subtype, complex_value);
3487 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3489 TREE_VALUE (p) = real;
3490 next = TREE_CHAIN (p);
3491 imag = build_tree_list (NULL_TREE, imag);
3492 TREE_CHAIN (p) = imag;
3493 TREE_CHAIN (imag) = next;
3495 /* Skip the newly created node. */
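/* For example (hypothetical list): an argument list (c, n) where C has
   COMPLEX_TYPE becomes (REALPART_EXPR <c'>, IMAGPART_EXPR <c'>, n), with
   c' the save_expr of C, so C itself is evaluated only once.  */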
3503 /* Traverse a list of TYPES and expand all complex types into their
3506 split_complex_types (tree types)
3510 types = copy_list (types);
3512 for (p = types; p; p = TREE_CHAIN (p))
3514 tree complex_type = TREE_VALUE (p);
3516 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3520 /* Rewrite complex type with component type. */
3521 TREE_VALUE (p) = TREE_TYPE (complex_type);
3522 next = TREE_CHAIN (p);
3524 /* Add another component type for the imaginary part. */
3525 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3526 TREE_CHAIN (p) = imag;
3527 TREE_CHAIN (imag) = next;
3529 /* Skip the newly created node. */
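/* Likewise for the type list: (complex double, int) becomes
   (double, double, int), matching the rewritten value list above.  */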
3537 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3538 The RETVAL parameter specifies whether the return value needs to be saved;
3539 the other parameters are documented in the emit_library_call function below.  */
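/* A typical (hypothetical) caller passes NARGS (rtx, machine_mode) pairs
   after the fixed parameters, e.g.

       emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
			  dest_addr, Pmode,
			  const0_rtx, TYPE_MODE (integer_type_node),
			  size_rtx, TYPE_MODE (sizetype));

   where dest_addr and size_rtx stand for whatever rtx the caller has.  */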
3542 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3543 enum libcall_type fn_type,
3544 enum machine_mode outmode, int nargs, va_list p)
3546 /* Total size in bytes of all the stack-parms scanned so far. */
3547 struct args_size args_size;
3548 /* Size of arguments before any adjustments (such as rounding). */
3549 struct args_size original_args_size;
3555 CUMULATIVE_ARGS args_so_far;
3559 enum machine_mode mode;
3562 struct locate_and_pad_arg_data locate;
3566 int old_inhibit_defer_pop = inhibit_defer_pop;
3567 rtx call_fusage = 0;
3570 int pcc_struct_value = 0;
3571 int struct_value_size = 0;
3573 int reg_parm_stack_space = 0;
3576 tree tfom; /* type_for_mode (outmode, 0) */
3578 #ifdef REG_PARM_STACK_SPACE
3579 /* Define the boundary of the register parm stack space that needs to be
3581 int low_to_save, high_to_save;
3582 rtx save_area = 0; /* Place that it is saved. */
3585 /* Size of the stack reserved for parameter registers. */
3586 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3587 char *initial_stack_usage_map = stack_usage_map;
3589 #ifdef REG_PARM_STACK_SPACE
3590 #ifdef MAYBE_REG_PARM_STACK_SPACE
3591 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3593 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3597 /* By default, library functions can not throw. */
3598 flags = ECF_NOTHROW;
3610 case LCT_CONST_MAKE_BLOCK:
3611 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3613 case LCT_PURE_MAKE_BLOCK:
3614 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3617 flags |= ECF_NORETURN;
3620 flags = ECF_NORETURN;
3622 case LCT_ALWAYS_RETURN:
3623 flags = ECF_ALWAYS_RETURN;
3625 case LCT_RETURNS_TWICE:
3626 flags = ECF_RETURNS_TWICE;
3631 /* Ensure current function's preferred stack boundary is at least
3633 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3634 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3636 /* If this kind of value comes back in memory,
3637 decide where in memory it should come back. */
3638 if (outmode != VOIDmode)
3640 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3641 if (aggregate_value_p (tfom))
3643 #ifdef PCC_STATIC_STRUCT_RETURN
3645 = hard_function_value (build_pointer_type (tfom), 0, 0);
3646 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3647 pcc_struct_value = 1;
3649 value = gen_reg_rtx (outmode);
3650 #else /* not PCC_STATIC_STRUCT_RETURN */
3651 struct_value_size = GET_MODE_SIZE (outmode);
3652 if (value != 0 && GET_CODE (value) == MEM)
3655 mem_value = assign_temp (tfom, 0, 1, 1);
3657 /* This call returns a big structure. */
3658 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3662 tfom = void_type_node;
3664 /* ??? Unfinished: must pass the memory address as an argument. */
3666 /* Copy all the libcall-arguments out of the varargs data
3667 and into a vector ARGVEC.
3669 Compute how to pass each argument. We only support a very small subset
3670 of the full argument passing conventions to limit complexity here since
3671 library functions shouldn't have many args. */
3673 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3674 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3676 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3677 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3679 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3682 args_size.constant = 0;
3687 /* Now we are about to start emitting insns that can be deleted
3688 if a libcall is deleted. */
3689 if (flags & ECF_LIBCALL_BLOCK)
3694 /* If there's a structure value address to be passed,
3695 either pass it in the special place, or pass it as an extra argument. */
3696 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3698 rtx addr = XEXP (mem_value, 0);
3701 /* Make sure it is a reasonable operand for a move or push insn. */
3702 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3703 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3704 addr = force_operand (addr, NULL_RTX);
3706 argvec[count].value = addr;
3707 argvec[count].mode = Pmode;
3708 argvec[count].partial = 0;
3710 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3711 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3712 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3716 locate_and_pad_parm (Pmode, NULL_TREE,
3717 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3720 argvec[count].reg != 0,
3722 0, NULL_TREE, &args_size, &argvec[count].locate);
3724 if (argvec[count].reg == 0 || argvec[count].partial != 0
3725 || reg_parm_stack_space > 0)
3726 args_size.constant += argvec[count].locate.size.constant;
3728 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3733 for (; count < nargs; count++)
3735 rtx val = va_arg (p, rtx);
3736 enum machine_mode mode = va_arg (p, enum machine_mode);
3738 /* We cannot convert the arg value to the mode the library wants here;
3739 must do it earlier where we know the signedness of the arg. */
3741 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3744 /* There's no need to call protect_from_queue, because
3745 either emit_move_insn or emit_push_insn will do that. */
3747 /* Make sure it is a reasonable operand for a move or push insn. */
3748 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3749 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3750 val = force_operand (val, NULL_RTX);
3752 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3753 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3757 #ifdef FUNCTION_ARG_CALLEE_COPIES
3758 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3763 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3764 functions, so we have to pretend this isn't such a function. */
3765 if (flags & ECF_LIBCALL_BLOCK)
3767 rtx insns = get_insns ();
3771 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3773 /* If this was a CONST function, it is now PURE since
3774 it now reads memory. */
3775 if (flags & ECF_CONST)
3777 flags &= ~ECF_CONST;
3781 if (GET_MODE (val) == MEM && ! must_copy)
3785 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3787 emit_move_insn (slot, val);
3791 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3794 = gen_rtx_MEM (mode,
3795 expand_expr (build1 (ADDR_EXPR,
3796 build_pointer_type (type),
3797 make_tree (type, val)),
3798 NULL_RTX, VOIDmode, 0));
3801 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3802 gen_rtx_USE (VOIDmode, slot),
3805 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3806 gen_rtx_CLOBBER (VOIDmode,
3811 val = force_operand (XEXP (slot, 0), NULL_RTX);
3815 argvec[count].value = val;
3816 argvec[count].mode = mode;
3818 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3820 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3821 argvec[count].partial
3822 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3824 argvec[count].partial = 0;
3827 locate_and_pad_parm (mode, NULL_TREE,
3828 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3831 argvec[count].reg != 0,
3833 argvec[count].partial,
3834 NULL_TREE, &args_size, &argvec[count].locate);
3836 if (argvec[count].locate.size.var)
3839 if (argvec[count].reg == 0 || argvec[count].partial != 0
3840 || reg_parm_stack_space > 0)
3841 args_size.constant += argvec[count].locate.size.constant;
3843 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3846 #ifdef FINAL_REG_PARM_STACK_SPACE
3847 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3850 /* If this machine requires an external definition for library
3851 functions, write one out. */
3852 assemble_external_libcall (fun);
3854 original_args_size = args_size;
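/* Round the outgoing block so that, counting the adjustment already applied
   to the stack pointer, the total keeps the stack aligned to
   PREFERRED_STACK_BOUNDARY.  For example, with a 16-byte boundary
   (STACK_BYTES == 16), 20 bytes of arguments on top of a stack_pointer_delta
   of 8 are grown to 24, for an aligned 32-byte total.  */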
3855 args_size.constant = (((args_size.constant
3856 + stack_pointer_delta
3860 - stack_pointer_delta);
3862 args_size.constant = MAX (args_size.constant,
3863 reg_parm_stack_space);
3865 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3866 args_size.constant -= reg_parm_stack_space;
3869 if (args_size.constant > current_function_outgoing_args_size)
3870 current_function_outgoing_args_size = args_size.constant;
3872 if (ACCUMULATE_OUTGOING_ARGS)
3874 /* Since the stack pointer will never be pushed, it is possible for
3875 the evaluation of a parm to clobber something we have already
3876 written to the stack. Since most function calls on RISC machines
3877 do not use the stack, this is uncommon, but must work correctly.
3879 Therefore, we save any area of the stack that was already written
3880 and that we are using. Here we set up to do this by making a new
3881 stack usage map from the old one.
3883 Another approach might be to try to reorder the argument
3884 evaluations to avoid this conflicting stack usage. */
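/* Concretely: before each slot is written below, any bytes it overlaps that
   are already marked in stack_usage_map are copied into a pseudo
   (argvec[count].save_area) and are copied back once the call has been
   emitted.  */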
3886 needed = args_size.constant;
3888 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3889 /* Since we will be writing into the entire argument area, the
3890 map must be allocated for its entire size, not just the part that
3891 is the responsibility of the caller. */
3892 needed += reg_parm_stack_space;
3895 #ifdef ARGS_GROW_DOWNWARD
3896 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3899 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3902 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3904 if (initial_highest_arg_in_use)
3905 memcpy (stack_usage_map, initial_stack_usage_map,
3906 initial_highest_arg_in_use);
3908 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3909 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3910 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3913 /* We must be careful to use virtual regs before they're instantiated,
3914 and real regs afterwards. Loop optimization, for example, can create
3915 new libcalls after we've instantiated the virtual regs, and if we
3916 use virtuals anyway, they won't match the rtl patterns. */
3918 if (virtuals_instantiated)
3919 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3921 argblock = virtual_outgoing_args_rtx;
3926 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3929 /* If we push args individually in reverse order, perform stack alignment
3930 before the first push (the last arg). */
3931 if (argblock == 0 && PUSH_ARGS_REVERSED)
3932 anti_adjust_stack (GEN_INT (args_size.constant
3933 - original_args_size.constant));
3935 if (PUSH_ARGS_REVERSED)
3946 #ifdef REG_PARM_STACK_SPACE
3947 if (ACCUMULATE_OUTGOING_ARGS)
3949 /* The argument list is the property of the called routine and it
3950 may clobber it. If the fixed area has been used for previous
3951 parameters, we must save and restore it. */
3952 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3953 &low_to_save, &high_to_save);
3957 /* Push the args that need to be pushed. */
3959 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3960 are to be pushed. */
3961 for (count = 0; count < nargs; count++, argnum += inc)
3963 enum machine_mode mode = argvec[argnum].mode;
3964 rtx val = argvec[argnum].value;
3965 rtx reg = argvec[argnum].reg;
3966 int partial = argvec[argnum].partial;
3967 int lower_bound = 0, upper_bound = 0, i;
3969 if (! (reg != 0 && partial == 0))
3971 if (ACCUMULATE_OUTGOING_ARGS)
3973 /* If this is being stored into a pre-allocated, fixed-size,
3974 stack area, save any previous data at that location. */
3976 #ifdef ARGS_GROW_DOWNWARD
3977 /* stack_slot is negative, but we want to index stack_usage_map
3978 with positive values. */
3979 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3980 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3982 lower_bound = argvec[argnum].locate.offset.constant;
3983 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3987 /* Don't worry about things in the fixed argument area;
3988 it has already been saved. */
3989 if (i < reg_parm_stack_space)
3990 i = reg_parm_stack_space;
3991 while (i < upper_bound && stack_usage_map[i] == 0)
3994 if (i < upper_bound)
3996 /* We need to make a save area. */
3998 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3999 enum machine_mode save_mode
4000 = mode_for_size (size, MODE_INT, 1);
4002 = plus_constant (argblock,
4003 argvec[argnum].locate.offset.constant);
4005 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4006 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4008 emit_move_insn (argvec[argnum].save_area, stack_area);
4012 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4013 partial, reg, 0, argblock,
4014 GEN_INT (argvec[argnum].locate.offset.constant),
4015 reg_parm_stack_space,
4016 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4018 /* Now mark the segment we just used. */
4019 if (ACCUMULATE_OUTGOING_ARGS)
4020 for (i = lower_bound; i < upper_bound; i++)
4021 stack_usage_map[i] = 1;
4027 /* If we pushed args in forward order, perform stack alignment
4028 after pushing the last arg. */
4029 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4030 anti_adjust_stack (GEN_INT (args_size.constant
4031 - original_args_size.constant));
4033 if (PUSH_ARGS_REVERSED)
4038 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4040 /* Now load any reg parms into their regs. */
4042 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4043 are to be pushed. */
4044 for (count = 0; count < nargs; count++, argnum += inc)
4046 rtx val = argvec[argnum].value;
4047 rtx reg = argvec[argnum].reg;
4048 int partial = argvec[argnum].partial;
4050 /* Handle calls that pass values in multiple non-contiguous
4051 locations. The PA64 has examples of this for library calls. */
4052 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4053 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
4054 else if (reg != 0 && partial == 0)
4055 emit_move_insn (reg, val);
4060 /* Any regs containing parms remain in use through the call. */
4061 for (count = 0; count < nargs; count++)
4063 rtx reg = argvec[count].reg;
4064 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4065 use_group_regs (&call_fusage, reg);
4067 use_reg (&call_fusage, reg);
4070 /* Pass the function the address in which to return a structure value. */
4071 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4073 emit_move_insn (struct_value_rtx,
4075 force_operand (XEXP (mem_value, 0),
4077 if (GET_CODE (struct_value_rtx) == REG)
4078 use_reg (&call_fusage, struct_value_rtx);
4081 /* Don't allow popping to be deferred, since then
4082 cse'ing of library calls could delete a call and leave the pop. */
4084 valreg = (mem_value == 0 && outmode != VOIDmode
4085 ? hard_libcall_value (outmode) : NULL_RTX);
4087 /* Stack must be properly aligned now. */
4088 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4091 before_call = get_last_insn ();
4093 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4094 will set inhibit_defer_pop to that value. */
4095 /* The return type is needed to decide how many bytes the function pops.
4096 Signedness plays no role in that, so for simplicity, we pretend it's
4097 always signed. We also assume that the list of arguments passed has
4098 no impact, so we pretend it is unknown. */
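/* Building the function type with a NULL_TREE argument list yields a type
   whose arguments are unspecified, which matches the assumption above.  */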
4101 get_identifier (XSTR (orgfun, 0)),
4102 build_function_type (tfom, NULL_TREE),
4103 original_args_size.constant, args_size.constant,
4105 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4107 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4109 /* For calls to `setjmp', etc., inform flow.c it should complain
4110 if nonvolatile values are live. For functions that cannot return,
4111 inform flow that control does not fall through. */
4113 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4115 /* The barrier note must be emitted
4116 immediately after the CALL_INSN. Some ports emit more than
4117 just a CALL_INSN above, so we must search for it here. */
4119 rtx last = get_last_insn ();
4120 while (GET_CODE (last) != CALL_INSN)
4122 last = PREV_INSN (last);
4123 /* There was no CALL_INSN? */
4124 if (last == before_call)
4128 emit_barrier_after (last);
4131 /* Now restore inhibit_defer_pop to its actual original value. */
4134 /* If call is cse'able, make appropriate pair of reg-notes around it.
4135 Test valreg so we don't crash; may safely ignore `const'
4136 if return type is void. Disable for PARALLEL return values, because
4137 we have no way to move such values into a pseudo register. */
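/* emit_libcall_block wraps the emitted insns in REG_LIBCALL/REG_RETVAL notes
   and attaches NOTE to the copy of VALREG into TEMP as its REG_EQUAL value,
   which is what lets CSE recognize and delete a duplicate libcall.  For an
   ECF_PURE call the (use (mem:BLK (scratch))) built below records that the
   value also depends on memory.  */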
4138 if (flags & ECF_LIBCALL_BLOCK)
4144 insns = get_insns ();
4154 if (GET_CODE (valreg) == PARALLEL)
4156 temp = gen_reg_rtx (outmode);
4157 emit_group_store (temp, valreg, NULL_TREE,
4158 GET_MODE_SIZE (outmode));
4162 temp = gen_reg_rtx (GET_MODE (valreg));
4164 /* Construct an "equal form" for the value which mentions all the
4165 arguments in order as well as the function name. */
4166 for (i = 0; i < nargs; i++)
4167 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4168 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4170 insns = get_insns ();
4173 if (flags & ECF_PURE)
4174 note = gen_rtx_EXPR_LIST (VOIDmode,
4175 gen_rtx_USE (VOIDmode,
4176 gen_rtx_MEM (BLKmode,
4177 gen_rtx_SCRATCH (VOIDmode))),
4180 emit_libcall_block (insns, temp, valreg, note);
4187 /* Copy the value to the right place. */
4188 if (outmode != VOIDmode && retval)
4194 if (value != mem_value)
4195 emit_move_insn (value, mem_value);
4197 else if (GET_CODE (valreg) == PARALLEL)
4200 value = gen_reg_rtx (outmode);
4201 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4203 else if (value != 0)
4204 emit_move_insn (value, valreg);
4209 if (ACCUMULATE_OUTGOING_ARGS)
4211 #ifdef REG_PARM_STACK_SPACE
4213 restore_fixed_argument_area (save_area, argblock,
4214 high_to_save, low_to_save);
4217 /* If we saved any argument areas, restore them. */
4218 for (count = 0; count < nargs; count++)
4219 if (argvec[count].save_area)
4221 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4222 rtx adr = plus_constant (argblock,
4223 argvec[count].locate.offset.constant);
4224 rtx stack_area = gen_rtx_MEM (save_mode,
4225 memory_address (save_mode, adr));
4227 emit_move_insn (stack_area, argvec[count].save_area);
4230 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4231 stack_usage_map = initial_stack_usage_map;
4238 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4239 (emitting the queue unless NO_QUEUE is nonzero),
4240 for a value of mode OUTMODE,
4241 with NARGS different arguments, passed as alternating rtx values
4242 and machine_modes to convert them to.
4243 The rtx values should have been passed through protect_from_queue already.
4245 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4246 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4247 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4248 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4249 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4250 or other LCT_ value for other types of library calls. */
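/* A minimal usage sketch (the helper symbol and the operand rtxes OP0 and
   OP1 are purely illustrative, not part of this file):

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_helper"),
			LCT_NORMAL, VOIDmode, 2,
			op0, SImode, op1, SImode);
 */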
4253 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4254 enum machine_mode outmode, int nargs, ...)
4258 va_start (p, nargs);
4259 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4263 /* Like emit_library_call except that an extra argument, VALUE,
4264 comes second and says where to store the result.
4265 (If VALUE is zero, this function chooses a convenient way
4266 to return the value.)
4268 This function returns an rtx for where the value is to be found.
4269 If VALUE is nonzero, VALUE is returned. */
4272 emit_library_call_value (rtx orgfun, rtx value,
4273 enum libcall_type fn_type,
4274 enum machine_mode outmode, int nargs, ...)
4279 va_start (p, nargs);
4280 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4287 /* Store a single argument for a function call
4288 into the register or memory area where it must be passed.
4289 *ARG describes the argument value and where to pass it.
4291 ARGBLOCK is the address of the stack-block for all the arguments,
4292 or 0 on a machine where arguments are pushed individually.
4294 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4295 so must be careful about how the stack is used.
4297 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4298 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4299 that we need not worry about saving and restoring the stack.
4301 FNDECL is the declaration of the function we are calling.
4303 Return nonzero if this arg should cause sibcall failure, zero otherwise.  */
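/* (expand_call treats a nonzero return value as a reason to abandon its
   sibling-call plans, so a problematic argument merely disables that
   optimization.)  */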
4307 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4308 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4310 tree pval = arg->tree_value;
4314 int i, lower_bound = 0, upper_bound = 0;
4315 int sibcall_failure = 0;
4317 if (TREE_CODE (pval) == ERROR_MARK)
4320 /* Push a new temporary level for any temporaries we make for this argument.  */
4324 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4326 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4327 save any previous data at that location. */
4328 if (argblock && ! variable_size && arg->stack)
4330 #ifdef ARGS_GROW_DOWNWARD
4331 /* stack_slot is negative, but we want to index stack_usage_map
4332 with positive values. */
4333 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4334 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4338 lower_bound = upper_bound - arg->locate.size.constant;
4340 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4341 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4345 upper_bound = lower_bound + arg->locate.size.constant;
4349 /* Don't worry about things in the fixed argument area;
4350 it has already been saved. */
4351 if (i < reg_parm_stack_space)
4352 i = reg_parm_stack_space;
4353 while (i < upper_bound && stack_usage_map[i] == 0)
4356 if (i < upper_bound)
4358 /* We need to make a save area. */
4359 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4360 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4361 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4362 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4364 if (save_mode == BLKmode)
4366 tree ot = TREE_TYPE (arg->tree_value);
4367 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4368 | TYPE_QUAL_CONST));
4370 arg->save_area = assign_temp (nt, 0, 1, 1);
4371 preserve_temp_slots (arg->save_area);
4372 emit_block_move (validize_mem (arg->save_area), stack_area,
4373 expr_size (arg->tree_value),
4374 BLOCK_OP_CALL_PARM);
4378 arg->save_area = gen_reg_rtx (save_mode);
4379 emit_move_insn (arg->save_area, stack_area);
4385 /* If this isn't going to be placed on both the stack and in registers,
4386 set up the register and number of words. */
4387 if (! arg->pass_on_stack)
4389 if (flags & ECF_SIBCALL)
4390 reg = arg->tail_call_reg;
4393 partial = arg->partial;
4396 if (reg != 0 && partial == 0)
4397 /* Being passed entirely in a register. We shouldn't be called in this case.  */
4401 /* If this arg needs special alignment, don't load the registers here.  */
4403 if (arg->n_aligned_regs != 0)
4406 /* If this is being passed partially in a register, we can't evaluate
4407 it directly into its stack slot. Otherwise, we can. */
4408 if (arg->value == 0)
4410 /* stack_arg_under_construction is nonzero if a function argument is
4411 being evaluated directly into the outgoing argument list and
4412 expand_call must take special action to preserve the argument list
4413 if it is called recursively.
4415 For scalar function arguments stack_usage_map is sufficient to
4416 determine which stack slots must be saved and restored. Scalar
4417 arguments in general have pass_on_stack == 0.
4419 If this argument is initialized by a function which takes the
4420 address of the argument (a C++ constructor or a C function
4421 returning a BLKmode structure), then stack_usage_map is
4422 insufficient and expand_call must push the stack around the
4423 function call. Such arguments have pass_on_stack == 1.
4425 Note that it is always safe to set stack_arg_under_construction,
4426 but this generates suboptimal code if set when not needed. */
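/* A typical case is a call  f (g ())  where g returns a BLKmode aggregate
   that is constructed directly in f's outgoing argument area: while g is
   expanded it pushes arguments of its own, so the slots already prepared
   for f have to be protected around it.  */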
4428 if (arg->pass_on_stack)
4429 stack_arg_under_construction++;
4431 arg->value = expand_expr (pval,
4433 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4434 ? NULL_RTX : arg->stack,
4435 VOIDmode, EXPAND_STACK_PARM);
4437 /* If we are promoting the object (or if for any other reason the mode
4438 doesn't agree), convert the mode. */
4440 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4441 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4442 arg->value, arg->unsignedp);
4444 if (arg->pass_on_stack)
4445 stack_arg_under_construction--;
4448 /* Don't allow anything left on stack from computation
4449 of argument to alloca. */
4450 if (flags & ECF_MAY_BE_ALLOCA)
4451 do_pending_stack_adjust ();
4453 if (arg->value == arg->stack)
4454 /* If the value is already in the stack slot, we are done. */
4456 else if (arg->mode != BLKmode)
4460 /* Argument is a scalar, not entirely passed in registers.
4461 (If part is passed in registers, arg->partial says how much
4462 and emit_push_insn will take care of putting it there.)
4464 Push it, and if its size is less than the
4465 amount of space allocated to it,
4466 also bump stack pointer by the additional space.
4467 Note that in C the default argument promotions
4468 will prevent such mismatches. */
4470 size = GET_MODE_SIZE (arg->mode);
4471 /* Compute how much space the push instruction will push.
4472 On many machines, pushing a byte will advance the stack
4473 pointer by a halfword. */
4474 #ifdef PUSH_ROUNDING
4475 size = PUSH_ROUNDING (size);
4479 /* Compute how much space the argument should get:
4480 round up to a multiple of the alignment for arguments. */
4481 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4482 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4483 / (PARM_BOUNDARY / BITS_PER_UNIT))
4484 * (PARM_BOUNDARY / BITS_PER_UNIT));
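/* For instance, with a 32-bit PARM_BOUNDARY a one-byte argument may be
   pushed as a single byte (or whatever PUSH_ROUNDING turns that into), yet
   it is allotted a full 4-byte slot: SIZE is 1 while USED becomes 4, and
   the difference is handed to emit_push_insn as padding below.  */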
4486 /* This isn't already where we want it on the stack, so put it there.
4487 This can either be done with push or copy insns. */
4488 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4489 PARM_BOUNDARY, partial, reg, used - size, argblock,
4490 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4491 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4493 /* Unless this is a partially-in-register argument, the argument is now in the stack.  */
4496 arg->value = arg->stack;
4500 /* BLKmode, at least partly to be pushed. */
4502 unsigned int parm_align;
4506 /* Pushing a nonscalar.
4507 If part is passed in registers, PARTIAL says how much
4508 and emit_push_insn will take care of putting it there. */
4510 /* Round its size up to a multiple
4511 of the allocation unit for arguments. */
4513 if (arg->locate.size.var != 0)
4516 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4520 /* PUSH_ROUNDING has no effect on us, because
4521 emit_push_insn for BLKmode is careful to avoid it. */
4522 excess = (arg->locate.size.constant
4523 - int_size_in_bytes (TREE_TYPE (pval))
4524 + partial * UNITS_PER_WORD);
4525 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4526 NULL_RTX, TYPE_MODE (sizetype), 0);
4529 /* Some types will require stricter alignment, which will be
4530 provided for elsewhere in argument layout. */
4531 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4533 /* When an argument is padded down, the block is aligned to
4534 PARM_BOUNDARY, but the actual argument isn't. */
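/* EXCESS is how much bigger the slot is than the data.  EXCESS & -EXCESS
   isolates its lowest set bit, so when the data is padded downward the
   guaranteed alignment drops to that value (or to BITS_PER_UNIT when the
   size is variable); e.g. an excess of 12 bytes still leaves 4-byte
   (32-bit) alignment.  */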
4535 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4537 if (arg->locate.size.var)
4538 parm_align = BITS_PER_UNIT;
4541 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4542 parm_align = MIN (parm_align, excess_align);
4546 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4548 /* emit_push_insn might not work properly if arg->value and
4549 argblock + arg->locate.offset areas overlap. */
4553 if (XEXP (x, 0) == current_function_internal_arg_pointer
4554 || (GET_CODE (XEXP (x, 0)) == PLUS
4555 && XEXP (XEXP (x, 0), 0) ==
4556 current_function_internal_arg_pointer
4557 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4559 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4560 i = INTVAL (XEXP (XEXP (x, 0), 1));
4562 /* expand_call should ensure this */
4563 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4566 if (arg->locate.offset.constant > i)
4568 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4569 sibcall_failure = 1;
4571 else if (arg->locate.offset.constant < i)
4573 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4574 sibcall_failure = 1;
4579 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4580 parm_align, partial, reg, excess, argblock,
4581 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4582 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4584 /* Unless this is a partially-in-register argument, the argument is now in the stack.
4587 ??? Unlike the case above, in which we want the actual
4588 address of the data, so that we can load it directly into a
4589 register, here we want the address of the stack slot, so that
4590 it's properly aligned for word-by-word copying or something
4591 like that. It's not clear that this is always correct. */
4593 arg->value = arg->stack_slot;
4596 /* Mark all slots this store used. */
4597 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4598 && argblock && ! variable_size && arg->stack)
4599 for (i = lower_bound; i < upper_bound; i++)
4600 stack_usage_map[i] = 1;
4602 /* Once we have pushed something, pops can't safely
4603 be deferred during the rest of the arguments. */
4606 /* ANSI doesn't require a sequence point here,
4607 but PCC has one, so this will avoid some problems. */
4610 /* Free any temporary slots made in processing this argument. Show
4611 that we might have taken the address of something and pushed that as data.  */
4613 preserve_temp_slots (NULL_RTX);
4617 return sibcall_failure;
4620 /* Nonzero if we do not know how to pass TYPE solely in registers.
4621 We cannot do so in the following cases:
4623 - if the type has variable size
4624 - if the type is marked as addressable (it is required to be constructed into the stack)
4626 - if the padding and mode of the type is such that a copy into a register
4627 would put it into the wrong part of the register.
4629 Which padding can't be supported depends on the byte endianness.
4631 A value in a register is implicitly padded at the most significant end.
4632 On a big-endian machine, that is the lower end in memory.
4633 So a value padded in memory at the upper end can't go in a register.
4634 For a little-endian machine, the reverse is true. */
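/* For example, on a big-endian target a 3-byte aggregate whose padding is
   upward occupies the low-address bytes of its slot, with the padding after
   it; a register image would instead want the padding in the low-address
   (most significant) bytes, so the value must be passed on the stack.  */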
4637 default_must_pass_in_stack (enum machine_mode mode, tree type)
4642 /* If the type has variable size... */
4643 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4646 /* If the type is marked as addressable (it is required
4647 to be constructed into the stack)... */
4648 if (TREE_ADDRESSABLE (type))
4651 /* If the padding and mode of the type is such that a copy into
4652 a register would put it into the wrong part of the register. */
4654 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4655 && (FUNCTION_ARG_PADDING (mode, type)
4656 == (BYTES_BIG_ENDIAN ? upward : downward)))