1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
39 #include "langhooks.h"
44 #ifndef STACK_POINTER_OFFSET
45 #define STACK_POINTER_OFFSET 0
48 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
49 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
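/* Illustrative only (not part of the original file): on a hypothetical target
   where PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT is the usual 8,
   STACK_BYTES evaluates to 128 / 8 = 16, i.e. outgoing argument blocks are
   rounded in 16-byte units.  */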
51 /* Data structure and subroutines used within expand_call. */
55 /* Tree node for this argument. */
57 /* Mode for value; TYPE_MODE unless promoted. */
58 enum machine_mode mode;
59 /* Current RTL value for argument, or 0 if it isn't precomputed. */
61 /* Initially-computed RTL value for argument; only for const functions. */
63 /* Register to pass this argument in, 0 if passed on stack, or a
64 PARALLEL if the arg is to be copied into multiple non-contiguous
67 /* Register to pass this argument in when generating tail call sequence.
68 This is not the same register as for normal calls on machines with
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
91 /* Place that this stack area has been saved, if needed. */
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
101 /* A vector of one char per byte of stack space. A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
110 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
111 stack location's tail call argument has already been stored into the stack.
112 This bitmap is used to prevent sibling call optimization if the function tries
113 to use its parent's incoming argument slots when they have already been
114 overwritten with tail call arguments. */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 int stack_arg_under_construction;
124 static int calls_function (tree, int);
125 static int calls_function_1 (tree, int);
127 static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
128 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
130 static void precompute_register_parameters (int, struct arg_data *, int *);
131 static int store_one_arg (struct arg_data *, rtx, int, int, int);
132 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
133 static int finalize_must_preallocate (int, int, struct arg_data *,
135 static void precompute_arguments (int, int, struct arg_data *);
136 static int compute_argument_block_size (int, struct args_size *, int);
137 static void initialize_argument_information (int, struct arg_data *,
138 struct args_size *, int, tree,
139 tree, CUMULATIVE_ARGS *, int,
140 rtx *, int *, int *, int *);
141 static void compute_argument_addresses (struct arg_data *, rtx, int);
142 static rtx rtx_for_function_call (tree, tree);
143 static void load_register_parameters (struct arg_data *, int, rtx *, int,
145 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
146 enum machine_mode, int, va_list);
147 static int special_function_p (tree, int);
148 static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
149 static int check_sibcall_argument_overlap_1 (rtx);
150 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
152 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
154 static tree fix_unsafe_tree (tree);
156 #ifdef REG_PARM_STACK_SPACE
157 static rtx save_fixed_argument_area (int, rtx, int *, int *);
158 static void restore_fixed_argument_area (rtx, rtx, int, int);
161 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function `alloca'.
164 If WHICH is 0, return 1 if EXP contains a call to any function.
165 Actually, we only need return 1 if evaluating EXP would require pushing
166 arguments on the stack, but that is too difficult to compute, so we just
167 assume any function call might require the stack. */
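/* A sketch of the intended use (hedged; the real caller is precompute_arguments
   further below): an argument whose evaluation may itself push arguments is
   precomputed so that it cannot clobber outgoing stack slots, roughly

     if (calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
       ... precompute args[i] before any stack slots are filled ...  */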
169 static tree calls_function_save_exprs;
172 calls_function (tree exp, int which)
176 calls_function_save_exprs = 0;
177 val = calls_function_1 (exp, which);
178 calls_function_save_exprs = 0;
182 /* Recursive function to do the work of the above function. */
185 calls_function_1 (tree exp, int which)
188 enum tree_code code = TREE_CODE (exp);
189 int class = TREE_CODE_CLASS (code);
190 int length = first_rtl_op (code);
192 /* If this code is language-specific, we don't know what it will do. */
193 if ((int) code >= NUM_TREE_CODES)
201 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
203 && (TYPE_RETURNS_STACK_DEPRESSED
204 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
206 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
207 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
209 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
211 & ECF_MAY_BE_ALLOCA))
220 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
221 if (calls_function_1 (TREE_VALUE (tem), which))
228 if (SAVE_EXPR_RTL (exp) != 0)
230 if (value_member (exp, calls_function_save_exprs))
232 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
233 calls_function_save_exprs);
234 return (TREE_OPERAND (exp, 0) != 0
235 && calls_function_1 (TREE_OPERAND (exp, 0), which));
242 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
243 if (DECL_INITIAL (local) != 0
244 && calls_function_1 (DECL_INITIAL (local), which))
247 for (subblock = BLOCK_SUBBLOCKS (exp);
249 subblock = TREE_CHAIN (subblock))
250 if (calls_function_1 (subblock, which))
256 for (; exp != 0; exp = TREE_CHAIN (exp))
257 if (calls_function_1 (TREE_VALUE (exp), which))
265 /* Only expressions and blocks can contain calls. */
266 if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
269 for (i = 0; i < length; i++)
270 if (TREE_OPERAND (exp, i) != 0
271 && calls_function_1 (TREE_OPERAND (exp, i), which))
277 /* Force FUNEXP into a form suitable for the address of a CALL,
278 and return that as an rtx. Also load the static chain register
279 if FNDECL is a nested function.
281 CALL_FUSAGE points to a variable holding the prospective
282 CALL_INSN_FUNCTION_USAGE information. */
285 prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
286 int reg_parm_seen, int sibcallp)
288 rtx static_chain_value = 0;
290 funexp = protect_from_queue (funexp, 0);
293 /* Get possible static chain value for nested function in C. */
294 static_chain_value = lookup_static_chain (fndecl);
296 /* Make a valid memory address and copy constants thru pseudo-regs,
297 but not for a constant address if -fno-function-cse. */
298 if (GET_CODE (funexp) != SYMBOL_REF)
299 /* If we are using registers for parameters, force the
300 function address into a register now. */
301 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
302 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
303 : memory_address (FUNCTION_MODE, funexp));
306 #ifndef NO_FUNCTION_CSE
307 if (optimize && ! flag_no_function_cse)
308 #ifdef NO_RECURSIVE_FUNCTION_CSE
309 if (fndecl != current_function_decl)
311 funexp = force_reg (Pmode, funexp);
315 if (static_chain_value != 0)
317 emit_move_insn (static_chain_rtx, static_chain_value);
319 if (GET_CODE (static_chain_rtx) == REG)
320 use_reg (call_fusage, static_chain_rtx);
326 /* Generate instructions to call function FUNEXP,
327 and optionally pop the results.
328 The CALL_INSN is the first insn generated.
330 FNDECL is the declaration node of the function. This is given to the
331 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
333 FUNTYPE is the data type of the function. This is given to the macro
334 RETURN_POPS_ARGS to determine whether this function pops its own args.
335 We used to allow an identifier for library functions, but that doesn't
336 work when the return type is an aggregate type and the calling convention
337 says that the pointer to this aggregate is to be popped by the callee.
339 STACK_SIZE is the number of bytes of arguments on the stack,
340 ROUNDED_STACK_SIZE is that number rounded up to
341 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
342 both to put into the call insn and to generate explicit popping code if necessary.
345 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
346 It is zero if this call doesn't want a structure value.
348 NEXT_ARG_REG is the rtx that results from executing
349 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
350 just after all the args have had their registers assigned.
351 This could be whatever you like, but normally it is the first
352 arg-register beyond those used for args in this call,
353 or 0 if all the arg-registers are used in this call.
354 It is passed on to `gen_call' so you can put this info in the call insn.
356 VALREG is a hard register in which a value is returned,
357 or 0 if the call does not return a value.
359 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
360 the args to this call were processed.
361 We restore `inhibit_defer_pop' to that value.
363 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
364 denote registers used by the called function. */
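/* Illustrative only, not taken from this file: on a typical target the insn
   body emitted here has roughly the shape

     (call (mem:QI (symbol_ref "foo")) (const_int 32))

   or, when VALREG is set,

     (set (reg:SI 0) (call (mem:QI (symbol_ref "foo")) (const_int 32)))

   where the MEM is in FUNCTION_MODE and the constant is the rounded stack
   size; the exact form is defined by the target's call/call_value patterns.  */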
367 emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
368 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
369 HOST_WIDE_INT rounded_stack_size,
370 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
371 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
372 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
373 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
375 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
377 int already_popped = 0;
378 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
379 #if defined (HAVE_call) && defined (HAVE_call_value)
380 rtx struct_value_size_rtx;
381 struct_value_size_rtx = GEN_INT (struct_value_size);
384 #ifdef CALL_POPS_ARGS
385 n_popped += CALL_POPS_ARGS (* args_so_far);
388 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
389 and we don't want to load it into a register as an optimization,
390 because prepare_call_address already did it if it should be done. */
391 if (GET_CODE (funexp) != SYMBOL_REF)
392 funexp = memory_address (FUNCTION_MODE, funexp);
394 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
395 if ((ecf_flags & ECF_SIBCALL)
396 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
397 && (n_popped > 0 || stack_size == 0))
399 rtx n_pop = GEN_INT (n_popped);
402 /* If this subroutine pops its own args, record that in the call insn
403 if possible, for the sake of frame pointer elimination. */
406 pat = GEN_SIBCALL_VALUE_POP (valreg,
407 gen_rtx_MEM (FUNCTION_MODE, funexp),
408 rounded_stack_size_rtx, next_arg_reg,
411 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
412 rounded_stack_size_rtx, next_arg_reg, n_pop);
414 emit_call_insn (pat);
420 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
421 /* If the target has "call" or "call_value" insns, then prefer them
422 if no arguments are actually popped. If the target does not have
423 "call" or "call_value" insns, then we must use the popping versions
424 even if the call has no arguments to pop. */
425 #if defined (HAVE_call) && defined (HAVE_call_value)
426 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
427 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
429 if (HAVE_call_pop && HAVE_call_value_pop)
432 rtx n_pop = GEN_INT (n_popped);
435 /* If this subroutine pops its own args, record that in the call insn
436 if possible, for the sake of frame pointer elimination. */
439 pat = GEN_CALL_VALUE_POP (valreg,
440 gen_rtx_MEM (FUNCTION_MODE, funexp),
441 rounded_stack_size_rtx, next_arg_reg, n_pop);
443 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
444 rounded_stack_size_rtx, next_arg_reg, n_pop);
446 emit_call_insn (pat);
452 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
453 if ((ecf_flags & ECF_SIBCALL)
454 && HAVE_sibcall && HAVE_sibcall_value)
457 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
458 gen_rtx_MEM (FUNCTION_MODE, funexp),
459 rounded_stack_size_rtx,
460 next_arg_reg, NULL_RTX));
462 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
463 rounded_stack_size_rtx, next_arg_reg,
464 struct_value_size_rtx));
469 #if defined (HAVE_call) && defined (HAVE_call_value)
470 if (HAVE_call && HAVE_call_value)
473 emit_call_insn (GEN_CALL_VALUE (valreg,
474 gen_rtx_MEM (FUNCTION_MODE, funexp),
475 rounded_stack_size_rtx, next_arg_reg,
478 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
479 rounded_stack_size_rtx, next_arg_reg,
480 struct_value_size_rtx));
486 /* Find the call we just emitted. */
487 call_insn = last_call_insn ();
489 /* Mark memory as used for "pure" function call. */
490 if (ecf_flags & ECF_PURE)
494 gen_rtx_USE (VOIDmode,
495 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
498 /* Put the register usage information there. */
499 add_function_usage_to (call_insn, call_fusage);
501 /* If this is a const call, then set the insn's unchanging bit. */
502 if (ecf_flags & (ECF_CONST | ECF_PURE))
503 CONST_OR_PURE_CALL_P (call_insn) = 1;
505 /* If this call can't throw, attach a REG_EH_REGION reg note to that effect. */
507 if (ecf_flags & ECF_NOTHROW)
508 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
509 REG_NOTES (call_insn));
511 note_eh_region_may_contain_throw ();
513 if (ecf_flags & ECF_NORETURN)
514 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
515 REG_NOTES (call_insn));
516 if (ecf_flags & ECF_ALWAYS_RETURN)
517 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
518 REG_NOTES (call_insn));
520 if (ecf_flags & ECF_RETURNS_TWICE)
522 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
523 REG_NOTES (call_insn));
524 current_function_calls_setjmp = 1;
527 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
529 /* Restore this now, so that we do defer pops for this call's args
530 if the context of the call as a whole permits. */
531 inhibit_defer_pop = old_inhibit_defer_pop;
536 CALL_INSN_FUNCTION_USAGE (call_insn)
537 = gen_rtx_EXPR_LIST (VOIDmode,
538 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
539 CALL_INSN_FUNCTION_USAGE (call_insn));
540 rounded_stack_size -= n_popped;
541 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
542 stack_pointer_delta -= n_popped;
545 if (!ACCUMULATE_OUTGOING_ARGS)
547 /* If returning from the subroutine does not automatically pop the args,
548 we need an instruction to pop them sooner or later.
549 Perhaps do it now; perhaps just record how much space to pop later.
551 If returning from the subroutine does pop the args, indicate that the
552 stack pointer will be changed. */
554 if (rounded_stack_size != 0)
556 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
557 /* Just pretend we did the pop. */
558 stack_pointer_delta -= rounded_stack_size;
559 else if (flag_defer_pop && inhibit_defer_pop == 0
560 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
561 pending_stack_adjust += rounded_stack_size;
563 adjust_stack (rounded_stack_size_rtx);
566 /* When we accumulate outgoing args, we must avoid any stack manipulations.
567 Restore the stack pointer to its original value now. Usually
568 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
569 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
570 popping variants of functions exist as well.
572 ??? We may optimize similarly to defer_pop above, but it is
573 probably not worthwhile.
575 ??? It will be worthwhile to enable combine_stack_adjustments even for
578 anti_adjust_stack (GEN_INT (n_popped));
581 /* Determine if the function identified by NAME and FNDECL is one with
582 special properties we wish to know about.
584 For example, if the function might return more than one time (setjmp), then
585 set RETURNS_TWICE to a nonzero value.
587 Similarly set LONGJMP if the function is in the longjmp family.
589 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
590 space from the stack such as alloca. */
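/* Callers test individual bits of the returned ECF_* mask; for example,
   alloca_call_p below does, in effect,

     special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), 0)
       & ECF_MAY_BE_ALLOCA

   and setjmp_call_p tests ECF_RETURNS_TWICE the same way.  */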
593 special_function_p (tree fndecl, int flags)
595 if (! (flags & ECF_MALLOC)
596 && fndecl && DECL_NAME (fndecl)
597 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
598 /* Exclude functions not at the file scope, or not `extern',
599 since they are not the magic functions we would otherwise
601 FIXME: this should be handled with attributes, not with this
602 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
603 because you can declare fork() inside a function if you
605 && (DECL_CONTEXT (fndecl) == NULL_TREE
606 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
607 && TREE_PUBLIC (fndecl))
609 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
610 const char *tname = name;
612 /* We assume that alloca will always be called by name. It
613 makes no sense to pass it as a pointer-to-function to
614 anything that does not understand its behavior. */
615 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
617 && ! strcmp (name, "alloca"))
618 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
620 && ! strcmp (name, "__builtin_alloca"))))
621 flags |= ECF_MAY_BE_ALLOCA;
623 /* Disregard prefix _, __ or __x. */
626 if (name[1] == '_' && name[2] == 'x')
628 else if (name[1] == '_')
637 && (! strcmp (tname, "setjmp")
638 || ! strcmp (tname, "setjmp_syscall")))
640 && ! strcmp (tname, "sigsetjmp"))
642 && ! strcmp (tname, "savectx")))
643 flags |= ECF_RETURNS_TWICE;
646 && ! strcmp (tname, "siglongjmp"))
647 flags |= ECF_LONGJMP;
649 else if ((tname[0] == 'q' && tname[1] == 's'
650 && ! strcmp (tname, "qsetjmp"))
651 || (tname[0] == 'v' && tname[1] == 'f'
652 && ! strcmp (tname, "vfork")))
653 flags |= ECF_RETURNS_TWICE;
655 else if (tname[0] == 'l' && tname[1] == 'o'
656 && ! strcmp (tname, "longjmp"))
657 flags |= ECF_LONGJMP;
659 else if ((tname[0] == 'f' && tname[1] == 'o'
660 && ! strcmp (tname, "fork"))
661 /* Linux specific: __clone. check NAME to insist on the
662 leading underscores, to avoid polluting the ISO / POSIX
664 || (name[0] == '_' && name[1] == '_'
665 && ! strcmp (tname, "clone"))
666 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
667 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
669 || ((tname[5] == 'p' || tname[5] == 'e')
670 && tname[6] == '\0'))))
671 flags |= ECF_FORK_OR_EXEC;
676 /* Return nonzero when FNDECL represents a function that can return more than once, i.e. a setjmp-like function. */
679 setjmp_call_p (tree fndecl)
681 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
684 /* Return true when EXP contains an alloca call. */
686 alloca_call_p (tree exp)
688 if (TREE_CODE (exp) == CALL_EXPR
689 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
690 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
692 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
693 0) & ECF_MAY_BE_ALLOCA))
698 /* Detect flags (function attributes) from the function decl or type node. */
701 flags_from_decl_or_type (tree exp)
708 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
709 type = TREE_TYPE (exp);
713 if (i->pure_function)
714 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
715 if (i->const_function)
716 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
719 /* The function exp may have the `malloc' attribute. */
720 if (DECL_IS_MALLOC (exp))
723 /* The function exp may have the `pure' attribute. */
724 if (DECL_IS_PURE (exp))
725 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
727 if (TREE_NOTHROW (exp))
728 flags |= ECF_NOTHROW;
730 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
731 flags |= ECF_LIBCALL_BLOCK;
734 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
737 if (TREE_THIS_VOLATILE (exp))
738 flags |= ECF_NORETURN;
740 /* Mark if the function returns with the stack pointer depressed. We
741 cannot consider it pure or constant in that case. */
742 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
744 flags |= ECF_SP_DEPRESSED;
745 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
751 /* Precompute all register parameters as described by ARGS, storing values
752 into fields within the ARGS array.
754 NUM_ACTUALS indicates the total number of elements in the ARGS array.
756 Set REG_PARM_SEEN if we encounter a register parameter. */
759 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
765 for (i = 0; i < num_actuals; i++)
766 if (args[i].reg != 0 && ! args[i].pass_on_stack)
770 if (args[i].value == 0)
773 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
775 preserve_temp_slots (args[i].value);
778 /* ANSI doesn't require a sequence point here,
779 but PCC has one, so this will avoid some problems. */
783 /* If the value is a non-legitimate constant, force it into a
784 pseudo now. TLS symbols sometimes need a call to resolve. */
785 if (CONSTANT_P (args[i].value)
786 && !LEGITIMATE_CONSTANT_P (args[i].value))
787 args[i].value = force_reg (args[i].mode, args[i].value);
789 /* If we are to promote the function arg to a wider mode,
792 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
794 = convert_modes (args[i].mode,
795 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
796 args[i].value, args[i].unsignedp);
798 /* If the value is expensive, and we are inside an appropriately
799 short loop, put the value into a pseudo and then put the pseudo
802 For small register classes, also do this if this call uses
803 register parameters. This is to avoid reload conflicts while
804 loading the parameter registers. */
806 if ((! (GET_CODE (args[i].value) == REG
807 || (GET_CODE (args[i].value) == SUBREG
808 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
809 && args[i].mode != BLKmode
810 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
811 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
812 || preserve_subexpressions_p ()))
813 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
817 #ifdef REG_PARM_STACK_SPACE
819 /* The argument list is the property of the called routine and it
820 may clobber it. If the fixed area has been used for previous
821 parameters, we must save and restore it. */
824 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
829 /* Compute the boundary of the area that needs to be saved, if any. */
830 high = reg_parm_stack_space;
831 #ifdef ARGS_GROW_DOWNWARD
834 if (high > highest_outgoing_arg_in_use)
835 high = highest_outgoing_arg_in_use;
837 for (low = 0; low < high; low++)
838 if (stack_usage_map[low] != 0)
841 enum machine_mode save_mode;
846 while (stack_usage_map[--high] == 0)
850 *high_to_save = high;
852 num_to_save = high - low + 1;
853 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
855 /* If we don't have the required alignment, we must do this in BLKmode. */
857 if ((low & (MIN (GET_MODE_SIZE (save_mode),
858 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
861 #ifdef ARGS_GROW_DOWNWARD
866 stack_area = gen_rtx_MEM (save_mode,
867 memory_address (save_mode,
868 plus_constant (argblock,
871 set_mem_align (stack_area, PARM_BOUNDARY);
872 if (save_mode == BLKmode)
874 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
875 emit_block_move (validize_mem (save_area), stack_area,
876 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
880 save_area = gen_reg_rtx (save_mode);
881 emit_move_insn (save_area, stack_area);
891 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
893 enum machine_mode save_mode = GET_MODE (save_area);
897 #ifdef ARGS_GROW_DOWNWARD
898 delta = -high_to_save;
902 stack_area = gen_rtx_MEM (save_mode,
903 memory_address (save_mode,
904 plus_constant (argblock, delta)));
905 set_mem_align (stack_area, PARM_BOUNDARY);
907 if (save_mode != BLKmode)
908 emit_move_insn (stack_area, save_area);
910 emit_block_move (stack_area, validize_mem (save_area),
911 GEN_INT (high_to_save - low_to_save + 1),
914 #endif /* REG_PARM_STACK_SPACE */
916 /* If any elements in ARGS refer to parameters that are to be passed in
917 registers, but not in memory, and whose alignment does not permit a
918 direct copy into registers, copy the values into a group of pseudos
919 which we will later copy into the appropriate hard registers.
921 Pseudos for each unaligned argument will be stored into the array
922 args[argnum].aligned_regs. The caller is responsible for deallocating
923 the aligned_regs array if it is nonzero. */
926 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
930 for (i = 0; i < num_actuals; i++)
931 if (args[i].reg != 0 && ! args[i].pass_on_stack
932 && args[i].mode == BLKmode
933 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
934 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
936 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
937 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
938 int endian_correction = 0;
940 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
941 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
943 /* Structures smaller than a word are normally aligned to the
944 least significant byte. On a BYTES_BIG_ENDIAN machine,
945 this means we must skip the empty high order bytes when
946 calculating the bit offset. */
947 if (bytes < UNITS_PER_WORD
948 #ifdef BLOCK_REG_PADDING
949 && (BLOCK_REG_PADDING (args[i].mode,
950 TREE_TYPE (args[i].tree_value), 1)
956 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
958 for (j = 0; j < args[i].n_aligned_regs; j++)
960 rtx reg = gen_reg_rtx (word_mode);
961 rtx word = operand_subword_force (args[i].value, j, BLKmode);
962 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
964 args[i].aligned_regs[j] = reg;
965 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
966 word_mode, word_mode, BITS_PER_WORD);
968 /* There is no need to restrict this code to loading items
969 in TYPE_ALIGN sized hunks. The bitfield instructions can
970 load up entire word sized registers efficiently.
972 ??? This may not be needed anymore.
973 We used to emit a clobber here but that doesn't let later
974 passes optimize the instructions we emit. By storing 0 into
975 the register, later passes know that the first AND to zero out the
976 bitfield being set in the register is unnecessary. The store
977 of 0 will be deleted as will at least the first AND. */
979 emit_move_insn (reg, const0_rtx);
981 bytes -= bitsize / BITS_PER_UNIT;
982 store_bit_field (reg, bitsize, endian_correction, word_mode,
983 word, BITS_PER_WORD);
988 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
991 NUM_ACTUALS is the total number of parameters.
993 N_NAMED_ARGS is the total number of named arguments.
995 FNDECL is the tree code for the target of this call (if known)
997 ARGS_SO_FAR holds state needed by the target to know where to place
1000 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1001 for arguments which are passed in registers.
1003 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1004 and may be modified by this routine.
1006 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1007 flags which may be modified by this routine. */
1010 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1011 struct arg_data *args,
1012 struct args_size *args_size,
1013 int n_named_args ATTRIBUTE_UNUSED,
1014 tree actparms, tree fndecl,
1015 CUMULATIVE_ARGS *args_so_far,
1016 int reg_parm_stack_space,
1017 rtx *old_stack_level, int *old_pending_adj,
1018 int *must_preallocate, int *ecf_flags)
1020 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1023 /* Count arg position in order args appear. */
1029 args_size->constant = 0;
1032 /* In this loop, we consider args in the order they are written.
1033 We fill up ARGS from the front or from the back if necessary
1034 so that in any case the first arg to be pushed ends up at the front. */
1036 if (PUSH_ARGS_REVERSED)
1038 i = num_actuals - 1, inc = -1;
1039 /* In this case, must reverse order of args
1040 so that we compute and push the last arg first. */
1047 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1048 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1050 tree type = TREE_TYPE (TREE_VALUE (p));
1052 enum machine_mode mode;
1054 args[i].tree_value = TREE_VALUE (p);
1056 /* Replace erroneous argument with constant zero. */
1057 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1058 args[i].tree_value = integer_zero_node, type = integer_type_node;
1060 /* If TYPE is a transparent union, pass things the way we would
1061 pass the first field of the union. We have already verified that
1062 the modes are the same. */
1063 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1064 type = TREE_TYPE (TYPE_FIELDS (type));
1066 /* Decide where to pass this arg.
1068 args[i].reg is nonzero if all or part is passed in registers.
1070 args[i].partial is nonzero if part but not all is passed in registers,
1071 and the exact value says how many words are passed in registers.
1073 args[i].pass_on_stack is nonzero if the argument must at least be
1074 computed on the stack. It may then be loaded back into registers
1075 if args[i].reg is nonzero.
1077 These decisions are driven by the FUNCTION_... macros and must agree
1078 with those made by function.c. */
1080 /* See if this argument should be passed by invisible reference. */
1081 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
1082 || TREE_ADDRESSABLE (type)
1083 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1084 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1085 type, argpos < n_named_args)
1089 /* If we're compiling a thunk, pass through invisible
1090 references instead of making a copy. */
1091 if (current_function_is_thunk
1092 #ifdef FUNCTION_ARG_CALLEE_COPIES
1093 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1094 type, argpos < n_named_args)
1095 /* If it's in a register, we must make a copy of it too. */
1096 /* ??? Is this a sufficient test? Is there a better one? */
1097 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1098 && REG_P (DECL_RTL (args[i].tree_value)))
1099 && ! TREE_ADDRESSABLE (type))
1103 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1104 new object from the argument. If we are passing by
1105 invisible reference, the callee will do that for us, so we
1106 can strip off the TARGET_EXPR. This is not always safe,
1107 but it is safe in the only case where this is a useful
1108 optimization; namely, when the argument is a plain object.
1109 In that case, the frontend is just asking the backend to
1110 make a bitwise copy of the argument. */
1112 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1113 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1114 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1115 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1117 args[i].tree_value = build1 (ADDR_EXPR,
1118 build_pointer_type (type),
1119 args[i].tree_value);
1120 type = build_pointer_type (type);
1122 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1124 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1125 We implement this by passing the address of the temporary
1126 rather than expanding it into another allocated slot. */
1127 args[i].tree_value = build1 (ADDR_EXPR,
1128 build_pointer_type (type),
1129 args[i].tree_value);
1130 type = build_pointer_type (type);
1134 /* We make a copy of the object and pass the address to the
1135 function being called. */
1138 if (!COMPLETE_TYPE_P (type)
1139 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1140 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1141 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1142 STACK_CHECK_MAX_VAR_SIZE))))
1144 /* This is a variable-sized object. Make space on the stack
1146 rtx size_rtx = expr_size (TREE_VALUE (p));
1148 if (*old_stack_level == 0)
1150 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1151 *old_pending_adj = pending_stack_adjust;
1152 pending_stack_adjust = 0;
1155 copy = gen_rtx_MEM (BLKmode,
1156 allocate_dynamic_stack_space
1157 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1158 set_mem_attributes (copy, type, 1);
1161 copy = assign_temp (type, 0, 1, 0);
1163 store_expr (args[i].tree_value, copy, 0);
1164 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1166 args[i].tree_value = build1 (ADDR_EXPR,
1167 build_pointer_type (type),
1168 make_tree (type, copy));
1169 type = build_pointer_type (type);
1173 mode = TYPE_MODE (type);
1174 unsignedp = TREE_UNSIGNED (type);
1176 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1177 mode = promote_mode (type, mode, &unsignedp, 1);
1179 args[i].unsignedp = unsignedp;
1180 args[i].mode = mode;
1182 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1183 argpos < n_named_args);
1184 #ifdef FUNCTION_INCOMING_ARG
1185 /* If this is a sibling call and the machine has register windows, the
1186 register window has to be unwound before calling the routine, so
1187 arguments have to go into the incoming registers. */
1188 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1189 argpos < n_named_args);
1191 args[i].tail_call_reg = args[i].reg;
1194 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1197 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1198 argpos < n_named_args);
1201 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1203 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1204 it means that we are to pass this arg in the register(s) designated
1205 by the PARALLEL, but also to pass it in the stack. */
1206 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1207 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1208 args[i].pass_on_stack = 1;
1210 /* If this is an addressable type, we must preallocate the stack
1211 since we must evaluate the object into its final location.
1213 If this is to be passed in both registers and the stack, it is simpler
1215 if (TREE_ADDRESSABLE (type)
1216 || (args[i].pass_on_stack && args[i].reg != 0))
1217 *must_preallocate = 1;
1219 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1220 we cannot consider this function call constant. */
1221 if (TREE_ADDRESSABLE (type))
1222 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1224 /* Compute the stack-size of this argument. */
1225 if (args[i].reg == 0 || args[i].partial != 0
1226 || reg_parm_stack_space > 0
1227 || args[i].pass_on_stack)
1228 locate_and_pad_parm (mode, type,
1229 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1234 args[i].pass_on_stack ? 0 : args[i].partial,
1235 fndecl, args_size, &args[i].locate);
1236 #ifdef BLOCK_REG_PADDING
1238 /* The argument is passed entirely in registers. See at which
1239 end it should be padded. */
1240 args[i].locate.where_pad =
1241 BLOCK_REG_PADDING (mode, type,
1242 int_size_in_bytes (type) <= UNITS_PER_WORD);
1245 /* Update ARGS_SIZE, the total stack space for args so far. */
1247 args_size->constant += args[i].locate.size.constant;
1248 if (args[i].locate.size.var)
1249 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1251 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1252 have been used, etc. */
1254 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1255 argpos < n_named_args);
1259 /* Update ARGS_SIZE to contain the total size for the argument block.
1260 Return the original constant component of the argument block's size.
1262 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1263 for arguments passed in registers. */
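/* A hypothetical example of the rounding done below (non-variable case):
   with args_size->constant == 20, stack_pointer_delta == 8 and a preferred
   boundary of 16 bytes, the constant becomes
   ((20 + 8 + 15) / 16) * 16 - 8 = 32 - 8 = 24, so the 24 pushed bytes plus
   the 8 bytes the stack pointer is already off by total 32, a multiple of
   the boundary.  */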
1266 compute_argument_block_size (int reg_parm_stack_space,
1267 struct args_size *args_size,
1268 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1270 int unadjusted_args_size = args_size->constant;
1272 /* For accumulate outgoing args mode we don't need to align, since the frame
1273 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1274 backends from generating misaligned frame sizes. */
1275 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1276 preferred_stack_boundary = STACK_BOUNDARY;
1278 /* Compute the actual size of the argument block required. The variable
1279 and constant sizes must be combined, the size may have to be rounded,
1280 and there may be a minimum required size. */
1284 args_size->var = ARGS_SIZE_TREE (*args_size);
1285 args_size->constant = 0;
1287 preferred_stack_boundary /= BITS_PER_UNIT;
1288 if (preferred_stack_boundary > 1)
1290 /* We don't handle this case yet. To handle it correctly we have
1291 to add the delta, round and subtract the delta.
1292 Currently no machine description requires this support. */
1293 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1295 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1298 if (reg_parm_stack_space > 0)
1301 = size_binop (MAX_EXPR, args_size->var,
1302 ssize_int (reg_parm_stack_space));
1304 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1305 /* The area corresponding to register parameters is not to count in
1306 the size of the block we need. So make the adjustment. */
1308 = size_binop (MINUS_EXPR, args_size->var,
1309 ssize_int (reg_parm_stack_space));
1315 preferred_stack_boundary /= BITS_PER_UNIT;
1316 if (preferred_stack_boundary < 1)
1317 preferred_stack_boundary = 1;
1318 args_size->constant = (((args_size->constant
1319 + stack_pointer_delta
1320 + preferred_stack_boundary - 1)
1321 / preferred_stack_boundary
1322 * preferred_stack_boundary)
1323 - stack_pointer_delta);
1325 args_size->constant = MAX (args_size->constant,
1326 reg_parm_stack_space);
1328 #ifdef MAYBE_REG_PARM_STACK_SPACE
1329 if (reg_parm_stack_space == 0)
1330 args_size->constant = 0;
1333 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1334 args_size->constant -= reg_parm_stack_space;
1337 return unadjusted_args_size;
1340 /* Precompute parameters as needed for a function call.
1342 FLAGS is mask of ECF_* constants.
1344 NUM_ACTUALS is the number of arguments.
1346 ARGS is an array containing information for each argument; this
1347 routine fills in the INITIAL_VALUE and VALUE fields for each
1348 precomputed argument. */
1351 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1355 /* If this function call is cse'able, precompute all the parameters.
1356 Note that if the parameter is constructed into a temporary, this will
1357 cause an additional copy because the parameter will be constructed
1358 into a temporary location and then copied into the outgoing arguments.
1359 If a parameter contains a call to alloca and this function uses the
1360 stack, precompute the parameter. */
1362 /* If we preallocated the stack space, and some arguments must be passed
1363 on the stack, then we must precompute any parameter which contains a
1364 function call which will store arguments on the stack.
1365 Otherwise, evaluating the parameter may clobber previous parameters
1366 which have already been stored into the stack. (we have code to avoid
1367 such case by saving the outgoing stack arguments, but it results in
1370 for (i = 0; i < num_actuals; i++)
1371 if ((flags & ECF_LIBCALL_BLOCK)
1372 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1374 enum machine_mode mode;
1376 /* If this is an addressable type, we cannot pre-evaluate it. */
1377 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1381 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1383 /* ANSI doesn't require a sequence point here,
1384 but PCC has one, so this will avoid some problems. */
1387 args[i].initial_value = args[i].value
1388 = protect_from_queue (args[i].value, 0);
1390 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1391 if (mode != args[i].mode)
1394 = convert_modes (args[i].mode, mode,
1395 args[i].value, args[i].unsignedp);
1396 #ifdef PROMOTE_FOR_CALL_ONLY
1397 /* CSE will replace this only if it contains args[i].value
1398 pseudo, so convert it down to the declared mode using
1400 if (GET_CODE (args[i].value) == REG
1401 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1403 args[i].initial_value
1404 = gen_lowpart_SUBREG (mode, args[i].value);
1405 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1406 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1414 /* Given the current state of MUST_PREALLOCATE and information about
1415 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1416 compute and return the final value for MUST_PREALLOCATE. */
1419 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1421 /* See if we have or want to preallocate stack space.
1423 If we would have to push a partially-in-regs parm
1424 before other stack parms, preallocate stack space instead.
1426 If the size of some parm is not a multiple of the required stack
1427 alignment, we must preallocate.
1429 If the total size of arguments that would otherwise create a copy in
1430 a temporary (such as a CALL) is more than half the total argument list
1431 size, preallocation is faster.
1433 Another reason to preallocate is if we have a machine (like the m88k)
1434 where stack alignment is required to be maintained between every
1435 pair of insns, not just when the call is made. However, we assume here
1436 that such machines either do not have push insns (and hence preallocation
1437 would occur anyway) or the problem is taken care of with
1440 if (! must_preallocate)
1442 int partial_seen = 0;
1443 int copy_to_evaluate_size = 0;
1446 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1448 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1450 else if (partial_seen && args[i].reg == 0)
1451 must_preallocate = 1;
1453 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1454 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1455 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1456 || TREE_CODE (args[i].tree_value) == COND_EXPR
1457 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1458 copy_to_evaluate_size
1459 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1462 if (copy_to_evaluate_size * 2 >= args_size->constant
1463 && args_size->constant > 0)
1464 must_preallocate = 1;
1466 return must_preallocate;
1469 /* If we preallocated stack space, compute the address of each argument
1470 and store it into the ARGS array.
1472 We need not ensure it is a valid memory address here; it will be
1473 validized when it is used.
1475 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1478 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1482 rtx arg_reg = argblock;
1483 int i, arg_offset = 0;
1485 if (GET_CODE (argblock) == PLUS)
1486 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1488 for (i = 0; i < num_actuals; i++)
1490 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1491 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1494 /* Skip this parm if it will not be passed on the stack. */
1495 if (! args[i].pass_on_stack && args[i].reg != 0)
1498 if (GET_CODE (offset) == CONST_INT)
1499 addr = plus_constant (arg_reg, INTVAL (offset));
1501 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1503 addr = plus_constant (addr, arg_offset);
1504 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1505 set_mem_align (args[i].stack, PARM_BOUNDARY);
1506 set_mem_attributes (args[i].stack,
1507 TREE_TYPE (args[i].tree_value), 1);
1509 if (GET_CODE (slot_offset) == CONST_INT)
1510 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1512 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1514 addr = plus_constant (addr, arg_offset);
1515 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1516 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1517 set_mem_attributes (args[i].stack_slot,
1518 TREE_TYPE (args[i].tree_value), 1);
1520 /* Function incoming arguments may overlap with sibling call
1521 outgoing arguments and we cannot allow reordering of reads
1522 from function arguments with stores to outgoing arguments
1523 of sibling calls. */
1524 set_mem_alias_set (args[i].stack, 0);
1525 set_mem_alias_set (args[i].stack_slot, 0);
1530 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1531 in a call instruction.
1533 FNDECL is the tree node for the target function. For an indirect call
1534 FNDECL will be NULL_TREE.
1536 ADDR is the operand 0 of CALL_EXPR for this call. */
1539 rtx_for_function_call (tree fndecl, tree addr)
1543 /* Get the function to call, in the form of RTL. */
1546 /* If this is the first use of the function, see if we need to
1547 make an external definition for it. */
1548 if (! TREE_USED (fndecl))
1550 assemble_external (fndecl);
1551 TREE_USED (fndecl) = 1;
1554 /* Get a SYMBOL_REF rtx for the function address. */
1555 funexp = XEXP (DECL_RTL (fndecl), 0);
1558 /* Generate an rtx (probably a pseudo-register) for the address. */
1561 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1562 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1568 /* Do the register loads required for any wholly-register parms or any
1569 parms which are passed both on the stack and in a register. Their
1570 expressions were already evaluated.
1572 Mark all register-parms as living through the call, putting these USE
1573 insns in the CALL_INSN_FUNCTION_USAGE field.
1575 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1576 checking, setting *SIBCALL_FAILURE if appropriate. */
1579 load_register_parameters (struct arg_data *args, int num_actuals,
1580 rtx *call_fusage, int flags, int is_sibcall,
1581 int *sibcall_failure)
1585 #ifdef LOAD_ARGS_REVERSED
1586 for (i = num_actuals - 1; i >= 0; i--)
1588 for (i = 0; i < num_actuals; i++)
1591 rtx reg = ((flags & ECF_SIBCALL)
1592 ? args[i].tail_call_reg : args[i].reg);
1595 int partial = args[i].partial;
1598 rtx before_arg = get_last_insn ();
1599 /* Set to non-negative if we must move a word at a time, even if just
1600 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1601 we just use a normal move insn. This value can be zero if the
1602 argument is a zero size structure with no fields. */
1606 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1608 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1609 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1612 size = GET_MODE_SIZE (args[i].mode);
1614 /* Handle calls that pass values in multiple non-contiguous
1615 locations. The Irix 6 ABI has examples of this. */
1617 if (GET_CODE (reg) == PARALLEL)
1619 tree type = TREE_TYPE (args[i].tree_value);
1620 emit_group_load (reg, args[i].value, type,
1621 int_size_in_bytes (type));
1624 /* If simple case, just do move. If normal partial, store_one_arg
1625 has already loaded the register for us. In all other cases,
1626 load the register(s) from memory. */
1628 else if (nregs == -1)
1630 emit_move_insn (reg, args[i].value);
1631 #ifdef BLOCK_REG_PADDING
1632 /* Handle the case where we have a value that needs shifting
1633 up to the msb, e.g. a QImode value and we're padding
1634 upward on a BYTES_BIG_ENDIAN machine. */
1635 if (size < UNITS_PER_WORD
1636 && (args[i].locate.where_pad
1637 == (BYTES_BIG_ENDIAN ? upward : downward)))
1640 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1642 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1643 report the whole reg as used. Strictly speaking, the
1644 call only uses SIZE bytes at the msb end, but it doesn't
1645 seem worth generating rtl to say that. */
1646 reg = gen_rtx_REG (word_mode, REGNO (reg));
1647 x = expand_binop (word_mode, ashl_optab, reg,
1648 GEN_INT (shift), reg, 1, OPTAB_WIDEN);
1650 emit_move_insn (reg, x);
1655 /* If we have pre-computed the values to put in the registers in
1656 the case of non-aligned structures, copy them in now. */
1658 else if (args[i].n_aligned_regs != 0)
1659 for (j = 0; j < args[i].n_aligned_regs; j++)
1660 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1661 args[i].aligned_regs[j]);
1663 else if (partial == 0 || args[i].pass_on_stack)
1665 rtx mem = validize_mem (args[i].value);
1667 #ifdef BLOCK_REG_PADDING
1668 /* Handle a BLKmode that needs shifting. */
1669 if (nregs == 1 && size < UNITS_PER_WORD
1670 && args[i].locate.where_pad == downward)
1672 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1673 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1674 rtx x = gen_reg_rtx (word_mode);
1675 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1676 optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;
1678 emit_move_insn (x, tem);
1679 x = expand_binop (word_mode, dir, x, GEN_INT (shift),
1680 ri, 1, OPTAB_WIDEN);
1682 emit_move_insn (ri, x);
1686 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1689 /* When a parameter is a block, and perhaps in other cases, it is
1690 possible that it did a load from an argument slot that was
1691 already clobbered. */
1693 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1694 *sibcall_failure = 1;
1696 /* Handle calls that pass values in multiple non-contiguous
1697 locations. The Irix 6 ABI has examples of this. */
1698 if (GET_CODE (reg) == PARALLEL)
1699 use_group_regs (call_fusage, reg);
1700 else if (nregs == -1)
1701 use_reg (call_fusage, reg);
1703 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1708 /* Try to integrate function. See expand_inline_function for documentation
1709 about the parameters. */
1712 try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
1713 tree type, rtx structure_value_addr)
1718 rtx old_stack_level = 0;
1719 int reg_parm_stack_space = 0;
1721 #ifdef REG_PARM_STACK_SPACE
1722 #ifdef MAYBE_REG_PARM_STACK_SPACE
1723 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1725 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1729 before_call = get_last_insn ();
1731 timevar_push (TV_INTEGRATION);
1733 temp = expand_inline_function (fndecl, actparms, target,
1735 structure_value_addr);
1737 timevar_pop (TV_INTEGRATION);
1739 /* If inlining succeeded, return. */
1740 if (temp != (rtx) (size_t) - 1)
1742 if (ACCUMULATE_OUTGOING_ARGS)
1744 /* If the outgoing argument list must be preserved, push
1745 the stack before executing the inlined function if it
1748 i = reg_parm_stack_space;
1749 if (i > highest_outgoing_arg_in_use)
1750 i = highest_outgoing_arg_in_use;
1751 while (--i >= 0 && stack_usage_map[i] == 0)
1754 if (stack_arg_under_construction || i >= 0)
1757 = before_call ? NEXT_INSN (before_call) : get_insns ();
1758 rtx insn = NULL_RTX, seq;
1760 /* Look for a call in the inline function code.
1761 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1762 nonzero then there is a call and it is not necessary
1763 to scan the insns. */
1765 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1766 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1767 if (GET_CODE (insn) == CALL_INSN)
1772 /* Reserve enough stack space so that the largest
1773 argument list of any function call in the inline
1774 function does not overlap the argument list being
1775 evaluated. This is usually an overestimate because
1776 allocate_dynamic_stack_space reserves space for an
1777 outgoing argument list in addition to the requested
1778 space, but there is no way to ask for stack space such
1779 that an argument list of a certain length can be
1782 Add the stack space reserved for register arguments, if
1783 any, in the inline function. What is really needed is the
1784 largest value of reg_parm_stack_space in the inline
1785 function, but that is not available. Using the current
1786 value of reg_parm_stack_space is wrong, but gives
1787 correct results on all supported machines. */
1789 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1790 + reg_parm_stack_space);
1793 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1794 allocate_dynamic_stack_space (GEN_INT (adjust),
1795 NULL_RTX, BITS_PER_UNIT);
1798 emit_insn_before (seq, first_insn);
1799 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1804 /* If the result is equivalent to TARGET, return TARGET to simplify
1805 checks in store_expr. They can be equivalent but not equal in the
1806 case of a function that returns BLKmode. */
1807 if (temp != target && rtx_equal_p (temp, target))
1812 /* If inlining failed, mark FNDECL as needing to be compiled
1813 separately after all. If the function was declared inline, give a warning. */
1815 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1816 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1818 warning ("%Jinlining failed in call to '%F'", fndecl, fndecl);
1819 warning ("called from here");
1821 (*lang_hooks.mark_addressable) (fndecl);
1822 return (rtx) (size_t) - 1;
1825 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1826 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1827 bytes, then we would need to push some additional bytes to pad the
1828 arguments. So, we compute an adjustment to the stack pointer for an
1829 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1830 bytes. Then, when the arguments are pushed the stack will be perfectly
1831 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1832 be popped after the call. Returns the adjustment. */
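/* A worked example with hypothetical numbers, consistent with the code below:
   with preferred_unit_stack_boundary == 16, stack_pointer_delta == 0,
   pending_stack_adjust == 12 and unadjusted_args_size == 4,
     unadjusted_alignment = (0 + 4) % 16 = 4, then 4 - (12 % 16) = -8;
     adjustment starts at 12 and, since -8 <= 0, becomes 12 + (-8) = 4.
   So only 4 of the 12 pending bytes are popped now, pushing the 4 argument
   bytes restores 16-byte alignment, and ARGS_SIZE->CONSTANT becomes
   12 - 4 + 4 = 12 bytes to pop after the call.  */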
1835 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1836 struct args_size *args_size,
1837 int preferred_unit_stack_boundary)
1839 /* The number of bytes to pop so that the stack will be
1840 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1841 HOST_WIDE_INT adjustment;
1842 /* The alignment of the stack after the arguments are pushed, if we
1843 just pushed the arguments without adjusting the stack here. */
1844 HOST_WIDE_INT unadjusted_alignment;
1846 unadjusted_alignment
1847 = ((stack_pointer_delta + unadjusted_args_size)
1848 % preferred_unit_stack_boundary);
1850 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1851 as possible -- leaving just enough left to cancel out the
1852 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1853 PENDING_STACK_ADJUST is non-negative, and congruent to
1854 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1856 /* Begin by trying to pop all the bytes. */
1857 unadjusted_alignment
1858 = (unadjusted_alignment
1859 - (pending_stack_adjust % preferred_unit_stack_boundary));
1860 adjustment = pending_stack_adjust;
1861 /* Push enough additional bytes that the stack will be aligned
1862 after the arguments are pushed. */
1863 if (preferred_unit_stack_boundary > 1)
1865 if (unadjusted_alignment > 0)
1866 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1868 adjustment += unadjusted_alignment;
1871 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1872 bytes after the call. The right number is the entire
1873 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1874 by the arguments in the first place. */
1876 = pending_stack_adjust - adjustment + unadjusted_args_size;
1881 /* Scan expression X for dereferences of argument slots that have already
1882 been clobbered by tail call arguments (as noted in the stored_args_map
1884 bitmap). Return nonzero if X dereferences such an argument slot, zero otherwise. */
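/* As a concrete (hypothetical) example, on a target where ARGS_GROW_DOWNWARD
   is not defined, a dereference of the form

     (mem:SI (plus (reg) (const_int 8)))

   where the inner reg is current_function_internal_arg_pointer sets I to 8,
   and the loop below then tests bits 8 through 11 of stored_args_map; if any
   of them is set, that incoming argument slot has already been overwritten
   by a tail call argument and we return nonzero.  */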
1888 check_sibcall_argument_overlap_1 (rtx x)
1898 code = GET_CODE (x);
1902 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1904 else if (GET_CODE (XEXP (x, 0)) == PLUS
1905 && XEXP (XEXP (x, 0), 0) ==
1906 current_function_internal_arg_pointer
1907 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1908 i = INTVAL (XEXP (XEXP (x, 0), 1));
1912 #ifdef ARGS_GROW_DOWNWARD
1913 i = -i - GET_MODE_SIZE (GET_MODE (x));
1916 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1917 if (i + k < stored_args_map->n_bits
1918 && TEST_BIT (stored_args_map, i + k))
1924 /* Scan all subexpressions. */
1925 fmt = GET_RTX_FORMAT (code);
1926 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1930 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1933 else if (*fmt == 'E')
1935 for (j = 0; j < XVECLEN (x, i); j++)
1936 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1943 /* Scan the sequence after INSN for dereferences of argument slots that have
1944 already been clobbered by tail call arguments (as noted in the stored_args_map
1945 bitmap). If MARK_STORED_ARGS_MAP, afterwards add the stack slots for ARG to
1946 the stored_args_map bitmap (when ARG is a register, MARK_STORED_ARGS_MAP
1947 should be 0). Return nonzero if the sequence after INSN dereferences such
1948 argument slots, zero otherwise. */
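/* For the marking side, a hypothetical argument whose
   locate.slot_offset.constant is 16 and whose locate.size.constant is 8
   (again assuming ARGS_GROW_DOWNWARD is not defined) causes bits 16
   through 23 of stored_args_map to be set, so any later sibcall argument
   setup that reads those bytes of the incoming argument area will be
   caught by check_sibcall_argument_overlap_1.  */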
1951 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1955 if (insn == NULL_RTX)
1956 insn = get_insns ();
1958 insn = NEXT_INSN (insn);
1960 for (; insn; insn = NEXT_INSN (insn))
1962 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1965 if (mark_stored_args_map)
1967 #ifdef ARGS_GROW_DOWNWARD
1968 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1970 low = arg->locate.slot_offset.constant;
1973 for (high = low + arg->locate.size.constant; low < high; low++)
1974 SET_BIT (stored_args_map, low);
1976 return insn != NULL_RTX;
1980 fix_unsafe_tree (tree t)
1982 switch (unsafe_for_reeval (t))
1987 case 1: /* Mildly unsafe. */
1988 t = unsave_expr (t);
1991 case 2: /* Wildly unsafe. */
1993 tree var = build_decl (VAR_DECL, NULL_TREE,
1996 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2007 /* Generate all the code for a function call
2008 and return an rtx for its value.
2009 Store the value in TARGET (specified as an rtx) if convenient.
2010 If the value is stored in TARGET then TARGET is returned.
2011 If IGNORE is nonzero, then we ignore the value of the function call. */
2014 expand_call (tree exp, rtx target, int ignore)
2016 /* Nonzero if we are currently expanding a call. */
2017 static int currently_expanding_call = 0;
2019 /* List of actual parameters. */
2020 tree actparms = TREE_OPERAND (exp, 1);
2021 /* RTX for the function to be called. */
2023 /* Sequence of insns to perform a tail recursive "call". */
2024 rtx tail_recursion_insns = NULL_RTX;
2025 /* Sequence of insns to perform a normal "call". */
2026 rtx normal_call_insns = NULL_RTX;
2027 /* Sequence of insns to perform a tail recursive "call". */
2028 rtx tail_call_insns = NULL_RTX;
2029 /* Data type of the function. */
2031 tree type_arg_types;
2032 /* Declaration of the function being called,
2033 or 0 if the function is computed (not known by name). */
2036 int try_tail_call = 1;
2037 int try_tail_recursion = 1;
2040 /* Register in which non-BLKmode value will be returned,
2041 or 0 if no value or if value is BLKmode. */
2043 /* Address where we should return a BLKmode value;
2044 0 if value not BLKmode. */
2045 rtx structure_value_addr = 0;
2046 /* Nonzero if that address is being passed by treating it as
2047 an extra, implicit first parameter. Otherwise,
2048 it is passed by being copied directly into struct_value_rtx. */
2049 int structure_value_addr_parm = 0;
2050 /* Size of aggregate value wanted, or zero if none wanted
2051 or if we are using the non-reentrant PCC calling convention
2052 or expecting the value in registers. */
2053 HOST_WIDE_INT struct_value_size = 0;
2054 /* Nonzero if called function returns an aggregate in memory PCC style,
2055 by returning the address of where to find it. */
2056 int pcc_struct_value = 0;
2057 rtx struct_value = 0;
2059 /* Number of actual parameters in this call, including struct value addr. */
2061 /* Number of named args. Args after this are anonymous ones
2062 and they must all go on the stack. */
2065 /* Vector of information about each argument.
2066 Arguments are numbered in the order they will be pushed,
2067 not the order they are written. */
2068 struct arg_data *args;
2070 /* Total size in bytes of all the stack-parms scanned so far. */
2071 struct args_size args_size;
2072 struct args_size adjusted_args_size;
2073 /* Size of arguments before any adjustments (such as rounding). */
2074 int unadjusted_args_size;
2075 /* Data on reg parms scanned so far. */
2076 CUMULATIVE_ARGS args_so_far;
2077 /* Nonzero if a reg parm has been scanned. */
2079 /* Nonzero if this is an indirect function call. */
2081 /* Nonzero if we must avoid push-insns in the args for this call.
2082 If stack space is allocated for register parameters, but not by the
2083 caller, then it is preallocated in the fixed part of the stack frame.
2084 So the entire argument block must then be preallocated (i.e., we
2085 ignore PUSH_ROUNDING in that case). */
2087 int must_preallocate = !PUSH_ARGS;
2089 /* Size of the stack reserved for parameter registers. */
2090 int reg_parm_stack_space = 0;
2092 /* Address of space preallocated for stack parms
2093 (on machines that lack push insns), or 0 if space not preallocated. */
2096 /* Mask of ECF_ flags. */
2098 /* Nonzero if this is a call to an inline function. */
2099 int is_integrable = 0;
2100 #ifdef REG_PARM_STACK_SPACE
2101 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
2103 int low_to_save, high_to_save;
2104 rtx save_area = 0; /* Place that it is saved */
2107 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2108 char *initial_stack_usage_map = stack_usage_map;
2110 int old_stack_allocated;
2112 /* State variables to track stack modifications. */
2113 rtx old_stack_level = 0;
2114 int old_stack_arg_under_construction = 0;
2115 int old_pending_adj = 0;
2116 int old_inhibit_defer_pop = inhibit_defer_pop;
2118 /* Some stack pointer alterations we make are performed via
2119 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2120 which we then also need to save/restore along the way. */
2121 int old_stack_pointer_delta = 0;
2124 tree p = TREE_OPERAND (exp, 0);
2125 tree addr = TREE_OPERAND (exp, 0);
2127 /* The alignment of the stack, in bits. */
2128 HOST_WIDE_INT preferred_stack_boundary;
2129 /* The alignment of the stack, in bytes. */
2130 HOST_WIDE_INT preferred_unit_stack_boundary;
2132 /* See if this is "nothrow" function call. */
2133 if (TREE_NOTHROW (exp))
2134 flags |= ECF_NOTHROW;
2136 /* See if we can find a DECL-node for the actual function.
2137 As a result, decide whether this is a call to an integrable function. */
2139 fndecl = get_callee_fndecl (exp);
2143 && fndecl != current_function_decl
2144 && DECL_INLINE (fndecl)
2145 && DECL_SAVED_INSNS (fndecl)
2146 && DECL_SAVED_INSNS (fndecl)->inlinable)
2148 else if (! TREE_ADDRESSABLE (fndecl))
2150 /* In case this function later becomes inlinable,
2151 record that there was already a non-inline call to it.
2153 Use abstraction instead of setting TREE_ADDRESSABLE
2155 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2158 warning ("%Jcan't inline call to '%F'", fndecl, fndecl);
2159 warning ("called from here");
2161 (*lang_hooks.mark_addressable) (fndecl);
2165 && lookup_attribute ("warn_unused_result",
2166 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
2167 warning ("ignoring return value of `%D', "
2168 "declared with attribute warn_unused_result", fndecl);
2170 flags |= flags_from_decl_or_type (fndecl);
2173 /* If we don't have a specific function to call, see if we have any
2174 attributes set in the type. */
2178 && lookup_attribute ("warn_unused_result",
2179 TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (p)))))
2180 warning ("ignoring return value of function "
2181 "declared with attribute warn_unused_result");
2182 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2185 struct_value = targetm.calls.struct_value_rtx (fndecl ? TREE_TYPE (fndecl) : 0, 0);
2187 /* Warn if this value is an aggregate type,
2188 regardless of which calling convention we are using for it. */
2189 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2190 warning ("function call has aggregate value");
2192 /* If the result of a pure or const function call is ignored (or void),
2193 and none of its arguments are volatile, we can avoid expanding the
2194 call and just evaluate the arguments for side-effects. */
2195 if ((flags & (ECF_CONST | ECF_PURE))
2196 && (ignore || target == const0_rtx
2197 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2199 bool volatilep = false;
2202 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2203 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2211 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2212 expand_expr (TREE_VALUE (arg), const0_rtx,
2213 VOIDmode, EXPAND_NORMAL);
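/* As a hedged illustration of the shortcut above: for a call such as
   (void) some_pure_fn (x++), where some_pure_fn is a hypothetical pure
   function whose value is ignored, no call insn is emitted at all, but
   the loop above still expands x++ so that its side effect on x is
   preserved.  */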
2218 #ifdef REG_PARM_STACK_SPACE
2219 #ifdef MAYBE_REG_PARM_STACK_SPACE
2220 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2222 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2226 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2227 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2228 must_preallocate = 1;
2231 /* Set up a place to return a structure. */
2233 /* Cater to broken compilers. */
2234 if (aggregate_value_p (exp, fndecl))
2236 /* This call returns a big structure. */
2237 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2239 #ifdef PCC_STATIC_STRUCT_RETURN
2241 pcc_struct_value = 1;
2242 /* Easier than making that case work right. */
2245 /* In case this is a static function, note that it has been used. */
2247 if (! TREE_ADDRESSABLE (fndecl))
2248 (*lang_hooks.mark_addressable) (fndecl);
2252 #else /* not PCC_STATIC_STRUCT_RETURN */
2254 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2256 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2258 /* The structure value address arg is already in actparms.
2259 Pull it out. It might be nice to just leave it there, but
2260 we need to set structure_value_addr. */
2261 tree return_arg = TREE_VALUE (actparms);
2262 actparms = TREE_CHAIN (actparms);
2263 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2264 VOIDmode, EXPAND_NORMAL);
2266 else if (target && GET_CODE (target) == MEM)
2267 structure_value_addr = XEXP (target, 0);
2270 /* For variable-sized objects, we must be called with a target
2271 specified. If we were to allocate space on the stack here,
2272 we would have no way of knowing when to free it. */
2273 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2275 mark_temp_addr_taken (d);
2276 structure_value_addr = XEXP (d, 0);
2280 #endif /* not PCC_STATIC_STRUCT_RETURN */
2283 /* If called function is inline, try to integrate it. */
2287 rtx temp = try_to_integrate (fndecl, actparms, target,
2288 ignore, TREE_TYPE (exp),
2289 structure_value_addr);
2290 if (temp != (rtx) (size_t) - 1)
2294 /* Figure out the amount to which the stack should be aligned. */
2295 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2298 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2299 if (i && i->preferred_incoming_stack_boundary)
2300 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2303 /* Operand 0 is a pointer-to-function; get the type of the function. */
2304 funtype = TREE_TYPE (addr);
2305 if (! POINTER_TYPE_P (funtype))
2307 funtype = TREE_TYPE (funtype);
2309 /* Munge the tree to split complex arguments into their imaginary and real parts. */
2311 if (SPLIT_COMPLEX_ARGS)
2313 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2314 actparms = split_complex_values (actparms);
2317 type_arg_types = TYPE_ARG_TYPES (funtype);
2319 /* See if this is a call to a function that can return more than once
2320 or a call to longjmp or malloc. */
2321 flags |= special_function_p (fndecl, flags);
2323 if (flags & ECF_MAY_BE_ALLOCA)
2324 current_function_calls_alloca = 1;
2326 /* If struct_value_rtx is 0, it means pass the address
2327 as if it were an extra parameter. */
2328 if (structure_value_addr && struct_value == 0)
2330 /* If structure_value_addr is a REG other than
2331 virtual_outgoing_args_rtx, we can always use it. If it
2332 is not a REG, we must always copy it into a register.
2333 If it is virtual_outgoing_args_rtx, we must copy it to another
2334 register in some cases. */
2335 rtx temp = (GET_CODE (structure_value_addr) != REG
2336 || (ACCUMULATE_OUTGOING_ARGS
2337 && stack_arg_under_construction
2338 && structure_value_addr == virtual_outgoing_args_rtx)
2339 ? copy_addr_to_reg (structure_value_addr)
2340 : structure_value_addr);
2343 = tree_cons (error_mark_node,
2344 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2347 structure_value_addr_parm = 1;
2350 /* Count the arguments and set NUM_ACTUALS. */
2351 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2354 /* Start updating where the next arg would go.
2356 On some machines (such as the PA) indirect calls have a different
2357 calling convention than normal calls. The last argument in
2358 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not. */
2360 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2362 /* Compute number of named args.
2363 Normally, don't include the last named arg if anonymous args follow.
2364 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2365 (If no anonymous args follow, the result of list_length is actually
2366 one too large. This is harmless.)
2368 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2369 zero, this machine will be able to place unnamed args that were
2370 passed in registers into the stack. So treat all args as named.
2371 This allows the insns emitted for a specific argument list to be
2372 independent of the function declaration.
2374 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2375 reliable way to pass unnamed args in registers, so we must force
2376 them into memory. */
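/* A worked (hypothetical) example of the computation below, assuming the
   struct value address is not passed as a parm and strict argument naming
   is not in effect: for a varargs prototype such as int f (int, int, ...)
   TYPE_ARG_TYPES has two nodes, so n_named_args = 2 - 1 = 1 and only the
   first argument is treated as named; for a fixed prototype such as
   int g (int, int) the list also contains the trailing void node, so
   list_length returns 3 and n_named_args = 3 - 1 = 2, i.e. both arguments
   stay named.  */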
2378 if ((targetm.calls.strict_argument_naming (&args_so_far)
2379 || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2380 && type_arg_types != 0)
2382 = (list_length (type_arg_types)
2383 /* Don't include the last named arg. */
2384 - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
2385 /* Count the struct value address, if it is passed as a parm. */
2386 + structure_value_addr_parm);
2388 /* If we know nothing, treat all args as named. */
2389 n_named_args = num_actuals;
2391 /* Make a vector to hold all the information about each arg. */
2392 args = alloca (num_actuals * sizeof (struct arg_data));
2393 memset (args, 0, num_actuals * sizeof (struct arg_data));
2395 /* Build up entries in the ARGS array, compute the size of the
2396 arguments into ARGS_SIZE, etc. */
2397 initialize_argument_information (num_actuals, args, &args_size,
2398 n_named_args, actparms, fndecl,
2399 &args_so_far, reg_parm_stack_space,
2400 &old_stack_level, &old_pending_adj,
2401 &must_preallocate, &flags);
2405 /* If this function requires a variable-sized argument list, don't
2406 try to make a cse'able block for this call. We may be able to
2407 do this eventually, but it is too complicated to keep track of
2408 what insns go in the cse'able block and which don't. */
2410 flags &= ~ECF_LIBCALL_BLOCK;
2411 must_preallocate = 1;
2414 /* Now make final decision about preallocating stack space. */
2415 must_preallocate = finalize_must_preallocate (must_preallocate,
2419 /* If the structure value address will reference the stack pointer, we
2420 must stabilize it. We don't need to do this if we know that we are
2421 not going to adjust the stack pointer in processing this call. */
2423 if (structure_value_addr
2424 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2425 || reg_mentioned_p (virtual_outgoing_args_rtx,
2426 structure_value_addr))
2428 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2429 structure_value_addr = copy_to_reg (structure_value_addr);
2431 /* Tail calls can make things harder to debug, and we've traditionally
2432 pushed these optimizations into -O2. Don't try if we're already
2433 expanding a call, as that means we're an argument. Don't try if
2434 there are cleanups, as we know there's code to follow the call.
2436 If rtx_equal_function_value_matters is false, that means we've
2437 finished with regular parsing. Which means that some of the
2438 machinery we use to generate tail-calls is no longer in place.
2439 This is most often true of sjlj-exceptions, which we couldn't
2440 tail-call to anyway. */
2442 if (currently_expanding_call++ != 0
2443 || !flag_optimize_sibling_calls
2444 || !rtx_equal_function_value_matters
2445 || any_pending_cleanups ()
2447 try_tail_call = try_tail_recursion = 0;
2449 /* Tail recursion fails when we are not dealing with recursive calls. */
2450 if (!try_tail_recursion
2451 || TREE_CODE (addr) != ADDR_EXPR
2452 || TREE_OPERAND (addr, 0) != current_function_decl)
2453 try_tail_recursion = 0;
2455 /* Other reasons for tail call optimization to fail. */
2457 #ifdef HAVE_sibcall_epilogue
2458 !HAVE_sibcall_epilogue
2463 /* Doing sibling call optimization needs some work, since
2464 structure_value_addr can be allocated on the stack.
2465 It does not seem worth the effort since few optimizable
2466 sibling calls will return a structure. */
2467 || structure_value_addr != NULL_RTX
2468 /* Check whether the target is able to optimize the call into a sibcall. */
2470 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2471 /* Functions that do not return exactly once may not be sibcall optimized. */
2473 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2474 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2475 /* If the called function is nested in the current one, it might access
2476 some of the caller's arguments, but could clobber them beforehand if
2477 the argument areas are shared. */
2478 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2479 /* If this function requires more stack slots than the current
2480 function, we cannot change it into a sibling call. */
2481 || args_size.constant > current_function_args_size
2482 /* If the callee pops its own arguments, then it must pop exactly
2483 the same number of arguments as the current function. */
2484 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2485 != RETURN_POPS_ARGS (current_function_decl,
2486 TREE_TYPE (current_function_decl),
2487 current_function_args_size))
2488 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2491 if (try_tail_call || try_tail_recursion)
2494 actparms = NULL_TREE;
2495 /* Ok, we're going to give the tail call the old college try.
2496 This means we're going to evaluate the function arguments
2497 up to three times. There are two degrees of badness we can
2498 encounter, those that can be unsaved and those that can't.
2499 (See unsafe_for_reeval commentary for details.)
2501 Generate a new argument list. Pass safe arguments through
2502 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2503 For hard badness, evaluate them now and put their resulting
2504 rtx in a temporary VAR_DECL.
2506 initialize_argument_information has ordered the array for the
2507 order to be pushed, and we must remember this when reconstructing
2508 the original argument order. */
2510 if (PUSH_ARGS_REVERSED)
2519 i = num_actuals - 1;
2523 for (; i != end; i += inc)
2525 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2526 /* We need to build actparms for optimize_tail_recursion. We can
2527 safely trash away TREE_PURPOSE, since it is unused by this
2529 if (try_tail_recursion)
2530 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2532 /* Do the same for the function address if it is an expression. */
2534 addr = fix_unsafe_tree (addr);
2535 /* Expanding one of those dangerous arguments could have added
2536 cleanups, but otherwise give it a whirl. */
2537 if (any_pending_cleanups ())
2538 try_tail_call = try_tail_recursion = 0;
2541 /* Generate a tail recursion sequence when calling ourselves. */
2543 if (try_tail_recursion)
2545 /* We want to emit any pending stack adjustments before the tail
2546 recursion "call". That way we know any adjustment after the tail
2547 recursion call can be ignored if we indeed use the tail recursion
2549 int save_pending_stack_adjust = pending_stack_adjust;
2550 int save_stack_pointer_delta = stack_pointer_delta;
2552 /* Emit any queued insns now; otherwise they would end up in
2553 only one of the alternates. */
2556 /* Use a new sequence to hold any RTL we generate. We do not even
2557 know if we will use this RTL yet. The final decision can not be
2558 made until after RTL generation for the entire function is
2561 /* If expanding any of the arguments creates cleanups, we can't
2562 do a tailcall. So, we'll need to pop the pending cleanups
2563 list. If, however, all goes well, and there are no cleanups
2564 then the call to expand_start_target_temps will have no effect. */
2566 expand_start_target_temps ();
2567 if (optimize_tail_recursion (actparms, get_last_insn ()))
2569 if (any_pending_cleanups ())
2570 try_tail_call = try_tail_recursion = 0;
2572 tail_recursion_insns = get_insns ();
2574 expand_end_target_temps ();
2577 /* Restore the original pending stack adjustment for the sibling and
2578 normal call cases below. */
2579 pending_stack_adjust = save_pending_stack_adjust;
2580 stack_pointer_delta = save_stack_pointer_delta;
2583 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2585 /* A fork duplicates the profile information, and an exec discards
2586 it. We can't rely on fork/exec to be paired. So write out the
2587 profile information we have gathered so far, and clear it. */
2588 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2589 is subject to race conditions, just as with multithreaded
2592 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2595 /* Ensure current function's preferred stack boundary is at least
2596 what we need. We don't have to increase alignment for recursive functions. */
2598 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2599 && fndecl != current_function_decl)
2600 cfun->preferred_stack_boundary = preferred_stack_boundary;
2601 if (fndecl == current_function_decl)
2602 cfun->recursive_call_emit = true;
2604 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2606 function_call_count++;
2608 /* We want to make two insn chains; one for a sibling call, the other
2609 for a normal call. We will select one of the two chains after
2610 initial RTL generation is complete. */
2611 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2613 int sibcall_failure = 0;
2614 /* We want to emit any pending stack adjustments before the tail
2615 recursion "call". That way we know any adjustment after the tail
2616 recursion call can be ignored if we indeed use the tail recursion
2618 int save_pending_stack_adjust = 0;
2619 int save_stack_pointer_delta = 0;
2621 rtx before_call, next_arg_reg;
2625 /* Emit any queued insns now; otherwise they would end up in
2626 only one of the alternates. */
2629 /* State variables we need to save and restore between
2631 save_pending_stack_adjust = pending_stack_adjust;
2632 save_stack_pointer_delta = stack_pointer_delta;
2635 flags &= ~ECF_SIBCALL;
2637 flags |= ECF_SIBCALL;
2639 /* Other state variables that we must reinitialize each time
2640 through the loop (that are not initialized by the loop itself). */
2644 /* Start a new sequence for the normal call case.
2646 From this point on, if the sibling call fails, we want to set
2647 sibcall_failure instead of continuing the loop. */
2652 /* We know at this point that there are not currently any
2653 pending cleanups. If, however, in the process of evaluating
2654 the arguments we were to create some, we'll need to be
2655 able to get rid of them. */
2656 expand_start_target_temps ();
2659 /* Don't let pending stack adjusts add up to too much.
2660 Also, do all pending adjustments now if there is any chance
2661 this might be a call to alloca or if we are expanding a sibling
2662 call sequence or if we are calling a function that is to return
2663 with stack pointer depressed. */
2664 if (pending_stack_adjust >= 32
2665 || (pending_stack_adjust > 0
2666 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2668 do_pending_stack_adjust ();
2670 /* When calling a const function, we must pop the stack args right away,
2671 so that the pop is deleted or moved with the call. */
2672 if (pass && (flags & ECF_LIBCALL_BLOCK))
2675 #ifdef FINAL_REG_PARM_STACK_SPACE
2676 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2679 /* Precompute any arguments as needed. */
2681 precompute_arguments (flags, num_actuals, args);
2683 /* Now we are about to start emitting insns that can be deleted
2684 if a libcall is deleted. */
2685 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2688 adjusted_args_size = args_size;
2689 /* Compute the actual size of the argument block required. The variable
2690 and constant sizes must be combined, the size may have to be rounded,
2691 and there may be a minimum required size. When generating a sibcall
2692 pattern, do not round up, since we'll be re-using whatever space our caller provided. */
2694 unadjusted_args_size
2695 = compute_argument_block_size (reg_parm_stack_space,
2696 &adjusted_args_size,
2698 : preferred_stack_boundary));
2700 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2702 /* The argument block when performing a sibling call is the
2703 incoming argument block. */
2706 argblock = virtual_incoming_args_rtx;
2708 #ifdef STACK_GROWS_DOWNWARD
2709 = plus_constant (argblock, current_function_pretend_args_size);
2711 = plus_constant (argblock, -current_function_pretend_args_size);
2713 stored_args_map = sbitmap_alloc (args_size.constant);
2714 sbitmap_zero (stored_args_map);
2717 /* If we have no actual push instructions, or shouldn't use them,
2718 make space for all args right now. */
2719 else if (adjusted_args_size.var != 0)
2721 if (old_stack_level == 0)
2723 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2724 old_stack_pointer_delta = stack_pointer_delta;
2725 old_pending_adj = pending_stack_adjust;
2726 pending_stack_adjust = 0;
2727 /* stack_arg_under_construction says whether a stack arg is
2728 being constructed at the old stack level. Pushing the stack
2729 gets a clean outgoing argument block. */
2730 old_stack_arg_under_construction = stack_arg_under_construction;
2731 stack_arg_under_construction = 0;
2733 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2737 /* Note that we must go through the motions of allocating an argument
2738 block even if the size is zero because we may be storing args
2739 in the area reserved for register arguments, which may be part of
2742 int needed = adjusted_args_size.constant;
2744 /* Store the maximum argument space used. It will be pushed by
2745 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
2748 if (needed > current_function_outgoing_args_size)
2749 current_function_outgoing_args_size = needed;
2751 if (must_preallocate)
2753 if (ACCUMULATE_OUTGOING_ARGS)
2755 /* Since the stack pointer will never be pushed, it is
2756 possible for the evaluation of a parm to clobber
2757 something we have already written to the stack.
2758 Since most function calls on RISC machines do not use
2759 the stack, this is uncommon, but must work correctly.
2761 Therefore, we save any area of the stack that was already
2762 written and that we are using. Here we set up to do this
2763 by making a new stack usage map from the old one. The
2764 actual save will be done by store_one_arg.
2766 Another approach might be to try to reorder the argument
2767 evaluations to avoid this conflicting stack usage. */
2769 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2770 /* Since we will be writing into the entire argument area,
2771 the map must be allocated for its entire size, not just
2772 the part that is the responsibility of the caller. */
2773 needed += reg_parm_stack_space;
2776 #ifdef ARGS_GROW_DOWNWARD
2777 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2780 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2783 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2785 if (initial_highest_arg_in_use)
2786 memcpy (stack_usage_map, initial_stack_usage_map,
2787 initial_highest_arg_in_use);
2789 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2790 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2791 (highest_outgoing_arg_in_use
2792 - initial_highest_arg_in_use));
2795 /* The address of the outgoing argument list must not be
2796 copied to a register here, because argblock would be left
2797 pointing to the wrong place after the call to
2798 allocate_dynamic_stack_space below. */
2800 argblock = virtual_outgoing_args_rtx;
2804 if (inhibit_defer_pop == 0)
2806 /* Try to reuse some or all of the pending_stack_adjust
2807 to get this space. */
2809 = (combine_pending_stack_adjustment_and_call
2810 (unadjusted_args_size,
2811 &adjusted_args_size,
2812 preferred_unit_stack_boundary));
2814 /* combine_pending_stack_adjustment_and_call computes
2815 an adjustment before the arguments are allocated.
2816 Account for them and see whether or not the stack
2817 needs to go up or down. */
2818 needed = unadjusted_args_size - needed;
2822 /* We're releasing stack space. */
2823 /* ??? We can avoid any adjustment at all if we're
2824 already aligned. FIXME. */
2825 pending_stack_adjust = -needed;
2826 do_pending_stack_adjust ();
2830 /* We need to allocate space. We'll do that in
2831 push_block below. */
2832 pending_stack_adjust = 0;
2835 /* Special case this because overhead of `push_block' in
2836 this case is non-trivial. */
2838 argblock = virtual_outgoing_args_rtx;
2841 argblock = push_block (GEN_INT (needed), 0, 0);
2842 #ifdef ARGS_GROW_DOWNWARD
2843 argblock = plus_constant (argblock, needed);
2847 /* We only really need to call `copy_to_reg' in the case
2848 where push insns are going to be used to pass ARGBLOCK
2849 to a function call in ARGS. In that case, the stack
2850 pointer changes value from the allocation point to the
2851 call point, and hence the value of
2852 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2853 as well always do it. */
2854 argblock = copy_to_reg (argblock);
2859 if (ACCUMULATE_OUTGOING_ARGS)
2861 /* The save/restore code in store_one_arg handles all
2862 cases except one: a constructor call (including a C
2863 function returning a BLKmode struct) to initialize
2865 if (stack_arg_under_construction)
2867 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2868 rtx push_size = GEN_INT (reg_parm_stack_space
2869 + adjusted_args_size.constant);
2871 rtx push_size = GEN_INT (adjusted_args_size.constant);
2873 if (old_stack_level == 0)
2875 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2877 old_stack_pointer_delta = stack_pointer_delta;
2878 old_pending_adj = pending_stack_adjust;
2879 pending_stack_adjust = 0;
2880 /* stack_arg_under_construction says whether a stack
2881 arg is being constructed at the old stack level.
2882 Pushing the stack gets a clean outgoing argument
2884 old_stack_arg_under_construction
2885 = stack_arg_under_construction;
2886 stack_arg_under_construction = 0;
2887 /* Make a new map for the new argument list. */
2888 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2889 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2890 highest_outgoing_arg_in_use = 0;
2892 allocate_dynamic_stack_space (push_size, NULL_RTX,
2896 /* If argument evaluation might modify the stack pointer,
2897 copy the address of the argument list to a register. */
2898 for (i = 0; i < num_actuals; i++)
2899 if (args[i].pass_on_stack)
2901 argblock = copy_addr_to_reg (argblock);
2906 compute_argument_addresses (args, argblock, num_actuals);
2908 /* If we push args individually in reverse order, perform stack alignment
2909 before the first push (the last arg). */
2910 if (PUSH_ARGS_REVERSED && argblock == 0
2911 && adjusted_args_size.constant != unadjusted_args_size)
2913 /* When the stack adjustment is pending, we get better code
2914 by combining the adjustments. */
2915 if (pending_stack_adjust
2916 && ! (flags & ECF_LIBCALL_BLOCK)
2917 && ! inhibit_defer_pop)
2919 pending_stack_adjust
2920 = (combine_pending_stack_adjustment_and_call
2921 (unadjusted_args_size,
2922 &adjusted_args_size,
2923 preferred_unit_stack_boundary));
2924 do_pending_stack_adjust ();
2926 else if (argblock == 0)
2927 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2928 - unadjusted_args_size));
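/* For instance (hypothetical sizes), with unadjusted_args_size == 20 and
   adjusted_args_size.constant == 32, the anti_adjust_stack call above
   reserves the 12 bytes of alignment padding first, so that pushing the
   20 bytes of arguments afterwards ends exactly on the preferred
   boundary.  */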
2930 /* Now that the stack is properly aligned, pops can't safely
2931 be deferred during the evaluation of the arguments. */
2934 funexp = rtx_for_function_call (fndecl, addr);
2936 /* Figure out the register where the value, if any, will come back. */
2938 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2939 && ! structure_value_addr)
2941 if (pcc_struct_value)
2942 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2943 fndecl, (pass == 0));
2945 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2948 /* Precompute all register parameters. It isn't safe to compute anything
2949 once we have started filling any specific hard regs. */
2950 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2952 #ifdef REG_PARM_STACK_SPACE
2953 /* Save the fixed argument area if it's part of the caller's frame and
2954 is clobbered by argument setup for this call. */
2955 if (ACCUMULATE_OUTGOING_ARGS && pass)
2956 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2957 &low_to_save, &high_to_save);
2960 /* Now store (and compute if necessary) all non-register parms.
2961 These come before register parms, since they can require block-moves,
2962 which could clobber the registers used for register parms.
2963 Parms which have partial registers are not stored here,
2964 but we do preallocate space here if they want that. */
2966 for (i = 0; i < num_actuals; i++)
2967 if (args[i].reg == 0 || args[i].pass_on_stack)
2969 rtx before_arg = get_last_insn ();
2971 if (store_one_arg (&args[i], argblock, flags,
2972 adjusted_args_size.var != 0,
2973 reg_parm_stack_space)
2975 && check_sibcall_argument_overlap (before_arg,
2977 sibcall_failure = 1;
2980 /* If we have a parm that is passed in registers but not in memory
2981 and whose alignment does not permit a direct copy into registers,
2982 make a group of pseudos that correspond to each register that we
2984 if (STRICT_ALIGNMENT)
2985 store_unaligned_arguments_into_pseudos (args, num_actuals);
2987 /* Now store any partially-in-registers parm.
2988 This is the last place a block-move can happen. */
2990 for (i = 0; i < num_actuals; i++)
2991 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2993 rtx before_arg = get_last_insn ();
2995 if (store_one_arg (&args[i], argblock, flags,
2996 adjusted_args_size.var != 0,
2997 reg_parm_stack_space)
2999 && check_sibcall_argument_overlap (before_arg,
3001 sibcall_failure = 1;
3004 /* If we pushed args in forward order, perform stack alignment
3005 after pushing the last arg. */
3006 if (!PUSH_ARGS_REVERSED && argblock == 0)
3007 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3008 - unadjusted_args_size));
3010 /* If register arguments require space on the stack and stack space
3011 was not preallocated, allocate stack space here for arguments
3012 passed in registers. */
3013 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3014 if (!ACCUMULATE_OUTGOING_ARGS
3015 && must_preallocate == 0 && reg_parm_stack_space > 0)
3016 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3019 /* Pass the function the address in which to return a structure value. */
3021 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3023 structure_value_addr
3024 = convert_memory_address (Pmode, structure_value_addr);
3025 emit_move_insn (struct_value,
3027 force_operand (structure_value_addr,
3030 if (GET_CODE (struct_value) == REG)
3031 use_reg (&call_fusage, struct_value);
3034 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3035 reg_parm_seen, pass == 0);
3037 load_register_parameters (args, num_actuals, &call_fusage, flags,
3038 pass == 0, &sibcall_failure);
3040 /* Perform postincrements before actually calling the function. */
3043 /* Save a pointer to the last insn before the call, so that we can
3044 later safely search backwards to find the CALL_INSN. */
3045 before_call = get_last_insn ();
3047 /* Set up next argument register. For sibling calls on machines
3048 with register windows this should be the incoming register. */
3049 #ifdef FUNCTION_INCOMING_ARG
3051 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3055 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3058 /* All arguments and registers used for the call must be set up by
3061 /* Stack must be properly aligned now. */
3062 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3065 /* Generate the actual call instruction. */
3066 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3067 adjusted_args_size.constant, struct_value_size,
3068 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3069 flags, & args_so_far);
3071 /* If call is cse'able, make appropriate pair of reg-notes around it.
3072 Test valreg so we don't crash; may safely ignore `const'
3073 if return type is void. Disable for PARALLEL return values, because
3074 we have no way to move such values into a pseudo register. */
3075 if (pass && (flags & ECF_LIBCALL_BLOCK))
3079 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
3081 insns = get_insns ();
3083 /* Expansion of block moves may have introduced a loop, which must
3084 not appear inside a libcall block. */
3085 for (insn = insns; insn; insn = NEXT_INSN (insn))
3086 if (GET_CODE (insn) == JUMP_INSN)
3097 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3099 /* Mark the return value as a pointer if needed. */
3100 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3101 mark_reg_pointer (temp,
3102 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3104 /* Construct an "equal form" for the value which mentions all the
3105 arguments in order as well as the function name. */
3106 for (i = 0; i < num_actuals; i++)
3107 note = gen_rtx_EXPR_LIST (VOIDmode,
3108 args[i].initial_value, note);
3109 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3113 if (flags & ECF_PURE)
3114 note = gen_rtx_EXPR_LIST (VOIDmode,
3115 gen_rtx_USE (VOIDmode,
3116 gen_rtx_MEM (BLKmode,
3117 gen_rtx_SCRATCH (VOIDmode))),
3120 emit_libcall_block (insns, temp, valreg, note);
3125 else if (pass && (flags & ECF_MALLOC))
3127 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3130 /* The return value from a malloc-like function is a pointer. */
3131 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3132 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3134 emit_move_insn (temp, valreg);
3136 /* The return value from a malloc-like function cannot alias anything else. */
3138 last = get_last_insn ();
3140 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3142 /* Write out the sequence. */
3143 insns = get_insns ();
3149 /* For calls to `setjmp', etc., inform flow.c it should complain
3150 if nonvolatile values are live. For functions that cannot return,
3151 inform flow that control does not fall through. */
3153 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3155 /* The barrier must be emitted
3156 immediately after the CALL_INSN. Some ports emit more
3157 than just a CALL_INSN above, so we must search for it here. */
3159 rtx last = get_last_insn ();
3160 while (GET_CODE (last) != CALL_INSN)
3162 last = PREV_INSN (last);
3163 /* There was no CALL_INSN? */
3164 if (last == before_call)
3168 emit_barrier_after (last);
3170 /* Stack adjustments after a noreturn call are dead code.
3171 However when NO_DEFER_POP is in effect, we must preserve
3172 stack_pointer_delta. */
3173 if (inhibit_defer_pop == 0)
3175 stack_pointer_delta = old_stack_allocated;
3176 pending_stack_adjust = 0;
3180 if (flags & ECF_LONGJMP)
3181 current_function_calls_longjmp = 1;
3183 /* If value type not void, return an rtx for the value. */
3185 /* If there are cleanups to be called, don't use a hard reg as target.
3186 We need to double check this and see if it matters anymore. */
3187 if (any_pending_cleanups ())
3189 if (target && REG_P (target)
3190 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3192 sibcall_failure = 1;
3195 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3197 target = const0_rtx;
3198 else if (structure_value_addr)
3200 if (target == 0 || GET_CODE (target) != MEM)
3203 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3204 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3205 structure_value_addr));
3206 set_mem_attributes (target, exp, 1);
3209 else if (pcc_struct_value)
3211 /* This is the special C++ case where we need to
3212 know what the true target was. We take care to
3213 never use this value more than once in one expression. */
3214 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3215 copy_to_reg (valreg));
3216 set_mem_attributes (target, exp, 1);
3218 /* Handle calls that return values in multiple non-contiguous locations.
3219 The Irix 6 ABI has examples of this. */
3220 else if (GET_CODE (valreg) == PARALLEL)
3224 /* This will only be assigned once, so it can be readonly. */
3225 tree nt = build_qualified_type (TREE_TYPE (exp),
3226 (TYPE_QUALS (TREE_TYPE (exp))
3227 | TYPE_QUAL_CONST));
3229 target = assign_temp (nt, 0, 1, 1);
3230 preserve_temp_slots (target);
3233 if (! rtx_equal_p (target, valreg))
3234 emit_group_store (target, valreg, TREE_TYPE (exp),
3235 int_size_in_bytes (TREE_TYPE (exp)));
3237 /* We can not support sibling calls for this case. */
3238 sibcall_failure = 1;
3241 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3242 && GET_MODE (target) == GET_MODE (valreg))
3244 /* TARGET and VALREG cannot be equal at this point because the
3245 latter would not have REG_FUNCTION_VALUE_P true, while the
3246 former would if it were referring to the same register.
3248 If they refer to the same register, this move will be a no-op,
3249 except when function inlining is being done. */
3250 emit_move_insn (target, valreg);
3252 /* If we are setting a MEM, this code must be executed. Since it is
3253 emitted after the call insn, sibcall optimization cannot be
3254 performed in that case. */
3255 if (GET_CODE (target) == MEM)
3256 sibcall_failure = 1;
3258 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3260 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3262 /* We can not support sibling calls for this case. */
3263 sibcall_failure = 1;
3266 target = copy_to_reg (valreg);
3268 if (targetm.calls.promote_function_return(funtype))
3270 /* If we promoted this return value, make the proper SUBREG. TARGET
3271 might be const0_rtx here, so be careful. */
3272 if (GET_CODE (target) == REG
3273 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3274 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3276 tree type = TREE_TYPE (exp);
3277 int unsignedp = TREE_UNSIGNED (type);
3280 /* If we don't promote as expected, something is wrong. */
3281 if (GET_MODE (target)
3282 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3285 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3286 && GET_MODE_SIZE (GET_MODE (target))
3287 > GET_MODE_SIZE (TYPE_MODE (type)))
3289 offset = GET_MODE_SIZE (GET_MODE (target))
3290 - GET_MODE_SIZE (TYPE_MODE (type));
3291 if (! BYTES_BIG_ENDIAN)
3292 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3293 else if (! WORDS_BIG_ENDIAN)
3294 offset %= UNITS_PER_WORD;
3296 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3297 SUBREG_PROMOTED_VAR_P (target) = 1;
3298 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
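/* A hypothetical example of the offset computation above: a SImode value
   promoted to and returned in a DImode register on a target where both
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are set gives offset = 8 - 4 = 4,
   so the SUBREG refers to the least significant four bytes of the hard
   return register.  */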
3302 /* If size of args is variable or this was a constructor call for a stack
3303 argument, restore saved stack-pointer value. */
3305 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3307 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3308 stack_pointer_delta = old_stack_pointer_delta;
3309 pending_stack_adjust = old_pending_adj;
3310 stack_arg_under_construction = old_stack_arg_under_construction;
3311 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3312 stack_usage_map = initial_stack_usage_map;
3313 sibcall_failure = 1;
3315 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3317 #ifdef REG_PARM_STACK_SPACE
3319 restore_fixed_argument_area (save_area, argblock,
3320 high_to_save, low_to_save);
3323 /* If we saved any argument areas, restore them. */
3324 for (i = 0; i < num_actuals; i++)
3325 if (args[i].save_area)
3327 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3329 = gen_rtx_MEM (save_mode,
3330 memory_address (save_mode,
3331 XEXP (args[i].stack_slot, 0)));
3333 if (save_mode != BLKmode)
3334 emit_move_insn (stack_area, args[i].save_area);
3336 emit_block_move (stack_area, args[i].save_area,
3337 GEN_INT (args[i].locate.size.constant),
3338 BLOCK_OP_CALL_PARM);
3341 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3342 stack_usage_map = initial_stack_usage_map;
3345 /* If this was alloca, record the new stack level for nonlocal gotos.
3346 Check for the handler slots since we might not have a save area
3347 for non-local gotos. */
3349 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3350 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3352 /* Free up storage we no longer need. */
3353 for (i = 0; i < num_actuals; ++i)
3354 if (args[i].aligned_regs)
3355 free (args[i].aligned_regs);
3359 /* Undo the fake expand_start_target_temps we did earlier. If
3360 there had been any cleanups created, we've already set sibcall_failure. */
3362 expand_end_target_temps ();
3365 /* If this function is returning into a memory location marked as
3366 readonly, it means it is initializing that location. We normally treat
3367 functions as not clobbering such locations, so we need to specify that
3368 this one does. We do this by adding the appropriate CLOBBER to the
3369 CALL_INSN function usage list. This cannot be done by emitting a
3370 standalone CLOBBER after the call because the latter would be ignored
3371 by at least the delay slot scheduling pass. We do this now instead of
3372 adding to call_fusage before the call to emit_call_1 because TARGET
3373 may be modified in the meantime. */
3374 if (structure_value_addr != 0 && target != 0
3375 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3376 add_function_usage_to
3378 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3381 insns = get_insns ();
3386 tail_call_insns = insns;
3388 /* Restore the pending stack adjustment now that we have
3389 finished generating the sibling call sequence. */
3391 pending_stack_adjust = save_pending_stack_adjust;
3392 stack_pointer_delta = save_stack_pointer_delta;
3394 /* Prepare arg structure for next iteration. */
3395 for (i = 0; i < num_actuals; i++)
3398 args[i].aligned_regs = 0;
3402 sbitmap_free (stored_args_map);
3406 normal_call_insns = insns;
3408 /* Verify that we've deallocated all the stack we used. */
3409 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3410 && old_stack_allocated != stack_pointer_delta
3411 - pending_stack_adjust)
3415 /* If something prevents making this a sibling call,
3416 zero out the sequence. */
3417 if (sibcall_failure)
3418 tail_call_insns = NULL_RTX;
3421 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3422 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3423 can happen if the arguments to this function call an inline
3424 function whose expansion contains another CALL_PLACEHOLDER.
3426 If there are any C_Ps in any of these sequences, replace them
3427 with their normal call. */
3429 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3430 if (GET_CODE (insn) == CALL_INSN
3431 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3432 replace_call_placeholder (insn, sibcall_use_normal);
3434 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3435 if (GET_CODE (insn) == CALL_INSN
3436 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3437 replace_call_placeholder (insn, sibcall_use_normal);
3439 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3440 if (GET_CODE (insn) == CALL_INSN
3441 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3442 replace_call_placeholder (insn, sibcall_use_normal);
3444 /* If this was a potential tail recursion site, then emit a
3445 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3446 One of them will be selected later. */
3447 if (tail_recursion_insns || tail_call_insns)
3449 /* The tail recursion label must be kept around. We could expose
3450 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3451 and makes determining true tail recursion sites difficult.
3453 So we set LABEL_PRESERVE_P here, then clear it when we select
3454 one of the call sequences after rtl generation is complete. */
3455 if (tail_recursion_insns)
3456 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3457 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3459 tail_recursion_insns,
3460 tail_recursion_label));
3463 emit_insn (normal_call_insns);
3465 currently_expanding_call--;
3467 /* If this function returns with the stack pointer depressed, ensure
3468 this block saves and restores the stack pointer, show it was
3469 changed, and adjust for any outgoing arg space. */
3470 if (flags & ECF_SP_DEPRESSED)
3472 clear_pending_stack_adjust ();
3473 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3474 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3475 save_stack_pointer ();
3481 /* Traverse an argument list in VALUES and expand all complex
3482 arguments into their components. */
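/* A sketch of the transformation (with a hypothetical argument Z of type
   __complex__ double): the single list node holding Z is replaced by two
   nodes, the first holding REALPART_EXPR <SAVE_EXPR <Z>> and the second
   holding IMAGPART_EXPR <SAVE_EXPR <Z>>, so the real and imaginary parts
   are passed as two separate scalar arguments.  */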
3484 split_complex_values (tree values)
3488 values = copy_list (values);
3490 for (p = values; p; p = TREE_CHAIN (p))
3492 tree complex_value = TREE_VALUE (p);
3495 complex_type = TREE_TYPE (complex_value);
3499 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3502 tree real, imag, next;
3504 subtype = TREE_TYPE (complex_type);
3505 complex_value = save_expr (complex_value);
3506 real = build1 (REALPART_EXPR, subtype, complex_value);
3507 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3509 TREE_VALUE (p) = real;
3510 next = TREE_CHAIN (p);
3511 imag = build_tree_list (NULL_TREE, imag);
3512 TREE_CHAIN (p) = imag;
3513 TREE_CHAIN (imag) = next;
3515 /* Skip the newly created node. */
3523 /* Traverse a list of TYPES and expand all complex types into their components. */
3526 split_complex_types (tree types)
3530 types = copy_list (types);
3532 for (p = types; p; p = TREE_CHAIN (p))
3534 tree complex_type = TREE_VALUE (p);
3536 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3540 /* Rewrite complex type with component type. */
3541 TREE_VALUE (p) = TREE_TYPE (complex_type);
3542 next = TREE_CHAIN (p);
3544 /* Add another component type for the imaginary part. */
3545 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3546 TREE_CHAIN (p) = imag;
3547 TREE_CHAIN (imag) = next;
3549 /* Skip the newly created node. */
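/* For example, a hypothetical type list (complex double, int) becomes
   (double, double, int): the complex node is rewritten to its component
   type and a second node with the same component type is chained in for
   the imaginary part.  */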
3557 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3558 The RETVAL parameter specifies whether the return value needs to be saved; the other
3559 parameters are documented in the emit_library_call function below. */
3562 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3563 enum libcall_type fn_type,
3564 enum machine_mode outmode, int nargs, va_list p)
3566 /* Total size in bytes of all the stack-parms scanned so far. */
3567 struct args_size args_size;
3568 /* Size of arguments before any adjustments (such as rounding). */
3569 struct args_size original_args_size;
3575 CUMULATIVE_ARGS args_so_far;
3579 enum machine_mode mode;
3582 struct locate_and_pad_arg_data locate;
3586 int old_inhibit_defer_pop = inhibit_defer_pop;
3587 rtx call_fusage = 0;
3590 int pcc_struct_value = 0;
3591 int struct_value_size = 0;
3593 int reg_parm_stack_space = 0;
3596 tree tfom; /* type_for_mode (outmode, 0) */
3598 #ifdef REG_PARM_STACK_SPACE
3599 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
3601 int low_to_save, high_to_save;
3602 rtx save_area = 0; /* Place that it is saved. */
3605 /* Size of the stack reserved for parameter registers. */
3606 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3607 char *initial_stack_usage_map = stack_usage_map;
3609 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3611 #ifdef REG_PARM_STACK_SPACE
3612 #ifdef MAYBE_REG_PARM_STACK_SPACE
3613 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3615 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3619 /* By default, library functions can not throw. */
3620 flags = ECF_NOTHROW;
3632 case LCT_CONST_MAKE_BLOCK:
3633 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3635 case LCT_PURE_MAKE_BLOCK:
3636 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3639 flags |= ECF_NORETURN;
3642 flags = ECF_NORETURN;
3644 case LCT_ALWAYS_RETURN:
3645 flags = ECF_ALWAYS_RETURN;
3647 case LCT_RETURNS_TWICE:
3648 flags = ECF_RETURNS_TWICE;
3653 /* Ensure current function's preferred stack boundary is at least what we need. */
3655 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3656 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3658 /* If this kind of value comes back in memory,
3659 decide where in memory it should come back. */
3660 if (outmode != VOIDmode)
3662 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3663 if (aggregate_value_p (tfom, 0))
3665 #ifdef PCC_STATIC_STRUCT_RETURN
3667 = hard_function_value (build_pointer_type (tfom), 0, 0);
3668 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3669 pcc_struct_value = 1;
3671 value = gen_reg_rtx (outmode);
3672 #else /* not PCC_STATIC_STRUCT_RETURN */
3673 struct_value_size = GET_MODE_SIZE (outmode);
3674 if (value != 0 && GET_CODE (value) == MEM)
3677 mem_value = assign_temp (tfom, 0, 1, 1);
3679 /* This call returns a big structure. */
3680 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3684 tfom = void_type_node;
3686 /* ??? Unfinished: must pass the memory address as an argument. */
3688 /* Copy all the libcall-arguments out of the varargs data
3689 and into a vector ARGVEC.
3691 Compute how to pass each argument. We only support a very small subset
3692 of the full argument passing conventions to limit complexity here since
3693 library functions shouldn't have many args. */
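/* For illustration only (hypothetical libfunc and operands), a
   two-argument libcall would be emitted as

     emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
			op0, SImode, op1, SImode);

   i.e. the variable part of the argument list is a sequence of
   (rtx value, machine mode) pairs, which the loop over NARGS below
   retrieves with va_arg.  */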
3695 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3696 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3698 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3699 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3701 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3704 args_size.constant = 0;
3709 /* Now we are about to start emitting insns that can be deleted
3710 if a libcall is deleted. */
3711 if (flags & ECF_LIBCALL_BLOCK)
3716 /* If there's a structure value address to be passed,
3717 either pass it in the special place, or pass it as an extra argument. */
3718 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3720 rtx addr = XEXP (mem_value, 0);
3723 /* Make sure it is a reasonable operand for a move or push insn. */
3724 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3725 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3726 addr = force_operand (addr, NULL_RTX);
3728 argvec[count].value = addr;
3729 argvec[count].mode = Pmode;
3730 argvec[count].partial = 0;
3732 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3733 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3734 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3738 locate_and_pad_parm (Pmode, NULL_TREE,
3739 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3742 argvec[count].reg != 0,
3744 0, NULL_TREE, &args_size, &argvec[count].locate);
3746 if (argvec[count].reg == 0 || argvec[count].partial != 0
3747 || reg_parm_stack_space > 0)
3748 args_size.constant += argvec[count].locate.size.constant;
3750 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3755 for (; count < nargs; count++)
3757 rtx val = va_arg (p, rtx);
3758 enum machine_mode mode = va_arg (p, enum machine_mode);
3760 /* We cannot convert the arg value to the mode the library wants here;
3761 must do it earlier where we know the signedness of the arg. */
3762 if (mode == BLKmode
3763 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3764 abort ();
3766 /* There's no need to call protect_from_queue, because
3767 either emit_move_insn or emit_push_insn will do that. */
3769 /* Make sure it is a reasonable operand for a move or push insn. */
3770 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3771 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3772 val = force_operand (val, NULL_RTX);
3774 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3775 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3777 rtx slot;
3778 int must_copy = 1
3779 #ifdef FUNCTION_ARG_CALLEE_COPIES
3780 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3781 NULL_TREE, 1)
3782 #endif
3783 ;
3785 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3786 functions, so we have to pretend this isn't such a function. */
3787 if (flags & ECF_LIBCALL_BLOCK)
3789 rtx insns = get_insns ();
3790 end_sequence ();
3791 emit_insn (insns);
3793 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3795 /* If this was a CONST function, it is now PURE since
3796 it now reads memory. */
3797 if (flags & ECF_CONST)
3799 flags &= ~ECF_CONST;
3800 flags |= ECF_PURE;
3803 if (GET_CODE (val) == MEM && ! must_copy)
3804 slot = val;
3805 else if (must_copy)
3807 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3808 0, 1, 1);
3809 emit_move_insn (slot, val);
3811 else
3813 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3815 slot
3816 = gen_rtx_MEM (mode,
3817 expand_expr (build1 (ADDR_EXPR,
3818 build_pointer_type (type),
3819 make_tree (type, val)),
3820 NULL_RTX, VOIDmode, 0));
3823 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3824 gen_rtx_USE (VOIDmode, slot),
3825 call_fusage);
3826 if (must_copy)
3827 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3828 gen_rtx_CLOBBER (VOIDmode,
3829 slot),
3830 call_fusage);
3832 mode = Pmode;
3833 val = force_operand (XEXP (slot, 0), NULL_RTX);
3837 argvec[count].value = val;
3838 argvec[count].mode = mode;
3840 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3842 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3843 argvec[count].partial
3844 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3845 #else
3846 argvec[count].partial = 0;
3847 #endif
3849 locate_and_pad_parm (mode, NULL_TREE,
3850 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3851 1,
3852 #else
3853 argvec[count].reg != 0,
3854 #endif
3855 argvec[count].partial,
3856 NULL_TREE, &args_size, &argvec[count].locate);
3858 if (argvec[count].locate.size.var)
3859 abort ();
3861 if (argvec[count].reg == 0 || argvec[count].partial != 0
3862 || reg_parm_stack_space > 0)
3863 args_size.constant += argvec[count].locate.size.constant;
3865 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3868 #ifdef FINAL_REG_PARM_STACK_SPACE
3869 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3870 args_size.var);
3871 #endif
3872 /* If this machine requires an external definition for library
3873 functions, write one out. */
3874 assemble_external_libcall (fun);
3876 original_args_size = args_size;
3877 args_size.constant = (((args_size.constant
3878 + stack_pointer_delta
3879 + STACK_BYTES - 1)
3880 / STACK_BYTES
3881 * STACK_BYTES)
3882 - stack_pointer_delta);
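/* For example, with STACK_BYTES == 16, 20 bytes of arguments and a
   stack_pointer_delta of 8 round up to 32, leaving args_size.constant
   equal to 32 - 8 = 24.  */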
3884 args_size.constant = MAX (args_size.constant,
3885 reg_parm_stack_space);
3887 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3888 args_size.constant -= reg_parm_stack_space;
3889 #endif
3891 if (args_size.constant > current_function_outgoing_args_size)
3892 current_function_outgoing_args_size = args_size.constant;
3894 if (ACCUMULATE_OUTGOING_ARGS)
3896 /* Since the stack pointer will never be pushed, it is possible for
3897 the evaluation of a parm to clobber something we have already
3898 written to the stack. Since most function calls on RISC machines
3899 do not use the stack, this is uncommon, but must work correctly.
3901 Therefore, we save any area of the stack that was already written
3902 and that we are using. Here we set up to do this by making a new
3903 stack usage map from the old one.
3905 Another approach might be to try to reorder the argument
3906 evaluations to avoid this conflicting stack usage. */
3908 needed = args_size.constant;
3910 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3911 /* Since we will be writing into the entire argument area, the
3912 map must be allocated for its entire size, not just the part that
3913 is the responsibility of the caller. */
3914 needed += reg_parm_stack_space;
3915 #endif
3917 #ifdef ARGS_GROW_DOWNWARD
3918 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3919 needed + 1);
3920 #else
3921 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3922 needed);
3923 #endif
3924 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3926 if (initial_highest_arg_in_use)
3927 memcpy (stack_usage_map, initial_stack_usage_map,
3928 initial_highest_arg_in_use);
3930 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3931 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3932 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3935 /* We must be careful to use virtual regs before they're instantiated,
3936 and real regs afterwards. Loop optimization, for example, can create
3937 new libcalls after we've instantiated the virtual regs, and if we
3938 use virtuals anyway, they won't match the rtl patterns. */
3940 if (virtuals_instantiated)
3941 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3942 else
3943 argblock = virtual_outgoing_args_rtx;
3945 else
3947 if (!PUSH_ARGS)
3948 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3951 /* If we push args individually in reverse order, perform stack alignment
3952 before the first push (the last arg). */
3953 if (argblock == 0 && PUSH_ARGS_REVERSED)
3954 anti_adjust_stack (GEN_INT (args_size.constant
3955 - original_args_size.constant));
3957 if (PUSH_ARGS_REVERSED)
3958 {
3959 inc = -1;
3960 argnum = nargs - 1;
3961 }
3962 else
3963 {
3964 inc = 1;
3965 argnum = 0;
3966 }
3968 #ifdef REG_PARM_STACK_SPACE
3969 if (ACCUMULATE_OUTGOING_ARGS)
3971 /* The argument list is the property of the called routine and it
3972 may clobber it. If the fixed area has been used for previous
3973 parameters, we must save and restore it. */
3974 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3975 &low_to_save, &high_to_save);
3977 #endif
3979 /* Push the args that need to be pushed. */
3981 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3982 are to be pushed. */
3983 for (count = 0; count < nargs; count++, argnum += inc)
3985 enum machine_mode mode = argvec[argnum].mode;
3986 rtx val = argvec[argnum].value;
3987 rtx reg = argvec[argnum].reg;
3988 int partial = argvec[argnum].partial;
3989 int lower_bound = 0, upper_bound = 0, i;
3991 if (! (reg != 0 && partial == 0))
3993 if (ACCUMULATE_OUTGOING_ARGS)
3995 /* If this is being stored into a pre-allocated, fixed-size,
3996 stack area, save any previous data at that location. */
3998 #ifdef ARGS_GROW_DOWNWARD
3999 /* stack_slot is negative, but we want to index stack_usage_map
4000 with positive values. */
4001 upper_bound = -argvec[argnum].locate.offset.constant + 1;
4002 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4003 #else
4004 lower_bound = argvec[argnum].locate.offset.constant;
4005 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4006 #endif
4008 i = lower_bound;
4009 /* Don't worry about things in the fixed argument area;
4010 it has already been saved. */
4011 if (i < reg_parm_stack_space)
4012 i = reg_parm_stack_space;
4013 while (i < upper_bound && stack_usage_map[i] == 0)
4014 i++;
4016 if (i < upper_bound)
4018 /* We need to make a save area. */
4019 unsigned int size
4020 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4021 enum machine_mode save_mode
4022 = mode_for_size (size, MODE_INT, 1);
4023 rtx adr
4024 = plus_constant (argblock,
4025 argvec[argnum].locate.offset.constant);
4026 rtx stack_area
4027 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4029 if (save_mode == BLKmode)
4031 argvec[argnum].save_area
4032 = assign_stack_temp (BLKmode,
4033 argvec[argnum].locate.size.constant,
4034 0);
4036 emit_block_move (validize_mem (argvec[argnum].save_area),
4037 stack_area,
4038 GEN_INT (argvec[argnum].locate.size.constant),
4039 BLOCK_OP_CALL_PARM);
4041 else
4043 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4045 emit_move_insn (argvec[argnum].save_area, stack_area);
4050 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4051 partial, reg, 0, argblock,
4052 GEN_INT (argvec[argnum].locate.offset.constant),
4053 reg_parm_stack_space,
4054 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4056 /* Now mark the segment we just used. */
4057 if (ACCUMULATE_OUTGOING_ARGS)
4058 for (i = lower_bound; i < upper_bound; i++)
4059 stack_usage_map[i] = 1;
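/* Bytes [lower_bound, upper_bound) of stack_usage_map are now flagged as in
   use, so a later argument that lands on the same area will first be saved
   by the code above.  */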
4065 /* If we pushed args in forward order, perform stack alignment
4066 after pushing the last arg. */
4067 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4068 anti_adjust_stack (GEN_INT (args_size.constant
4069 - original_args_size.constant));
4071 if (PUSH_ARGS_REVERSED)
4072 argnum = nargs - 1;
4073 else
4074 argnum = 0;
4076 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4078 /* Now load any reg parms into their regs. */
4080 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4081 are to be pushed. */
4082 for (count = 0; count < nargs; count++, argnum += inc)
4084 rtx val = argvec[argnum].value;
4085 rtx reg = argvec[argnum].reg;
4086 int partial = argvec[argnum].partial;
4088 /* Handle calls that pass values in multiple non-contiguous
4089 locations. The PA64 has examples of this for library calls. */
4090 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4091 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
4092 else if (reg != 0 && partial == 0)
4093 emit_move_insn (reg, val);
4098 /* Any regs containing parms remain in use through the call. */
4099 for (count = 0; count < nargs; count++)
4101 rtx reg = argvec[count].reg;
4102 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4103 use_group_regs (&call_fusage, reg);
4104 else if (reg != 0)
4105 use_reg (&call_fusage, reg);
4108 /* Pass the function the address in which to return a structure value. */
4109 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4111 emit_move_insn (struct_value,
4112 force_reg (Pmode,
4113 force_operand (XEXP (mem_value, 0),
4114 NULL_RTX)));
4115 if (GET_CODE (struct_value) == REG)
4116 use_reg (&call_fusage, struct_value);
4119 /* Don't allow popping to be deferred, since then
4120 cse'ing of library calls could delete a call and leave the pop.  */
4121 NO_DEFER_POP;
4122 valreg = (mem_value == 0 && outmode != VOIDmode
4123 ? hard_libcall_value (outmode) : NULL_RTX);
4125 /* Stack must be properly aligned now. */
4126 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4127 abort ();
4129 before_call = get_last_insn ();
4131 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4132 will set inhibit_defer_pop to that value. */
4133 /* The return type is needed to decide how many bytes the function pops.
4134 Signedness plays no role in that, so for simplicity, we pretend it's
4135 always signed. We also assume that the list of arguments passed has
4136 no impact, so we pretend it is unknown. */
4138 emit_call_1 (fun,
4139 get_identifier (XSTR (orgfun, 0)),
4140 build_function_type (tfom, NULL_TREE),
4141 original_args_size.constant, args_size.constant,
4142 struct_value_size,
4143 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4144 valreg,
4145 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4147 /* For calls to `setjmp', etc., inform flow.c it should complain
4148 if nonvolatile values are live. For functions that cannot return,
4149 inform flow that control does not fall through. */
4151 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4153 /* The barrier note must be emitted
4154 immediately after the CALL_INSN. Some ports emit more than
4155 just a CALL_INSN above, so we must search for it here. */
4157 rtx last = get_last_insn ();
4158 while (GET_CODE (last) != CALL_INSN)
4160 last = PREV_INSN (last);
4161 /* There was no CALL_INSN? */
4162 if (last == before_call)
4163 abort ();
4166 emit_barrier_after (last);
4169 /* Now restore inhibit_defer_pop to its actual original value.  */
4170 OK_DEFER_POP;
4172 /* If call is cse'able, make appropriate pair of reg-notes around it.
4173 Test valreg so we don't crash; may safely ignore `const'
4174 if return type is void. Disable for PARALLEL return values, because
4175 we have no way to move such values into a pseudo register. */
4176 if (flags & ECF_LIBCALL_BLOCK)
4182 insns = get_insns ();
4192 if (GET_CODE (valreg) == PARALLEL)
4194 temp = gen_reg_rtx (outmode);
4195 emit_group_store (temp, valreg, NULL_TREE,
4196 GET_MODE_SIZE (outmode));
4200 temp = gen_reg_rtx (GET_MODE (valreg));
4202 /* Construct an "equal form" for the value which mentions all the
4203 arguments in order as well as the function name. */
4204 for (i = 0; i < nargs; i++)
4205 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4206 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4208 insns = get_insns ();
4211 if (flags & ECF_PURE)
4212 note = gen_rtx_EXPR_LIST (VOIDmode,
4213 gen_rtx_USE (VOIDmode,
4214 gen_rtx_MEM (BLKmode,
4215 gen_rtx_SCRATCH (VOIDmode))),
4216 note);
4218 emit_libcall_block (insns, temp, valreg, note);
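/* emit_libcall_block brackets INSNS with REG_LIBCALL/REG_RETVAL notes and
   records NOTE as a REG_EQUAL note for TEMP, which is what allows CSE to
   treat the whole libcall sequence as a single unit.  */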
4225 /* Copy the value to the right place. */
4226 if (outmode != VOIDmode && retval)
4232 if (value != mem_value)
4233 emit_move_insn (value, mem_value);
4235 else if (GET_CODE (valreg) == PARALLEL)
4237 if (value == 0)
4238 value = gen_reg_rtx (outmode);
4239 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4241 else if (value != 0)
4242 emit_move_insn (value, valreg);
4243 else
4244 value = valreg;
4247 if (ACCUMULATE_OUTGOING_ARGS)
4249 #ifdef REG_PARM_STACK_SPACE
4250 if (save_area)
4251 restore_fixed_argument_area (save_area, argblock,
4252 high_to_save, low_to_save);
4253 #endif
4255 /* If we saved any argument areas, restore them. */
4256 for (count = 0; count < nargs; count++)
4257 if (argvec[count].save_area)
4259 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4260 rtx adr = plus_constant (argblock,
4261 argvec[count].locate.offset.constant);
4262 rtx stack_area = gen_rtx_MEM (save_mode,
4263 memory_address (save_mode, adr));
4265 if (save_mode == BLKmode)
4266 emit_block_move (stack_area,
4267 validize_mem (argvec[count].save_area),
4268 GEN_INT (argvec[count].locate.size.constant),
4269 BLOCK_OP_CALL_PARM);
4270 else
4271 emit_move_insn (stack_area, argvec[count].save_area);
4274 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4275 stack_usage_map = initial_stack_usage_map;
4282 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4283 (emitting the queue unless NO_QUEUE is nonzero),
4284 for a value of mode OUTMODE,
4285 with NARGS different arguments, passed as alternating rtx values
4286 and machine_modes to convert them to.
4287 The rtx values should have been passed through protect_from_queue already.
4289 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4290 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4291 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4292 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4293 REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch))),
4294 or other LCT_ value for other types of library calls. */
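/* An illustrative use (the __example symbol and the operands OP0 and OP1 are
   hypothetical, not taken from this file):

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example"), LCT_NORMAL,
			VOIDmode, 2, op0, SImode, op1, SImode);

   passes the two SImode operands and emits the call insn, discarding any
   return value.  */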
4296 void
4297 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4298 enum machine_mode outmode, int nargs, ...)
4300 va_list p;
4302 va_start (p, nargs);
4303 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4304 va_end (p);
4307 /* Like emit_library_call except that an extra argument, VALUE,
4308 comes second and says where to store the result.
4309 (If VALUE is zero, this function chooses a convenient way
4310 to return the value.)
4312 This function returns an rtx for where the value is to be found.
4313 If VALUE is nonzero, VALUE is returned. */
4315 rtx
4316 emit_library_call_value (rtx orgfun, rtx value,
4317 enum libcall_type fn_type,
4318 enum machine_mode outmode, int nargs, ...)
4320 rtx result;
4321 va_list p;
4323 va_start (p, nargs);
4324 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4325 nargs, p);
4326 va_end (p);
4328 return result;
4331 /* Store a single argument for a function call
4332 into the register or memory area where it must be passed.
4333 *ARG describes the argument value and where to pass it.
4335 ARGBLOCK is the address of the stack-block for all the arguments,
4336 or 0 on a machine where arguments are pushed individually.
4338 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4339 so must be careful about how the stack is used.
4341 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4342 argument stack.  This is used under ACCUMULATE_OUTGOING_ARGS to indicate
4343 that we need not worry about saving and restoring the stack.
4345 FNDECL is the declaration of the function we are calling.
4347 Return nonzero if this arg should cause sibcall failure,
4348 zero otherwise.  */
4350 static int
4351 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4352 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4354 tree pval = arg->tree_value;
4355 rtx reg = 0;
4356 int partial = 0;
4357 int used = 0;
4358 int i, lower_bound = 0, upper_bound = 0;
4359 int sibcall_failure = 0;
4361 if (TREE_CODE (pval) == ERROR_MARK)
4362 return 1;
4364 /* Push a new temporary level for any temporaries we make for
4365 this argument.  */
4366 push_temp_slots ();
4368 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4370 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4371 save any previous data at that location. */
4372 if (argblock && ! variable_size && arg->stack)
4374 #ifdef ARGS_GROW_DOWNWARD
4375 /* stack_slot is negative, but we want to index stack_usage_map
4376 with positive values. */
4377 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4378 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4382 lower_bound = upper_bound - arg->locate.size.constant;
4383 #else
4384 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4385 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4386 else
4387 lower_bound = 0;
4389 upper_bound = lower_bound + arg->locate.size.constant;
4390 #endif
4392 i = lower_bound;
4393 /* Don't worry about things in the fixed argument area;
4394 it has already been saved. */
4395 if (i < reg_parm_stack_space)
4396 i = reg_parm_stack_space;
4397 while (i < upper_bound && stack_usage_map[i] == 0)
4398 i++;
4400 if (i < upper_bound)
4402 /* We need to make a save area. */
4403 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4404 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4405 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4406 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4408 if (save_mode == BLKmode)
4410 tree ot = TREE_TYPE (arg->tree_value);
4411 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4412 | TYPE_QUAL_CONST));
4414 arg->save_area = assign_temp (nt, 0, 1, 1);
4415 preserve_temp_slots (arg->save_area);
4416 emit_block_move (validize_mem (arg->save_area), stack_area,
4417 expr_size (arg->tree_value),
4418 BLOCK_OP_CALL_PARM);
4420 else
4422 arg->save_area = gen_reg_rtx (save_mode);
4423 emit_move_insn (arg->save_area, stack_area);
4429 /* If this isn't going to be placed on both the stack and in registers,
4430 set up the register and number of words. */
4431 if (! arg->pass_on_stack)
4433 if (flags & ECF_SIBCALL)
4434 reg = arg->tail_call_reg;
4435 else
4436 reg = arg->reg;
4437 partial = arg->partial;
4440 if (reg != 0 && partial == 0)
4441 /* Being passed entirely in a register.  We shouldn't be called in
4442 this case.  */
4443 abort ();
4445 /* If this arg needs special alignment, don't load the registers
4446 here.  */
4447 if (arg->n_aligned_regs != 0)
4448 reg = 0;
4450 /* If this is being passed partially in a register, we can't evaluate
4451 it directly into its stack slot. Otherwise, we can. */
4452 if (arg->value == 0)
4454 /* stack_arg_under_construction is nonzero if a function argument is
4455 being evaluated directly into the outgoing argument list and
4456 expand_call must take special action to preserve the argument list
4457 if it is called recursively.
4459 For scalar function arguments stack_usage_map is sufficient to
4460 determine which stack slots must be saved and restored. Scalar
4461 arguments in general have pass_on_stack == 0.
4463 If this argument is initialized by a function which takes the
4464 address of the argument (a C++ constructor or a C function
4465 returning a BLKmode structure), then stack_usage_map is
4466 insufficient and expand_call must push the stack around the
4467 function call. Such arguments have pass_on_stack == 1.
4469 Note that it is always safe to set stack_arg_under_construction,
4470 but this generates suboptimal code if set when not needed. */
4472 if (arg->pass_on_stack)
4473 stack_arg_under_construction++;
4475 arg->value = expand_expr (pval,
4476 (partial
4477 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4478 ? NULL_RTX : arg->stack,
4479 VOIDmode, EXPAND_STACK_PARM);
4481 /* If we are promoting the object (or if for any other reason the mode
4482 doesn't agree), convert the mode now.  */
4484 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4485 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4486 arg->value, arg->unsignedp);
4488 if (arg->pass_on_stack)
4489 stack_arg_under_construction--;
4492 /* Don't allow anything left on stack from computation
4493 of argument to alloca. */
4494 if (flags & ECF_MAY_BE_ALLOCA)
4495 do_pending_stack_adjust ();
4497 if (arg->value == arg->stack)
4498 /* If the value is already in the stack slot, we are done.  */
4499 ;
4500 else if (arg->mode != BLKmode)
4504 /* Argument is a scalar, not entirely passed in registers.
4505 (If part is passed in registers, arg->partial says how much
4506 and emit_push_insn will take care of putting it there.)
4508 Push it, and if its size is less than the
4509 amount of space allocated to it,
4510 also bump stack pointer by the additional space.
4511 Note that in C the default argument promotions
4512 will prevent such mismatches. */
4514 size = GET_MODE_SIZE (arg->mode);
4515 /* Compute how much space the push instruction will push.
4516 On many machines, pushing a byte will advance the stack
4517 pointer by a halfword. */
4518 #ifdef PUSH_ROUNDING
4519 size = PUSH_ROUNDING (size);
4520 #endif
4521 used = size;
4523 /* Compute how much space the argument should get:
4524 round up to a multiple of the alignment for arguments. */
4525 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4526 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4527 / (PARM_BOUNDARY / BITS_PER_UNIT))
4528 * (PARM_BOUNDARY / BITS_PER_UNIT));
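/* For example, a 1-byte argument with PARM_BOUNDARY == 32 rounds USED up to
   4, and the 3 bytes of padding are handed to emit_push_insn below as the
   extra space argument (used - size).  */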
4530 /* This isn't already where we want it on the stack, so put it there.
4531 This can either be done with push or copy insns. */
4532 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4533 PARM_BOUNDARY, partial, reg, used - size, argblock,
4534 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4535 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4537 /* Unless this is a partially-in-register argument, the argument is now
4538 in the stack.  */
4539 if (partial == 0)
4540 arg->value = arg->stack;
4544 /* BLKmode, at least partly to be pushed. */
4546 unsigned int parm_align;
4547 int excess;
4548 rtx size_rtx;
4550 /* Pushing a nonscalar.
4551 If part is passed in registers, PARTIAL says how much
4552 and emit_push_insn will take care of putting it there. */
4554 /* Round its size up to a multiple
4555 of the allocation unit for arguments. */
4557 if (arg->locate.size.var != 0)
4559 excess = 0;
4560 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4562 else
4564 /* PUSH_ROUNDING has no effect on us, because
4565 emit_push_insn for BLKmode is careful to avoid it. */
4566 excess = (arg->locate.size.constant
4567 - int_size_in_bytes (TREE_TYPE (pval))
4568 + partial * UNITS_PER_WORD);
4569 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4570 NULL_RTX, TYPE_MODE (sizetype), 0);
4573 /* Some types will require stricter alignment, which will be
4574 provided for elsewhere in argument layout. */
4575 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4577 /* When an argument is padded down, the block is aligned to
4578 PARM_BOUNDARY, but the actual argument isn't. */
4579 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4581 if (arg->locate.size.var)
4582 parm_align = BITS_PER_UNIT;
4583 else if (excess)
4585 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4586 parm_align = MIN (parm_align, excess_align);
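/* EXCESS & -EXCESS isolates the lowest set bit of EXCESS, so EXCESS_ALIGN is
   the largest power-of-two byte alignment that the padding amount is known
   to preserve; PARM_ALIGN is capped accordingly.  */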
4590 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4592 /* emit_push_insn might not work properly if arg->value and
4593 argblock + arg->locate.offset areas overlap.  */
4594 rtx x = arg->value;
4595 int i = 0;
4597 if (XEXP (x, 0) == current_function_internal_arg_pointer
4598 || (GET_CODE (XEXP (x, 0)) == PLUS
4599 && XEXP (XEXP (x, 0), 0) ==
4600 current_function_internal_arg_pointer
4601 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4603 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4604 i = INTVAL (XEXP (XEXP (x, 0), 1));
4606 /* expand_call should ensure this */
4607 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4608 abort ();
4610 if (arg->locate.offset.constant > i)
4612 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4613 sibcall_failure = 1;
4615 else if (arg->locate.offset.constant < i)
4617 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4618 sibcall_failure = 1;
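/* The two branches above form an overlap test between the incoming area at
   [i, i + size) and the outgoing slot at [offset, offset + size); exact
   coincidence is harmless, but partial overlap forces a normal call.  */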
4623 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4624 parm_align, partial, reg, excess, argblock,
4625 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4626 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4628 /* Unless this is a partially-in-register argument, the argument is now
4629 in the stack.
4631 ??? Unlike the case above, in which we want the actual
4632 address of the data, so that we can load it directly into a
4633 register, here we want the address of the stack slot, so that
4634 it's properly aligned for word-by-word copying or something
4635 like that. It's not clear that this is always correct. */
4636 if (partial == 0)
4637 arg->value = arg->stack_slot;
4640 /* Mark all slots this store used. */
4641 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4642 && argblock && ! variable_size && arg->stack)
4643 for (i = lower_bound; i < upper_bound; i++)
4644 stack_usage_map[i] = 1;
4646 /* Once we have pushed something, pops can't safely
4647 be deferred during the rest of the arguments.  */
4648 NO_DEFER_POP;
4650 /* ANSI doesn't require a sequence point here,
4651 but PCC has one, so this will avoid some problems.  */
4652 emit_queue ();
4654 /* Free any temporary slots made in processing this argument. Show
4655 that we might have taken the address of something and pushed that
4656 as an operand.  */
4657 preserve_temp_slots (NULL_RTX);
4658 free_temp_slots ();
4659 pop_temp_slots ();
4661 return sibcall_failure;
4664 /* Nonzero if we do not know how to pass TYPE solely in registers.
4665 We cannot do so in the following cases:
4667 - if the type has variable size
4668 - if the type is marked as addressable (it is required to be constructed
4669 into the stack)
4670 - if the padding and mode of the type is such that a copy into a register
4671 would put it into the wrong part of the register.
4673 Which padding can't be supported depends on the byte endianness.
4675 A value in a register is implicitly padded at the most significant end.
4676 On a big-endian machine, that is the lower end in memory.
4677 So a value padded in memory at the upper end can't go in a register.
4678 For a little-endian machine, the reverse is true. */
4680 bool
4681 default_must_pass_in_stack (enum machine_mode mode, tree type)
4683 if (!type)
4684 return false;
4686 /* If the type has variable size... */
4687 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4688 return true;
4690 /* If the type is marked as addressable (it is required
4691 to be constructed into the stack)... */
4692 if (TREE_ADDRESSABLE (type))
4693 return true;
4695 /* If the padding and mode of the type is such that a copy into
4696 a register would put it into the wrong part of the register. */
4697 if (mode == BLKmode
4698 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4699 && (FUNCTION_ARG_PADDING (mode, type)
4700 == (BYTES_BIG_ENDIAN ? upward : downward)))
4701 return true;
4703 return false;