1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
/* NOTE(review): this SOURCE is a fragmented excerpt of GCC's calls.c.
   Every line carries a pasted-in original line-number prefix, and many
   lines -- including the #endif directives matching the #if blocks
   below -- are missing from the excerpt.  Comments added here annotate
   only what is visible; do not assume the fragments compile.  */
28 #include "insn-flags.h"
/* Default the preferred stack alignment to the hard requirement when
   the target does not define a stricter preferred value.  */
32 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
33 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* NOTE(review): the push-insn part of the condition described above is
   not visible in this excerpt -- presumably tested on a missing line.  */
50 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
51 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
53 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): the `struct arg_data {' opening line, its closing
   brace, and several member declarations are missing from this
   excerpt; mostly the per-field comments and a subset of the
   `struct args_size' members survive.  Each entry describes one
   actual argument of a call being expanded.  */
57 /* Tree node for this argument. */
59 /* Mode for value; TYPE_MODE unless promoted. */
60 enum machine_mode mode;
/* The `value'/`initial_value' rtx fields this comment pair described
   are not visible in the excerpt -- TODO confirm against full file.  */
61 /* Current RTL value for argument, or 0 if it isn't precomputed. */
63 /* Initially-compute RTL value for argument; only for const functions. */
65 /* Register to pass this argument in, 0 if passed on stack, or an
66 PARALLEL if the arg is to be copied into multiple non-contiguous
69 /* If REG was promoted from the actual mode of the argument expression,
70 indicates whether the promotion is sign- or zero-extended. */
72 /* Number of registers to use. 0 means put the whole arg in registers.
73 Also 0 if not passed in registers. */
75 /* Non-zero if argument must be passed on stack.
76 Note that some arguments may be passed on the stack
77 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
78 pass_on_stack identifies arguments that *cannot* go in registers. */
80 /* Offset of this argument from beginning of stack-args. */
81 struct args_size offset;
82 /* Similar, but offset to the start of the stack slot. Different from
83 OFFSET if this arg pads downward. */
84 struct args_size slot_offset;
85 /* Size of this argument on the stack, rounded up for any padding it gets,
86 parts of the argument passed in registers do not count.
87 If REG_PARM_STACK_SPACE is defined, then register parms
88 are counted here as well. */
89 struct args_size size;
90 /* Location on the stack at which parameter should be stored. The store
91 has already been done if STACK == VALUE. */
93 /* Location on the stack of the start of this argument slot. This can
94 differ from STACK if this arg pads downward. This location is known
95 to be aligned to FUNCTION_ARG_BOUNDARY. */
/* Extra members only needed when outgoing arguments accumulate in a
   fixed stack area; the matching #endif is not visible here.  */
97 #ifdef ACCUMULATE_OUTGOING_ARGS
98 /* Place that this stack area has been saved, if needed. */
101 /* If an argument's alignment does not permit direct copying into registers,
102 copy in smaller-sized pieces into pseudos. These are stored in a
103 block pointed to by this field. The next field says how many
104 word-sized pseudos we made. */
/* File-scope bookkeeping for the outgoing-argument area; the #endif
   matching this #ifdef is not visible in the excerpt.  */
109 #ifdef ACCUMULATE_OUTGOING_ARGS
110 /* A vector of one char per byte of stack space. A byte is non-zero if
111 the corresponding stack location has been used.
112 This vector is used to prevent a function call within an argument from
113 clobbering any stack already set up. */
114 static char *stack_usage_map;
116 /* Size of STACK_USAGE_MAP. */
117 static int highest_outgoing_arg_in_use;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
/* Non-static: presumably also read/written outside this file --
   TODO confirm against the rest of the compiler.  */
124 int stack_arg_under_construction;
/* Forward declarations for this file's static helpers (old-style PROTO
   macro).  NOTE(review): the continuation lines completing the
   emit_call_1 and store_one_arg parameter lists are missing from this
   excerpt.  */
127 static int calls_function PROTO((tree, int));
128 static int calls_function_1 PROTO((tree, int));
129 static void emit_call_1 PROTO((rtx, tree, tree, HOST_WIDE_INT,
130 HOST_WIDE_INT, rtx, rtx,
132 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
135 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
138 If WHICH is 0, return 1 if EXP contains a call to any function.
139 Actually, we only need return 1 if evaluating EXP would require pushing
140 arguments on the stack, but that is too difficult to compute, so we just
141 assume any function call might require the stack. */
/* Memo list used by calls_function_1 to remember SAVE_EXPRs already
   scanned, so shared subtrees are not walked twice (see the
   value_member/tree_cons pair in calls_function_1 below).  */
143 static tree calls_function_save_exprs;
/* Entry point: resets the memo list around a calls_function_1 walk.
   NOTE(review): the return-type line, parameter declarations, braces,
   and the `return val;' are missing from this excerpt.  */
146 calls_function (exp, which)
151 calls_function_save_exprs = 0;
152 val = calls_function_1 (exp, which);
/* Clear again so no tree nodes are kept live between queries.  */
153 calls_function_save_exprs = 0;
/* Recursive worker for calls_function; returns nonzero if EXP contains
   a call (WHICH == 0) or a call to alloca (WHICH == 1, presumably --
   the WHICH == 1 tests are only partially visible).  NOTE(review):
   the return type, parameter declarations, braces, the switch head,
   and several returns are missing from this excerpt.  */
158 calls_function_1 (exp, which)
163 enum tree_code code = TREE_CODE (exp);
164 int type = TREE_CODE_CLASS (code);
/* Operand count for this tree code; bounds the generic operand walk
   at the end of the function.  */
165 int length = tree_code_length[(int) code];
167 /* If this code is language-specific, we don't know what it will do. */
168 if ((int) code >= NUM_TREE_CODES)
171 /* Only expressions and references can contain calls. */
172 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* A call through the address of a known FUNCTION_DECL: detect calls
   to the alloca built-in, or to a saved-insns function whose flags
   say it calls alloca.  */
181 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
182 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
185 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
187 if ((DECL_BUILT_IN (fndecl)
188 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
189 || (DECL_SAVED_INSNS (fndecl)
190 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
191 & FUNCTION_FLAGS_CALLS_ALLOCA)))
195 /* Third operand is RTL. */
/* SAVE_EXPR case: an already-expanded SAVE_EXPR (RTL set) or one seen
   earlier in this walk cannot introduce a new call; otherwise record
   it on the memo list and recurse into its operand once.  */
200 if (SAVE_EXPR_RTL (exp) != 0)
202 if (value_member (exp, calls_function_save_exprs))
204 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
205 calls_function_save_exprs);
206 return (TREE_OPERAND (exp, 0) != 0
207 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK case (presumably): scan the initializers of local decls and
   then each subblock -- TODO confirm; the case label is missing.  */
213 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
214 if (DECL_INITIAL (local) != 0
215 && calls_function_1 (DECL_INITIAL (local), which))
219 register tree subblock;
221 for (subblock = BLOCK_SUBBLOCKS (exp);
223 subblock = TREE_CHAIN (subblock))
224 if (calls_function_1 (subblock, which))
229 case METHOD_CALL_EXPR:
233 case WITH_CLEANUP_EXPR:
/* Default: recurse over every operand of this node.  */
244 for (i = 0; i < length; i++)
245 if (TREE_OPERAND (exp, i) != 0
246 && calls_function_1 (TREE_OPERAND (exp, i), which))
252 /* Force FUNEXP into a form suitable for the address of a CALL,
253 and return that as an rtx. Also load the static chain register
254 if FNDECL is a nested function.
256 CALL_FUSAGE points to a variable holding the prospective
257 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): the return-type line, parameter declarations, braces,
   and the `return funexp;' are missing from this excerpt.  */
260 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
266 rtx static_chain_value = 0;
268 funexp = protect_from_queue (funexp, 0);
271 /* Get possible static chain value for nested function in C. */
272 static_chain_value = lookup_static_chain (fndecl);
274 /* Make a valid memory address and copy constants thru pseudo-regs,
275 but not for a constant address if -fno-function-cse. */
276 if (GET_CODE (funexp) != SYMBOL_REF)
277 /* If we are using registers for parameters, force the
278 function address into a register now. */
279 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
280 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
281 : memory_address (FUNCTION_MODE, funexp));
/* When function-address CSE is enabled, load the address into a
   pseudo so common subexpression elimination can share it -- except
   for recursive calls on targets that forbid that.  */
284 #ifndef NO_FUNCTION_CSE
285 if (optimize && ! flag_no_function_cse)
286 #ifdef NO_RECURSIVE_FUNCTION_CSE
287 if (fndecl != current_function_decl)
289 funexp = force_reg (Pmode, funexp);
/* Load the static chain and record its register in the call's
   FUNCTION_USAGE list so it is seen as live across the call.  */
293 if (static_chain_value != 0)
295 emit_move_insn (static_chain_rtx, static_chain_value);
297 if (GET_CODE (static_chain_rtx) == REG)
298 use_reg (call_fusage, static_chain_rtx);
304 /* Generate instructions to call function FUNEXP,
305 and optionally pop the results.
306 The CALL_INSN is the first insn generated.
308 FNDECL is the declaration node of the function. This is given to the
309 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
311 FUNTYPE is the data type of the function. This is given to the macro
312 RETURN_POPS_ARGS to determine whether this function pops its own args.
313 We used to allow an identifier for library functions, but that doesn't
314 work when the return type is an aggregate type and the calling convention
315 says that the pointer to this aggregate is to be popped by the callee.
317 STACK_SIZE is the number of bytes of arguments on the stack,
318 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
319 This is both to put into the call insn and
320 to generate explicit popping code if necessary.
322 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
323 It is zero if this call doesn't want a structure value.
325 NEXT_ARG_REG is the rtx that results from executing
326 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
327 just after all the args have had their registers assigned.
328 This could be whatever you like, but normally it is the first
329 arg-register beyond those used for args in this call,
330 or 0 if all the arg-registers are used in this call.
331 It is passed on to `gen_call' so you can put this info in the call insn.
333 VALREG is a hard register in which a value is returned,
334 or 0 if the call does not return a value.
336 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
337 the args to this call were processed.
338 We restore `inhibit_defer_pop' to that value.
340 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
341 denote registers used by the called function.
343 IS_CONST is true if this is a `const' call. */
/* NOTE(review): this definition is heavily fragmented -- the return
   type, several parameter declarations (is_const among them), braces,
   #else/#endif lines, and a number of statements are missing from
   this excerpt.  */
346 emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
347 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
350 tree fndecl ATTRIBUTE_UNUSED;
351 tree funtype ATTRIBUTE_UNUSED;
352 HOST_WIDE_INT stack_size;
353 HOST_WIDE_INT struct_value_size;
356 int old_inhibit_defer_pop;
360 rtx stack_size_rtx = GEN_INT (stack_size);
361 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
/* Tracks whether a call_pop pattern already popped the args, so the
   explicit popping code at the end can be skipped (presumably -- the
   assignments to it are not visible here).  */
363 #ifndef ACCUMULATE_OUTGOING_ARGS
364 int already_popped = 0;
367 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
368 and we don't want to load it into a register as an optimization,
369 because prepare_call_address already did it if it should be done. */
370 if (GET_CODE (funexp) != SYMBOL_REF)
371 funexp = memory_address (FUNCTION_MODE, funexp);
/* Prefer the target's call_pop/call_value_pop patterns when the
   callee pops its own arguments (RETURN_POPS_ARGS > 0), encoding the
   pop count in the insn itself.  */
373 #ifndef ACCUMULATE_OUTGOING_ARGS
374 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
375 if (HAVE_call_pop && HAVE_call_value_pop
376 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
379 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
382 /* If this subroutine pops its own args, record that in the call insn
383 if possible, for the sake of frame pointer elimination. */
386 pat = gen_call_value_pop (valreg,
387 gen_rtx_MEM (FUNCTION_MODE, funexp),
388 stack_size_rtx, next_arg_reg, n_pop);
390 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
391 stack_size_rtx, next_arg_reg, n_pop);
393 emit_call_insn (pat);
/* Fall back to the plain call/call_value patterns; call_value is used
   when a return value register (VALREG) exists.  */
400 #if defined (HAVE_call) && defined (HAVE_call_value)
401 if (HAVE_call && HAVE_call_value)
404 emit_call_insn (gen_call_value (valreg,
405 gen_rtx_MEM (FUNCTION_MODE, funexp),
406 stack_size_rtx, next_arg_reg,
409 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
410 stack_size_rtx, next_arg_reg,
411 struct_value_size_rtx));
417 /* Find the CALL insn we just emitted. */
418 for (call_insn = get_last_insn ();
419 call_insn && GET_CODE (call_insn) != CALL_INSN;
420 call_insn = PREV_INSN (call_insn))
426 /* Put the register usage information on the CALL. If there is already
427 some usage information, put ours at the end. */
428 if (CALL_INSN_FUNCTION_USAGE (call_insn))
432 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
433 link = XEXP (link, 1))
436 XEXP (link, 1) = call_fusage;
439 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
441 /* If this is a const call, then set the insn's unchanging bit. */
443 CONST_CALL_P (call_insn) = 1;
445 /* Restore this now, so that we do defer pops for this call's args
446 if the context of the call as a whole permits. */
447 inhibit_defer_pop = old_inhibit_defer_pop;
449 #ifndef ACCUMULATE_OUTGOING_ARGS
450 /* If returning from the subroutine does not automatically pop the args,
451 we need an instruction to pop them sooner or later.
452 Perhaps do it now; perhaps just record how much space to pop later.
454 If returning from the subroutine does pop the args, indicate that the
455 stack pointer will be changed. */
457 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
/* Record a stack-pointer clobber on the call and reduce the amount
   left for us to pop by what the callee pops itself.  */
460 CALL_INSN_FUNCTION_USAGE (call_insn)
461 = gen_rtx_EXPR_LIST (VOIDmode,
462 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
463 CALL_INSN_FUNCTION_USAGE (call_insn))
464 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
465 stack_size_rtx = GEN_INT (stack_size);
/* Either defer the pop (accumulate into pending_stack_adjust so
   several calls can share one adjustment) or adjust the stack now.  */
470 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
471 pending_stack_adjust += stack_size;
473 adjust_stack (stack_size_rtx);
478 /* Generate all the code for a function call
479 and return an rtx for its value.
480 Store the value in TARGET (specified as an rtx) if convenient.
481 If the value is stored in TARGET then TARGET is returned.
482 If IGNORE is nonzero, then we ignore the value of the function call. */
485 expand_call (exp, target, ignore)
490 /* List of actual parameters. */
491 tree actparms = TREE_OPERAND (exp, 1);
492 /* RTX for the function to be called. */
494 /* Data type of the function. */
496 /* Declaration of the function being called,
497 or 0 if the function is computed (not known by name). */
501 /* Register in which non-BLKmode value will be returned,
502 or 0 if no value or if value is BLKmode. */
504 /* Address where we should return a BLKmode value;
505 0 if value not BLKmode. */
506 rtx structure_value_addr = 0;
507 /* Nonzero if that address is being passed by treating it as
508 an extra, implicit first parameter. Otherwise,
509 it is passed by being copied directly into struct_value_rtx. */
510 int structure_value_addr_parm = 0;
511 /* Size of aggregate value wanted, or zero if none wanted
512 or if we are using the non-reentrant PCC calling convention
513 or expecting the value in registers. */
514 HOST_WIDE_INT struct_value_size = 0;
515 /* Nonzero if called function returns an aggregate in memory PCC style,
516 by returning the address of where to find it. */
517 int pcc_struct_value = 0;
519 /* Number of actual parameters in this call, including struct value addr. */
521 /* Number of named args. Args after this are anonymous ones
522 and they must all go on the stack. */
524 /* Count arg position in order args appear. */
527 /* Vector of information about each argument.
528 Arguments are numbered in the order they will be pushed,
529 not the order they are written. */
530 struct arg_data *args;
532 /* Total size in bytes of all the stack-parms scanned so far. */
533 struct args_size args_size;
534 /* Size of arguments before any adjustments (such as rounding). */
535 struct args_size original_args_size;
536 /* Data on reg parms scanned so far. */
537 CUMULATIVE_ARGS args_so_far;
538 /* Nonzero if a reg parm has been scanned. */
540 /* Nonzero if this is an indirect function call. */
542 /* Nonzero if we must avoid push-insns in the args for this call.
543 If stack space is allocated for register parameters, but not by the
544 caller, then it is preallocated in the fixed part of the stack frame.
545 So the entire argument block must then be preallocated (i.e., we
546 ignore PUSH_ROUNDING in that case). */
549 int must_preallocate = 0;
551 int must_preallocate = 1;
554 /* Size of the stack reserved for parameter registers. */
555 int reg_parm_stack_space = 0;
557 /* 1 if scanning parms front to back, -1 if scanning back to front. */
559 /* Address of space preallocated for stack parms
560 (on machines that lack push insns), or 0 if space not preallocated. */
563 /* Nonzero if it is plausible that this is a call to alloca. */
565 /* Nonzero if this is a call to malloc or a related function. */
567 /* Nonzero if this is a call to setjmp or a related function. */
569 /* Nonzero if this is a call to `longjmp'. */
571 /* Nonzero if this is a call to an inline function. */
572 int is_integrable = 0;
573 /* Nonzero if this is a call to a `const' function.
574 Note that only explicitly named functions are handled as `const' here. */
576 /* Nonzero if this is a call to a `volatile' function. */
578 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
579 /* Define the boundary of the register parm stack space that needs to be
581 int low_to_save = -1, high_to_save;
582 rtx save_area = 0; /* Place that it is saved */
585 #ifdef ACCUMULATE_OUTGOING_ARGS
586 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
587 char *initial_stack_usage_map = stack_usage_map;
588 int old_stack_arg_under_construction;
591 rtx old_stack_level = 0;
592 int old_pending_adj = 0;
593 int old_inhibit_defer_pop = inhibit_defer_pop;
598 /* The value of the function call can be put in a hard register. But
599 if -fcheck-memory-usage, code which invokes functions (and thus
600 damages some hard registers) can be inserted before using the value.
601 So, target is always a pseudo-register in that case. */
602 if (current_function_check_memory_usage)
605 /* See if we can find a DECL-node for the actual function.
606 As a result, decide whether this is a call to an integrable function. */
608 p = TREE_OPERAND (exp, 0);
609 if (TREE_CODE (p) == ADDR_EXPR)
611 fndecl = TREE_OPERAND (p, 0);
612 if (TREE_CODE (fndecl) != FUNCTION_DECL)
617 && fndecl != current_function_decl
618 && DECL_INLINE (fndecl)
619 && DECL_SAVED_INSNS (fndecl)
620 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
622 else if (! TREE_ADDRESSABLE (fndecl))
624 /* In case this function later becomes inlinable,
625 record that there was already a non-inline call to it.
627 Use abstraction instead of setting TREE_ADDRESSABLE
629 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
632 warning_with_decl (fndecl, "can't inline call to `%s'");
633 warning ("called from here");
635 mark_addressable (fndecl);
638 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
639 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
642 if (TREE_THIS_VOLATILE (fndecl))
647 /* If we don't have specific function to call, see if we have a
648 constant or `noreturn' function from the type. */
651 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
652 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
655 #ifdef REG_PARM_STACK_SPACE
656 #ifdef MAYBE_REG_PARM_STACK_SPACE
657 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
659 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
663 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
664 if (reg_parm_stack_space > 0)
665 must_preallocate = 1;
668 /* Warn if this value is an aggregate type,
669 regardless of which calling convention we are using for it. */
670 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
671 warning ("function call has aggregate value");
673 /* Set up a place to return a structure. */
675 /* Cater to broken compilers. */
676 if (aggregate_value_p (exp))
678 /* This call returns a big structure. */
681 #ifdef PCC_STATIC_STRUCT_RETURN
683 pcc_struct_value = 1;
684 /* Easier than making that case work right. */
687 /* In case this is a static function, note that it has been
689 if (! TREE_ADDRESSABLE (fndecl))
690 mark_addressable (fndecl);
694 #else /* not PCC_STATIC_STRUCT_RETURN */
696 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
698 if (target && GET_CODE (target) == MEM)
699 structure_value_addr = XEXP (target, 0);
702 /* Assign a temporary to hold the value. */
705 /* For variable-sized objects, we must be called with a target
706 specified. If we were to allocate space on the stack here,
707 we would have no way of knowing when to free it. */
709 if (struct_value_size < 0)
712 /* This DECL is just something to feed to mark_addressable;
713 it doesn't get pushed. */
714 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
715 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
716 mark_addressable (d);
717 structure_value_addr = XEXP (DECL_RTL (d), 0);
722 #endif /* not PCC_STATIC_STRUCT_RETURN */
725 /* If called function is inline, try to integrate it. */
730 #ifdef ACCUMULATE_OUTGOING_ARGS
731 rtx before_call = get_last_insn ();
734 temp = expand_inline_function (fndecl, actparms, target,
735 ignore, TREE_TYPE (exp),
736 structure_value_addr);
738 /* If inlining succeeded, return. */
739 if (temp != (rtx) (HOST_WIDE_INT) -1)
741 #ifdef ACCUMULATE_OUTGOING_ARGS
742 /* If the outgoing argument list must be preserved, push
743 the stack before executing the inlined function if it
746 for (i = reg_parm_stack_space - 1; i >= 0; i--)
747 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
750 if (stack_arg_under_construction || i >= 0)
753 = before_call ? NEXT_INSN (before_call) : get_insns ();
756 /* Look for a call in the inline function code.
757 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
758 nonzero then there is a call and it is not necessary
759 to scan the insns. */
761 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
762 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
763 if (GET_CODE (insn) == CALL_INSN)
768 /* Reserve enough stack space so that the largest
769 argument list of any function call in the inline
770 function does not overlap the argument list being
771 evaluated. This is usually an overestimate because
772 allocate_dynamic_stack_space reserves space for an
773 outgoing argument list in addition to the requested
774 space, but there is no way to ask for stack space such
775 that an argument list of a certain length can be
778 Add the stack space reserved for register arguments, if
779 any, in the inline function. What is really needed is the
780 largest value of reg_parm_stack_space in the inline
781 function, but that is not available. Using the current
782 value of reg_parm_stack_space is wrong, but gives
783 correct results on all supported machines. */
785 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
786 + reg_parm_stack_space);
789 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
790 allocate_dynamic_stack_space (GEN_INT (adjust),
791 NULL_RTX, BITS_PER_UNIT);
794 emit_insns_before (seq, first_insn);
795 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
800 /* If the result is equivalent to TARGET, return TARGET to simplify
801 checks in store_expr. They can be equivalent but not equal in the
802 case of a function that returns BLKmode. */
803 if (temp != target && rtx_equal_p (temp, target))
808 /* If inlining failed, mark FNDECL as needing to be compiled
809 separately after all. If function was declared inline,
811 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
812 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
814 warning_with_decl (fndecl, "inlining failed in call to `%s'");
815 warning ("called from here");
817 mark_addressable (fndecl);
820 /* When calling a const function, we must pop the stack args right away,
821 so that the pop is deleted or moved with the call. */
825 function_call_count++;
827 if (fndecl && DECL_NAME (fndecl))
828 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
831 /* Unless it's a call to a specific function that isn't alloca,
832 if it has one argument, we must assume it might be alloca. */
835 = (!(fndecl != 0 && strcmp (name, "alloca"))
837 && TREE_CHAIN (actparms) == 0);
839 /* We assume that alloca will always be called by name. It
840 makes no sense to pass it as a pointer-to-function to
841 anything that does not understand its behavior. */
843 = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
845 && ! strcmp (name, "alloca"))
846 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
848 && ! strcmp (name, "__builtin_alloca"))));
851 /* See if this is a call to a function that can return more than once
852 or a call to longjmp. */
858 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
859 /* Exclude functions not at the file scope, or not `extern',
860 since they are not the magic functions we would otherwise
862 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
866 /* Disregard prefix _, __ or __x. */
869 if (name[1] == '_' && name[2] == 'x')
871 else if (name[1] == '_')
881 && (! strcmp (tname, "setjmp")
882 || ! strcmp (tname, "setjmp_syscall")))
884 && ! strcmp (tname, "sigsetjmp"))
886 && ! strcmp (tname, "savectx")));
888 && ! strcmp (tname, "siglongjmp"))
891 else if ((tname[0] == 'q' && tname[1] == 's'
892 && ! strcmp (tname, "qsetjmp"))
893 || (tname[0] == 'v' && tname[1] == 'f'
894 && ! strcmp (tname, "vfork")))
897 else if (tname[0] == 'l' && tname[1] == 'o'
898 && ! strcmp (tname, "longjmp"))
900 /* XXX should have "malloc" attribute on functions instead
901 of recognizing them by name. */
902 else if (! strcmp (tname, "malloc")
903 || ! strcmp (tname, "calloc")
904 || ! strcmp (tname, "realloc")
905 /* Note use of NAME rather than TNAME here. These functions
906 are only reserved when preceded with __. */
907 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
908 || ! strcmp (name, "__nw") /* mangled __builtin_new */
909 || ! strcmp (name, "__builtin_new")
910 || ! strcmp (name, "__builtin_vec_new"))
915 current_function_calls_alloca = 1;
917 /* Don't let pending stack adjusts add up to too much.
918 Also, do all pending adjustments now
919 if there is any chance this might be a call to alloca. */
921 if (pending_stack_adjust >= 32
922 || (pending_stack_adjust > 0 && may_be_alloca))
923 do_pending_stack_adjust ();
925 /* Operand 0 is a pointer-to-function; get the type of the function. */
926 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
927 if (TREE_CODE (funtype) != POINTER_TYPE)
929 funtype = TREE_TYPE (funtype);
931 /* Push the temporary stack slot level so that we can free any temporaries
935 /* Start updating where the next arg would go.
937 On some machines (such as the PA) indirect calls have a different
938 calling convention than normal calls. The last argument in
939 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
941 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
943 /* If struct_value_rtx is 0, it means pass the address
944 as if it were an extra parameter. */
945 if (structure_value_addr && struct_value_rtx == 0)
947 /* If structure_value_addr is a REG other than
948 virtual_outgoing_args_rtx, we can use always use it. If it
949 is not a REG, we must always copy it into a register.
950 If it is virtual_outgoing_args_rtx, we must copy it to another
951 register in some cases. */
952 rtx temp = (GET_CODE (structure_value_addr) != REG
953 #ifdef ACCUMULATE_OUTGOING_ARGS
954 || (stack_arg_under_construction
955 && structure_value_addr == virtual_outgoing_args_rtx)
957 ? copy_addr_to_reg (structure_value_addr)
958 : structure_value_addr);
961 = tree_cons (error_mark_node,
962 make_tree (build_pointer_type (TREE_TYPE (funtype)),
965 structure_value_addr_parm = 1;
968 /* Count the arguments and set NUM_ACTUALS. */
969 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
972 /* Compute number of named args.
973 Normally, don't include the last named arg if anonymous args follow.
974 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
975 (If no anonymous args follow, the result of list_length is actually
976 one too large. This is harmless.)
978 If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is zero,
979 this machine will be able to place unnamed args that were passed in
980 registers into the stack. So treat all args as named. This allows the
981 insns emitting for a specific argument list to be independent of the
982 function declaration.
984 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
985 way to pass unnamed args in registers, so we must force them into
988 if ((STRICT_ARGUMENT_NAMING
989 #ifndef SETUP_INCOMING_VARARGS
993 && TYPE_ARG_TYPES (funtype) != 0)
995 = (list_length (TYPE_ARG_TYPES (funtype))
996 /* Don't include the last named arg. */
997 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
998 /* Count the struct value address, if it is passed as a parm. */
999 + structure_value_addr_parm);
1001 /* If we know nothing, treat all args as named. */
1002 n_named_args = num_actuals;
1004 /* Make a vector to hold all the information about each arg. */
1005 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1006 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1008 args_size.constant = 0;
1011 /* In this loop, we consider args in the order they are written.
1012 We fill up ARGS from the front or from the back if necessary
1013 so that in any case the first arg to be pushed ends up at the front. */
1015 #ifdef PUSH_ARGS_REVERSED
1016 i = num_actuals - 1, inc = -1;
1017 /* In this case, must reverse order of args
1018 so that we compute and push the last arg first. */
1023 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1024 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1026 tree type = TREE_TYPE (TREE_VALUE (p));
1028 enum machine_mode mode;
1030 args[i].tree_value = TREE_VALUE (p);
1032 /* Replace erroneous argument with constant zero. */
1033 if (type == error_mark_node || TYPE_SIZE (type) == 0)
1034 args[i].tree_value = integer_zero_node, type = integer_type_node;
1036 /* If TYPE is a transparent union, pass things the way we would
1037 pass the first field of the union. We have already verified that
1038 the modes are the same. */
1039 if (TYPE_TRANSPARENT_UNION (type))
1040 type = TREE_TYPE (TYPE_FIELDS (type));
1042 /* Decide where to pass this arg.
1044 args[i].reg is nonzero if all or part is passed in registers.
1046 args[i].partial is nonzero if part but not all is passed in registers,
1047 and the exact value says how many words are passed in registers.
1049 args[i].pass_on_stack is nonzero if the argument must at least be
1050 computed on the stack. It may then be loaded back into registers
1051 if args[i].reg is nonzero.
1053 These decisions are driven by the FUNCTION_... macros and must agree
1054 with those made by function.c. */
1056 /* See if this argument should be passed by invisible reference. */
1057 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1058 && contains_placeholder_p (TYPE_SIZE (type)))
1059 || TREE_ADDRESSABLE (type)
1060 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1061 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1062 type, argpos < n_named_args)
1066 /* If we're compiling a thunk, pass through invisible
1067 references instead of making a copy. */
1068 if (current_function_is_thunk
1069 #ifdef FUNCTION_ARG_CALLEE_COPIES
1070 || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
1071 type, argpos < n_named_args)
1072 /* If it's in a register, we must make a copy of it too. */
1073 /* ??? Is this a sufficient test? Is there a better one? */
1074 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1075 && REG_P (DECL_RTL (args[i].tree_value)))
1076 && ! TREE_ADDRESSABLE (type))
1080 args[i].tree_value = build1 (ADDR_EXPR,
1081 build_pointer_type (type),
1082 args[i].tree_value);
1083 type = build_pointer_type (type);
1087 /* We make a copy of the object and pass the address to the
1088 function being called. */
1091 if (TYPE_SIZE (type) == 0
1092 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1093 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1094 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1095 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1096 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1098 /* This is a variable-sized object. Make space on the stack
1100 rtx size_rtx = expr_size (TREE_VALUE (p));
1102 if (old_stack_level == 0)
1104 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1105 old_pending_adj = pending_stack_adjust;
1106 pending_stack_adjust = 0;
1109 copy = gen_rtx_MEM (BLKmode,
1110 allocate_dynamic_stack_space (size_rtx,
1112 TYPE_ALIGN (type)));
1116 int size = int_size_in_bytes (type);
1117 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1120 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
1122 store_expr (args[i].tree_value, copy, 0);
1125 args[i].tree_value = build1 (ADDR_EXPR,
1126 build_pointer_type (type),
1127 make_tree (type, copy));
1128 type = build_pointer_type (type);
1132 mode = TYPE_MODE (type);
1133 unsignedp = TREE_UNSIGNED (type);
1135 #ifdef PROMOTE_FUNCTION_ARGS
1136 mode = promote_mode (type, mode, &unsignedp, 1);
1139 args[i].unsignedp = unsignedp;
1140 args[i].mode = mode;
1141 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1142 argpos < n_named_args);
1143 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1146 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1147 argpos < n_named_args);
1150 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1152 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1153 it means that we are to pass this arg in the register(s) designated
1154 by the PARALLEL, but also to pass it in the stack. */
1155 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1156 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1157 args[i].pass_on_stack = 1;
1159 /* If this is an addressable type, we must preallocate the stack
1160 since we must evaluate the object into its final location.
1162 If this is to be passed in both registers and the stack, it is simpler
1164 if (TREE_ADDRESSABLE (type)
1165 || (args[i].pass_on_stack && args[i].reg != 0))
1166 must_preallocate = 1;
1168 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1169 we cannot consider this function call constant. */
1170 if (TREE_ADDRESSABLE (type))
1173 /* Compute the stack-size of this argument. */
1174 if (args[i].reg == 0 || args[i].partial != 0
1175 || reg_parm_stack_space > 0
1176 || args[i].pass_on_stack)
1177 locate_and_pad_parm (mode, type,
1178 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1183 fndecl, &args_size, &args[i].offset,
1186 #ifndef ARGS_GROW_DOWNWARD
1187 args[i].slot_offset = args_size;
1190 /* If a part of the arg was put into registers,
1191 don't include that part in the amount pushed. */
1192 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1193 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1194 / (PARM_BOUNDARY / BITS_PER_UNIT)
1195 * (PARM_BOUNDARY / BITS_PER_UNIT));
1197 /* Update ARGS_SIZE, the total stack space for args so far. */
1199 args_size.constant += args[i].size.constant;
1200 if (args[i].size.var)
1202 ADD_PARM_SIZE (args_size, args[i].size.var);
1205 /* Since the slot offset points to the bottom of the slot,
1206 we must record it after incrementing if the args grow down. */
1207 #ifdef ARGS_GROW_DOWNWARD
1208 args[i].slot_offset = args_size;
1210 args[i].slot_offset.constant = -args_size.constant;
1213 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1217 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1218 have been used, etc. */
1220 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1221 argpos < n_named_args);
1224 #ifdef FINAL_REG_PARM_STACK_SPACE
1225 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1229 /* Compute the actual size of the argument block required. The variable
1230 and constant sizes must be combined, the size may have to be rounded,
1231 and there may be a minimum required size. */
1233 original_args_size = args_size;
1236 /* If this function requires a variable-sized argument list, don't try to
1237 make a cse'able block for this call. We may be able to do this
1238 eventually, but it is too complicated to keep track of what insns go
1239 in the cse'able block and which don't. */
1242 must_preallocate = 1;
1244 args_size.var = ARGS_SIZE_TREE (args_size);
1245 args_size.constant = 0;
1247 #ifdef PREFERRED_STACK_BOUNDARY
1248 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1249 args_size.var = round_up (args_size.var, STACK_BYTES);
1252 if (reg_parm_stack_space > 0)
1255 = size_binop (MAX_EXPR, args_size.var,
1256 size_int (reg_parm_stack_space));
1258 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1259 /* The area corresponding to register parameters is not to count in
1260 the size of the block we need. So make the adjustment. */
1262 = size_binop (MINUS_EXPR, args_size.var,
1263 size_int (reg_parm_stack_space));
1269 #ifdef PREFERRED_STACK_BOUNDARY
1270 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1271 / STACK_BYTES) * STACK_BYTES);
1274 args_size.constant = MAX (args_size.constant,
1275 reg_parm_stack_space);
1277 #ifdef MAYBE_REG_PARM_STACK_SPACE
1278 if (reg_parm_stack_space == 0)
1279 args_size.constant = 0;
1282 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1283 args_size.constant -= reg_parm_stack_space;
1287 /* See if we have or want to preallocate stack space.
1289 If we would have to push a partially-in-regs parm
1290 before other stack parms, preallocate stack space instead.
1292 If the size of some parm is not a multiple of the required stack
1293 alignment, we must preallocate.
1295 If the total size of arguments that would otherwise create a copy in
1296 a temporary (such as a CALL) is more than half the total argument list
1297 size, preallocation is faster.
1299 Another reason to preallocate is if we have a machine (like the m88k)
1300 where stack alignment is required to be maintained between every
1301 pair of insns, not just when the call is made. However, we assume here
1302 that such machines either do not have push insns (and hence preallocation
1303 would occur anyway) or the problem is taken care of with
1306 if (! must_preallocate)
1308 int partial_seen = 0;
1309 int copy_to_evaluate_size = 0;
1311 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1313 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1315 else if (partial_seen && args[i].reg == 0)
1316 must_preallocate = 1;
1318 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1319 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1320 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1321 || TREE_CODE (args[i].tree_value) == COND_EXPR
1322 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1323 copy_to_evaluate_size
1324 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1327 if (copy_to_evaluate_size * 2 >= args_size.constant
1328 && args_size.constant > 0)
1329 must_preallocate = 1;
1332 /* If the structure value address will reference the stack pointer, we must
1333 stabilize it. We don't need to do this if we know that we are not going
1334 to adjust the stack pointer in processing this call. */
1336 if (structure_value_addr
1337 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1338 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1340 #ifndef ACCUMULATE_OUTGOING_ARGS
1341 || args_size.constant
1344 structure_value_addr = copy_to_reg (structure_value_addr);
1346 /* If this function call is cse'able, precompute all the parameters.
1347 Note that if the parameter is constructed into a temporary, this will
1348 cause an additional copy because the parameter will be constructed
1349 into a temporary location and then copied into the outgoing arguments.
1350 If a parameter contains a call to alloca and this function uses the
1351 stack, precompute the parameter. */
1353 /* If we preallocated the stack space, and some arguments must be passed
1354 on the stack, then we must precompute any parameter which contains a
1355 function call which will store arguments on the stack.
1356 Otherwise, evaluating the parameter may clobber previous parameters
1357 which have already been stored into the stack. */
1359 for (i = 0; i < num_actuals; i++)
1361 || ((args_size.var != 0 || args_size.constant != 0)
1362 && calls_function (args[i].tree_value, 1))
1363 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1364 && calls_function (args[i].tree_value, 0)))
1366 /* If this is an addressable type, we cannot pre-evaluate it. */
1367 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1372 args[i].initial_value = args[i].value
1373 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1375 preserve_temp_slots (args[i].value);
1378 /* ANSI doesn't require a sequence point here,
1379 but PCC has one, so this will avoid some problems. */
1382 args[i].initial_value = args[i].value
1383 = protect_from_queue (args[i].initial_value, 0);
1385 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1387 = convert_modes (args[i].mode,
1388 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1389 args[i].value, args[i].unsignedp);
1392 /* Now we are about to start emitting insns that can be deleted
1393 if a libcall is deleted. */
1394 if (is_const || is_malloc)
1397 /* If we have no actual push instructions, or shouldn't use them,
1398 make space for all args right now. */
1400 if (args_size.var != 0)
1402 if (old_stack_level == 0)
1404 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1405 old_pending_adj = pending_stack_adjust;
1406 pending_stack_adjust = 0;
1407 #ifdef ACCUMULATE_OUTGOING_ARGS
1408 /* stack_arg_under_construction says whether a stack arg is
1409 being constructed at the old stack level. Pushing the stack
1410 gets a clean outgoing argument block. */
1411 old_stack_arg_under_construction = stack_arg_under_construction;
1412 stack_arg_under_construction = 0;
1415 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1419 /* Note that we must go through the motions of allocating an argument
1420 block even if the size is zero because we may be storing args
1421 in the area reserved for register arguments, which may be part of
1424 int needed = args_size.constant;
1426 /* Store the maximum argument space used. It will be pushed by
1427 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1430 if (needed > current_function_outgoing_args_size)
1431 current_function_outgoing_args_size = needed;
1433 if (must_preallocate)
1435 #ifdef ACCUMULATE_OUTGOING_ARGS
1436 /* Since the stack pointer will never be pushed, it is possible for
1437 the evaluation of a parm to clobber something we have already
1438 written to the stack. Since most function calls on RISC machines
1439 do not use the stack, this is uncommon, but must work correctly.
1441 Therefore, we save any area of the stack that was already written
1442 and that we are using. Here we set up to do this by making a new
1443 stack usage map from the old one. The actual save will be done
1446 Another approach might be to try to reorder the argument
1447 evaluations to avoid this conflicting stack usage. */
1449 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1450 /* Since we will be writing into the entire argument area, the
1451 map must be allocated for its entire size, not just the part that
1452 is the responsibility of the caller. */
1453 needed += reg_parm_stack_space;
1456 #ifdef ARGS_GROW_DOWNWARD
1457 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1460 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1463 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1465 if (initial_highest_arg_in_use)
1466 bcopy (initial_stack_usage_map, stack_usage_map,
1467 initial_highest_arg_in_use);
1469 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1470 bzero (&stack_usage_map[initial_highest_arg_in_use],
1471 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1474 /* The address of the outgoing argument list must not be copied to a
1475 register here, because argblock would be left pointing to the
1476 wrong place after the call to allocate_dynamic_stack_space below.
1479 argblock = virtual_outgoing_args_rtx;
1481 #else /* not ACCUMULATE_OUTGOING_ARGS */
1482 if (inhibit_defer_pop == 0)
1484 /* Try to reuse some or all of the pending_stack_adjust
1485 to get this space. Maybe we can avoid any pushing. */
1486 if (needed > pending_stack_adjust)
1488 needed -= pending_stack_adjust;
1489 pending_stack_adjust = 0;
1493 pending_stack_adjust -= needed;
1497 /* Special case this because overhead of `push_block' in this
1498 case is non-trivial. */
1500 argblock = virtual_outgoing_args_rtx;
1502 argblock = push_block (GEN_INT (needed), 0, 0);
1504 /* We only really need to call `copy_to_reg' in the case where push
1505 insns are going to be used to pass ARGBLOCK to a function
1506 call in ARGS. In that case, the stack pointer changes value
1507 from the allocation point to the call point, and hence
1508 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1509 But might as well always do it. */
1510 argblock = copy_to_reg (argblock);
1511 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1515 #ifdef ACCUMULATE_OUTGOING_ARGS
1516 /* The save/restore code in store_one_arg handles all cases except one:
1517 a constructor call (including a C function returning a BLKmode struct)
1518 to initialize an argument. */
1519 if (stack_arg_under_construction)
1521 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1522 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1524 rtx push_size = GEN_INT (args_size.constant);
1526 if (old_stack_level == 0)
1528 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1529 old_pending_adj = pending_stack_adjust;
1530 pending_stack_adjust = 0;
1531 /* stack_arg_under_construction says whether a stack arg is
1532 being constructed at the old stack level. Pushing the stack
1533 gets a clean outgoing argument block. */
1534 old_stack_arg_under_construction = stack_arg_under_construction;
1535 stack_arg_under_construction = 0;
1536 /* Make a new map for the new argument list. */
1537 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1538 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1539 highest_outgoing_arg_in_use = 0;
1541 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1543 /* If argument evaluation might modify the stack pointer, copy the
1544 address of the argument list to a register. */
1545 for (i = 0; i < num_actuals; i++)
1546 if (args[i].pass_on_stack)
1548 argblock = copy_addr_to_reg (argblock);
1554 /* If we preallocated stack space, compute the address of each argument.
1555 We need not ensure it is a valid memory address here; it will be
1556 validized when it is used. */
1559 rtx arg_reg = argblock;
1562 if (GET_CODE (argblock) == PLUS)
1563 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1565 for (i = 0; i < num_actuals; i++)
1567 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1568 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1571 /* Skip this parm if it will not be passed on the stack. */
1572 if (! args[i].pass_on_stack && args[i].reg != 0)
1575 if (GET_CODE (offset) == CONST_INT)
1576 addr = plus_constant (arg_reg, INTVAL (offset));
1578 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1580 addr = plus_constant (addr, arg_offset);
1581 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1582 MEM_IN_STRUCT_P (args[i].stack)
1583 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
1585 if (GET_CODE (slot_offset) == CONST_INT)
1586 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1588 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1590 addr = plus_constant (addr, arg_offset);
1591 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1595 #ifdef PUSH_ARGS_REVERSED
1596 #ifdef PREFERRED_STACK_BOUNDARY
1597 /* If we push args individually in reverse order, perform stack alignment
1598 before the first push (the last arg). */
1600 anti_adjust_stack (GEN_INT (args_size.constant
1601 - original_args_size.constant));
1605 /* Don't try to defer pops if preallocating, not even from the first arg,
1606 since ARGBLOCK probably refers to the SP. */
1610 /* Get the function to call, in the form of RTL. */
1613 /* If this is the first use of the function, see if we need to
1614 make an external definition for it. */
1615 if (! TREE_USED (fndecl))
1617 assemble_external (fndecl);
1618 TREE_USED (fndecl) = 1;
1621 /* Get a SYMBOL_REF rtx for the function address. */
1622 funexp = XEXP (DECL_RTL (fndecl), 0);
1625 /* Generate an rtx (probably a pseudo-register) for the address. */
1628 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1629 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1631 /* Check that the function is executable. */
1632 if (current_function_check_memory_usage)
1633 emit_library_call (chkr_check_exec_libfunc, 1,
1639 /* Figure out the register where the value, if any, will come back. */
1641 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1642 && ! structure_value_addr)
1644 if (pcc_struct_value)
1645 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1648 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1651 /* Precompute all register parameters. It isn't safe to compute anything
1652 once we have started filling any specific hard regs. */
1654 for (i = 0; i < num_actuals; i++)
1655 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1659 if (args[i].value == 0)
1662 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1664 preserve_temp_slots (args[i].value);
1667 /* ANSI doesn't require a sequence point here,
1668 but PCC has one, so this will avoid some problems. */
1672 /* If we are to promote the function arg to a wider mode,
1675 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1677 = convert_modes (args[i].mode,
1678 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1679 args[i].value, args[i].unsignedp);
1681 /* If the value is expensive, and we are inside an appropriately
1682 short loop, put the value into a pseudo and then put the pseudo
1685 For small register classes, also do this if this call uses
1686 register parameters. This is to avoid reload conflicts while
1687 loading the parameters registers. */
1689 if ((! (GET_CODE (args[i].value) == REG
1690 || (GET_CODE (args[i].value) == SUBREG
1691 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1692 && args[i].mode != BLKmode
1693 && rtx_cost (args[i].value, SET) > 2
1694 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
1695 || preserve_subexpressions_p ()))
1696 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1699 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1701 /* The argument list is the property of the called routine and it
1702 may clobber it. If the fixed area has been used for previous
1703 parameters, we must save and restore it.
1705 Here we compute the boundary of the area that needs to be saved, if any. */
1707 #ifdef ARGS_GROW_DOWNWARD
1708 for (i = 0; i < reg_parm_stack_space + 1; i++)
1710 for (i = 0; i < reg_parm_stack_space; i++)
1713 if (i >= highest_outgoing_arg_in_use
1714 || stack_usage_map[i] == 0)
1717 if (low_to_save == -1)
1723 if (low_to_save >= 0)
1725 int num_to_save = high_to_save - low_to_save + 1;
1726 enum machine_mode save_mode
1727 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1730 /* If we don't have the required alignment, must do this in BLKmode. */
1731 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1732 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1733 save_mode = BLKmode;
1735 #ifdef ARGS_GROW_DOWNWARD
1736 stack_area = gen_rtx_MEM (save_mode,
1737 memory_address (save_mode,
1738 plus_constant (argblock,
1741 stack_area = gen_rtx_MEM (save_mode,
1742 memory_address (save_mode,
1743 plus_constant (argblock,
1746 if (save_mode == BLKmode)
1748 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
1749 MEM_IN_STRUCT_P (save_area) = 0;
1750 emit_block_move (validize_mem (save_area), stack_area,
1751 GEN_INT (num_to_save),
1752 PARM_BOUNDARY / BITS_PER_UNIT);
1756 save_area = gen_reg_rtx (save_mode);
1757 emit_move_insn (save_area, stack_area);
1763 /* Now store (and compute if necessary) all non-register parms.
1764 These come before register parms, since they can require block-moves,
1765 which could clobber the registers used for register parms.
1766 Parms which have partial registers are not stored here,
1767 but we do preallocate space here if they want that. */
1769 for (i = 0; i < num_actuals; i++)
1770 if (args[i].reg == 0 || args[i].pass_on_stack)
1771 store_one_arg (&args[i], argblock, may_be_alloca,
1772 args_size.var != 0, reg_parm_stack_space);
1774 /* If we have a parm that is passed in registers but not in memory
1775 and whose alignment does not permit a direct copy into registers,
1776 make a group of pseudos that correspond to each register that we
1779 if (STRICT_ALIGNMENT)
1780 for (i = 0; i < num_actuals; i++)
1781 if (args[i].reg != 0 && ! args[i].pass_on_stack
1782 && args[i].mode == BLKmode
1783 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1784 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1786 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1787 int big_endian_correction = 0;
1789 args[i].n_aligned_regs
1790 = args[i].partial ? args[i].partial
1791 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1793 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1794 * args[i].n_aligned_regs);
1796 /* Structures smaller than a word are aligned to the least
1797 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1798 this means we must skip the empty high order bytes when
1799 calculating the bit offset. */
1800 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1801 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
1803 for (j = 0; j < args[i].n_aligned_regs; j++)
1805 rtx reg = gen_reg_rtx (word_mode);
1806 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1807 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1808 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1810 args[i].aligned_regs[j] = reg;
1812 /* There is no need to restrict this code to loading items
1813 in TYPE_ALIGN sized hunks. The bitfield instructions can
1814 load up entire word sized registers efficiently.
1816 ??? This may not be needed anymore.
1817 We used to emit a clobber here but that doesn't let later
1818 passes optimize the instructions we emit. By storing 0 into
1819 the register later passes know the first AND to zero out the
1820 bitfield being set in the register is unnecessary. The store
1821 of 0 will be deleted as will at least the first AND. */
1823 emit_move_insn (reg, const0_rtx);
1825 bytes -= bitsize / BITS_PER_UNIT;
1826 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1827 extract_bit_field (word, bitsize, 0, 1,
1828 NULL_RTX, word_mode,
1830 bitalign / BITS_PER_UNIT,
1832 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
1836 /* Now store any partially-in-registers parm.
1837 This is the last place a block-move can happen. */
1839 for (i = 0; i < num_actuals; i++)
1840 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1841 store_one_arg (&args[i], argblock, may_be_alloca,
1842 args_size.var != 0, reg_parm_stack_space);
1844 #ifndef PUSH_ARGS_REVERSED
1845 #ifdef PREFERRED_STACK_BOUNDARY
1846 /* If we pushed args in forward order, perform stack alignment
1847 after pushing the last arg. */
1849 anti_adjust_stack (GEN_INT (args_size.constant
1850 - original_args_size.constant));
1854 /* If register arguments require space on the stack and stack space
1855 was not preallocated, allocate stack space here for arguments
1856 passed in registers. */
1857 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1858 if (must_preallocate == 0 && reg_parm_stack_space > 0)
1859 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1862 /* Pass the function the address in which to return a structure value. */
1863 if (structure_value_addr && ! structure_value_addr_parm)
1865 emit_move_insn (struct_value_rtx,
1867 force_operand (structure_value_addr,
1870 /* Mark the memory for the aggregate as write-only. */
1871 if (current_function_check_memory_usage)
1872 emit_library_call (chkr_set_right_libfunc, 1,
1874 structure_value_addr, ptr_mode,
1875 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
1876 GEN_INT (MEMORY_USE_WO),
1877 TYPE_MODE (integer_type_node));
1879 if (GET_CODE (struct_value_rtx) == REG)
1880 use_reg (&call_fusage, struct_value_rtx);
1883 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
1885 /* Now do the register loads required for any wholly-register parms or any
1886 parms which are passed both on the stack and in a register. Their
1887 expressions were already evaluated.
1889 Mark all register-parms as living through the call, putting these USE
1890 insns in the CALL_INSN_FUNCTION_USAGE field. */
1892 #ifdef LOAD_ARGS_REVERSED
1893 for (i = num_actuals - 1; i >= 0; i--)
1895 for (i = 0; i < num_actuals; i++)
1898 rtx reg = args[i].reg;
1899 int partial = args[i].partial;
1904 /* Set to non-negative if must move a word at a time, even if just
1905 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1906 we just use a normal move insn. This value can be zero if the
1907 argument is a zero size structure with no fields. */
1908 nregs = (partial ? partial
1909 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1910 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1911 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1914 /* Handle calls that pass values in multiple non-contiguous
1915 locations. The Irix 6 ABI has examples of this. */
1917 if (GET_CODE (reg) == PARALLEL)
1919 emit_group_load (reg, args[i].value,
1920 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1921 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1925 /* If simple case, just do move. If normal partial, store_one_arg
1926 has already loaded the register for us. In all other cases,
1927 load the register(s) from memory. */
1929 else if (nregs == -1)
1930 emit_move_insn (reg, args[i].value);
1932 /* If we have pre-computed the values to put in the registers in
1933 the case of non-aligned structures, copy them in now. */
1935 else if (args[i].n_aligned_regs != 0)
1936 for (j = 0; j < args[i].n_aligned_regs; j++)
1937 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1938 args[i].aligned_regs[j]);
1940 else if (partial == 0 || args[i].pass_on_stack)
1941 move_block_to_reg (REGNO (reg),
1942 validize_mem (args[i].value), nregs,
1945 /* Handle calls that pass values in multiple non-contiguous
1946 locations. The Irix 6 ABI has examples of this. */
1947 if (GET_CODE (reg) == PARALLEL)
1948 use_group_regs (&call_fusage, reg);
1949 else if (nregs == -1)
1950 use_reg (&call_fusage, reg);
1952 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1956 /* Perform postincrements before actually calling the function. */
1959 /* All arguments and registers used for the call must be set up by now! */
1961 /* Generate the actual call instruction. */
1962 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
1963 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1964 valreg, old_inhibit_defer_pop, call_fusage, is_const);
1966 /* If call is cse'able, make appropriate pair of reg-notes around it.
1967 Test valreg so we don't crash; may safely ignore `const'
1968 if return type is void. Disable for PARALLEL return values, because
1969 we have no way to move such values into a pseudo register. */
1970 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
1973 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1976 /* Mark the return value as a pointer if needed. */
1977 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
1979 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
1980 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
1983 /* Construct an "equal form" for the value which mentions all the
1984 arguments in order as well as the function name. */
1985 #ifdef PUSH_ARGS_REVERSED
1986 for (i = 0; i < num_actuals; i++)
1987 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
1989 for (i = num_actuals - 1; i >= 0; i--)
1990 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
1992 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
1994 insns = get_insns ();
1997 emit_libcall_block (insns, temp, valreg, note);
2003 /* Otherwise, just write out the sequence without a note. */
2004 rtx insns = get_insns ();
2011 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2014 /* The return value from a malloc-like function is a pointer. */
2015 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2016 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2018 emit_move_insn (temp, valreg);
2020 /* The return value from a malloc-like function cannot alias
2022 last = get_last_insn ();
2024 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2026 /* Write out the sequence. */
2027 insns = get_insns ();
2033 /* For calls to `setjmp', etc., inform flow.c it should complain
2034 if nonvolatile values are live. */
2038 emit_note (name, NOTE_INSN_SETJMP);
2039 current_function_calls_setjmp = 1;
2043 current_function_calls_longjmp = 1;
2045 /* Notice functions that cannot return.
2046 If optimizing, insns emitted below will be dead.
2047 If not optimizing, they will exist, which is useful
2048 if the user uses the `return' command in the debugger. */
2050 if (is_volatile || is_longjmp)
2053 /* If value type not void, return an rtx for the value. */
2055 /* If there are cleanups to be called, don't use a hard reg as target.
2056 We need to double check this and see if it matters anymore. */
2057 if (any_pending_cleanups (1)
2058 && target && REG_P (target)
2059 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2062 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2065 target = const0_rtx;
2067 else if (structure_value_addr)
2069 if (target == 0 || GET_CODE (target) != MEM)
2071 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2072 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2073 structure_value_addr));
2074 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2077 else if (pcc_struct_value)
2079 /* This is the special C++ case where we need to
2080 know what the true target was. We take care to
2081 never use this value more than once in one expression. */
2082 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2083 copy_to_reg (valreg));
2084 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2086 /* Handle calls that return values in multiple non-contiguous locations.
2087 The Irix 6 ABI has examples of this. */
2088 else if (GET_CODE (valreg) == PARALLEL)
2090 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2094 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2095 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2096 preserve_temp_slots (target);
2099 emit_group_store (target, valreg, bytes,
2100 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2102 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2103 && GET_MODE (target) == GET_MODE (valreg))
2104 /* TARGET and VALREG cannot be equal at this point because the latter
2105 would not have REG_FUNCTION_VALUE_P true, while the former would if
2106 it were referring to the same register.
2108 If they refer to the same register, this move will be a no-op, except
2109 when function inlining is being done. */
2110 emit_move_insn (target, valreg);
2111 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2112 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2114 target = copy_to_reg (valreg);
2116 #ifdef PROMOTE_FUNCTION_RETURN
2117 /* If we promoted this return value, make the proper SUBREG. TARGET
2118 might be const0_rtx here, so be careful. */
2119 if (GET_CODE (target) == REG
2120 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2121 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2123 tree type = TREE_TYPE (exp);
2124 int unsignedp = TREE_UNSIGNED (type);
2126 /* If we don't promote as expected, something is wrong. */
2127 if (GET_MODE (target)
2128 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2131 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2132 SUBREG_PROMOTED_VAR_P (target) = 1;
2133 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2137 /* If size of args is variable or this was a constructor call for a stack
2138 argument, restore saved stack-pointer value. */
2140 if (old_stack_level)
2142 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2143 pending_stack_adjust = old_pending_adj;
2144 #ifdef ACCUMULATE_OUTGOING_ARGS
2145 stack_arg_under_construction = old_stack_arg_under_construction;
2146 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2147 stack_usage_map = initial_stack_usage_map;
2150 #ifdef ACCUMULATE_OUTGOING_ARGS
2153 #ifdef REG_PARM_STACK_SPACE
2156 enum machine_mode save_mode = GET_MODE (save_area);
2157 #ifdef ARGS_GROW_DOWNWARD
2159 = gen_rtx_MEM (save_mode,
2160 memory_address (save_mode,
2161 plus_constant (argblock,
2165 = gen_rtx_MEM (save_mode,
2166 memory_address (save_mode,
2167 plus_constant (argblock,
2171 if (save_mode != BLKmode)
2172 emit_move_insn (stack_area, save_area);
2174 emit_block_move (stack_area, validize_mem (save_area),
2175 GEN_INT (high_to_save - low_to_save + 1),
2176 PARM_BOUNDARY / BITS_PER_UNIT);
2180 /* If we saved any argument areas, restore them. */
2181 for (i = 0; i < num_actuals; i++)
2182 if (args[i].save_area)
2184 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2186 = gen_rtx_MEM (save_mode,
2187 memory_address (save_mode,
2188 XEXP (args[i].stack_slot, 0)));
2190 if (save_mode != BLKmode)
2191 emit_move_insn (stack_area, args[i].save_area);
2193 emit_block_move (stack_area, validize_mem (args[i].save_area),
2194 GEN_INT (args[i].size.constant),
2195 PARM_BOUNDARY / BITS_PER_UNIT);
2198 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2199 stack_usage_map = initial_stack_usage_map;
2203 /* If this was alloca, record the new stack level for nonlocal gotos.
2204 Check for the handler slots since we might not have a save area
2205 for non-local gotos. */
2207 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2208 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2215 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2216 (emitting the queue unless NO_QUEUE is nonzero),
2217 for a value of mode OUTMODE,
2218 with NARGS different arguments, passed as alternating rtx values
2219 and machine_modes to convert them to.
2220 The rtx values should have been passed through protect_from_queue already.
2222 NO_QUEUE will be true if and only if the library call is a `const' call
2223 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2224 to the variable is_const in expand_call.
2226 NO_QUEUE must be true for const calls, because if it isn't, then
2227 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2228 and will be lost if the libcall sequence is optimized away.
2230 NO_QUEUE must be false for non-const calls, because if it isn't, the
2231 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2232 optimized. For instance, the instruction scheduler may incorrectly
2233 move memory references across the non-const call. */
2236 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
/* VPROTO/VA_START/ANSI_PROTOTYPES are gcc's pre-ANSI portability macros for
   variadic functions: under a K&R compiler the named parameters are declared
   below and re-fetched from the va_list before use.  */
2239 #ifndef ANSI_PROTOTYPES
2242 enum machine_mode outmode;
2246 /* Total size in bytes of all the stack-parms scanned so far. */
2247 struct args_size args_size;
2248 /* Size of arguments before any adjustments (such as rounding). */
2249 struct args_size original_args_size;
2250 register int argnum;
2255 CUMULATIVE_ARGS args_so_far;
/* One record per libcall argument: its rtl value and mode, the register it
   is passed in (0 if passed entirely on the stack), how many registers a
   partially-in-registers argument uses, its stack offset and size, and a
   save area used when ACCUMULATE_OUTGOING_ARGS requires preserving stack
   words already in use.  */
2256 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2257 struct args_size offset; struct args_size size; rtx save_area; };
2259 int old_inhibit_defer_pop = inhibit_defer_pop;
2260 rtx call_fusage = 0;
2261 int reg_parm_stack_space = 0;
2262 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2263 /* Define the boundary of the register parm stack space that needs to be
2265 int low_to_save = -1, high_to_save;
2266 rtx save_area = 0; /* Place that it is saved */
2269 #ifdef ACCUMULATE_OUTGOING_ARGS
2270 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2271 char *initial_stack_usage_map = stack_usage_map;
2275 #ifdef REG_PARM_STACK_SPACE
2276 /* Size of the stack reserved for parameter registers. */
2277 #ifdef MAYBE_REG_PARM_STACK_SPACE
2278 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2280 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2284 VA_START (p, nargs);
2286 #ifndef ANSI_PROTOTYPES
/* K&R compilation: the named parameters are only reachable through the
   va_list, so pull them out here in declaration order.  */
2287 orgfun = va_arg (p, rtx);
2288 no_queue = va_arg (p, int);
2289 outmode = va_arg (p, enum machine_mode);
2290 nargs = va_arg (p, int);
2295 /* Copy all the libcall-arguments out of the varargs data
2296 and into a vector ARGVEC.
2298 Compute how to pass each argument. We only support a very small subset
2299 of the full argument passing conventions to limit complexity here since
2300 library functions shouldn't have many args. */
2302 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2303 bzero ((char *) argvec, nargs * sizeof (struct arg))
2306 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2308 args_size.constant = 0;
/* First pass: for each (rtx, mode) pair in the varargs, decide where it
   goes (register and/or stack slot) and accumulate the total stack size.  */
2313 for (count = 0; count < nargs; count++)
2315 rtx val = va_arg (p, rtx);
2316 enum machine_mode mode = va_arg (p, enum machine_mode);
2318 /* We cannot convert the arg value to the mode the library wants here;
2319 must do it earlier where we know the signedness of the arg. */
2321 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2324 /* On some machines, there's no way to pass a float to a library fcn.
2325 Pass it as a double instead. */
2326 #ifdef LIBGCC_NEEDS_DOUBLE
2327 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2328 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2331 /* There's no need to call protect_from_queue, because
2332 either emit_move_insn or emit_push_insn will do that. */
2334 /* Make sure it is a reasonable operand for a move or push insn. */
2335 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2336 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2337 val = force_operand (val, NULL_RTX);
2339 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2340 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2342 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2343 be viewed as just an efficiency improvement. */
2344 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2345 emit_move_insn (slot, val);
2346 val = force_operand (XEXP (slot, 0), NULL_RTX);
2351 argvec[count].value = val;
2352 argvec[count].mode = mode;
2354 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2355 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2357 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2358 argvec[count].partial
2359 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2361 argvec[count].partial = 0;
2364 locate_and_pad_parm (mode, NULL_TREE,
2365 argvec[count].reg && argvec[count].partial == 0,
2366 NULL_TREE, &args_size, &argvec[count].offset,
2367 &argvec[count].size);
2369 if (argvec[count].size.var)
2372 if (reg_parm_stack_space == 0 && argvec[count].partial)
2373 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
/* Only arguments that occupy actual stack (fully stacked, partial, or
   when the ABI reserves stack for register parms) add to the total.  */
2375 if (argvec[count].reg == 0 || argvec[count].partial != 0
2376 || reg_parm_stack_space > 0)
2377 args_size.constant += argvec[count].size.constant;
2379 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2383 #ifdef FINAL_REG_PARM_STACK_SPACE
2384 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2388 /* If this machine requires an external definition for library
2389 functions, write one out. */
2390 assemble_external_libcall (fun);
2392 original_args_size = args_size;
2393 #ifdef PREFERRED_STACK_BOUNDARY
/* Round the outgoing-argument block up to the preferred stack boundary
   (STACK_BYTES is that boundary expressed in bytes).  */
2394 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2395 / STACK_BYTES) * STACK_BYTES);
2398 args_size.constant = MAX (args_size.constant,
2399 reg_parm_stack_space);
2401 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2402 args_size.constant -= reg_parm_stack_space;
2405 if (args_size.constant > current_function_outgoing_args_size)
2406 current_function_outgoing_args_size = args_size.constant;
2408 #ifdef ACCUMULATE_OUTGOING_ARGS
2409 /* Since the stack pointer will never be pushed, it is possible for
2410 the evaluation of a parm to clobber something we have already
2411 written to the stack. Since most function calls on RISC machines
2412 do not use the stack, this is uncommon, but must work correctly.
2414 Therefore, we save any area of the stack that was already written
2415 and that we are using. Here we set up to do this by making a new
2416 stack usage map from the old one.
2418 Another approach might be to try to reorder the argument
2419 evaluations to avoid this conflicting stack usage. */
2421 needed = args_size.constant;
2423 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2424 /* Since we will be writing into the entire argument area, the
2425 map must be allocated for its entire size, not just the part that
2426 is the responsibility of the caller. */
2427 needed += reg_parm_stack_space;
2430 #ifdef ARGS_GROW_DOWNWARD
2431 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2434 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2437 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
/* Inherit the caller's map of in-use bytes, then clear the new tail.  */
2439 if (initial_highest_arg_in_use)
2440 bcopy (initial_stack_usage_map, stack_usage_map,
2441 initial_highest_arg_in_use);
2443 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2444 bzero (&stack_usage_map[initial_highest_arg_in_use],
2445 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2448 /* The address of the outgoing argument list must not be copied to a
2449 register here, because argblock would be left pointing to the
2450 wrong place after the call to allocate_dynamic_stack_space below.
2453 argblock = virtual_outgoing_args_rtx;
2454 #else /* not ACCUMULATE_OUTGOING_ARGS */
2455 #ifndef PUSH_ROUNDING
2456 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2460 #ifdef PUSH_ARGS_REVERSED
2461 #ifdef PREFERRED_STACK_BOUNDARY
2462 /* If we push args individually in reverse order, perform stack alignment
2463 before the first push (the last arg). */
2465 anti_adjust_stack (GEN_INT (args_size.constant
2466 - original_args_size.constant))
2470 #ifdef PUSH_ARGS_REVERSED
2478 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2479 /* The argument list is the property of the called routine and it
2480 may clobber it. If the fixed area has been used for previous
2481 parameters, we must save and restore it.
2483 Here we compute the boundary of the area that needs to be saved, if any. */
2485 #ifdef ARGS_GROW_DOWNWARD
2486 for (count = 0; count < reg_parm_stack_space + 1; count++)
2488 for (count = 0; count < reg_parm_stack_space; count++)
2491 if (count >= highest_outgoing_arg_in_use
2492 || stack_usage_map[count] == 0)
2495 if (low_to_save == -1)
2496 low_to_save = count;
2498 high_to_save = count;
/* low_to_save >= 0 means some bytes of the fixed register-parm area are
   live; copy [low_to_save, high_to_save] into SAVE_AREA before pushing.  */
2501 if (low_to_save >= 0)
2503 int num_to_save = high_to_save - low_to_save + 1;
2504 enum machine_mode save_mode
2505 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2508 /* If we don't have the required alignment, must do this in BLKmode. */
2509 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2510 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2511 save_mode = BLKmode;
2513 #ifdef ARGS_GROW_DOWNWARD
2514 stack_area = gen_rtx_MEM (save_mode,
2515 memory_address (save_mode,
2516 plus_constant (argblock,
2519 stack_area = gen_rtx_MEM (save_mode,
2520 memory_address (save_mode,
2521 plus_constant (argblock,
2524 if (save_mode == BLKmode)
2526 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2527 MEM_IN_STRUCT_P (save_area) = 0;
2528 emit_block_move (validize_mem (save_area), stack_area,
2529 GEN_INT (num_to_save),
2530 PARM_BOUNDARY / BITS_PER_UNIT);
2534 save_area = gen_reg_rtx (save_mode);
2535 emit_move_insn (save_area, stack_area);
2540 /* Push the args that need to be pushed. */
2542 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2543 are to be pushed. */
2544 for (count = 0; count < nargs; count++, argnum += inc)
2546 register enum machine_mode mode = argvec[argnum].mode;
2547 register rtx val = argvec[argnum].value;
2548 rtx reg = argvec[argnum].reg;
2549 int partial = argvec[argnum].partial;
2550 #ifdef ACCUMULATE_OUTGOING_ARGS
2551 int lower_bound, upper_bound, i;
/* Arguments passed entirely in a register are handled by the register-
   loading loop further down; here handle only those needing a push.  */
2554 if (! (reg != 0 && partial == 0))
2556 #ifdef ACCUMULATE_OUTGOING_ARGS
2557 /* If this is being stored into a pre-allocated, fixed-size, stack
2558 area, save any previous data at that location. */
2560 #ifdef ARGS_GROW_DOWNWARD
2561 /* stack_slot is negative, but we want to index stack_usage_map
2562 with positive values. */
2563 upper_bound = -argvec[argnum].offset.constant + 1;
2564 lower_bound = upper_bound - argvec[argnum].size.constant;
2566 lower_bound = argvec[argnum].offset.constant;
2567 upper_bound = lower_bound + argvec[argnum].size.constant;
2570 for (i = lower_bound; i < upper_bound; i++)
2571 if (stack_usage_map[i]
2572 /* Don't store things in the fixed argument area at this point;
2573 it has already been saved. */
2574 && i > reg_parm_stack_space)
/* The scan stopping before upper_bound means a live byte overlaps this
   slot, so preserve the old contents in a pseudo before pushing.  */
2577 if (i != upper_bound)
2579 /* We need to make a save area. See what mode we can make it. */
2580 enum machine_mode save_mode
2581 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2584 = gen_rtx_MEM (save_mode,
2585 memory_address (save_mode,
2586 plus_constant (argblock, argvec[argnum].offset.constant)));
2587 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2588 emit_move_insn (argvec[argnum].save_area, stack_area);
2591 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2592 argblock, GEN_INT (argvec[argnum].offset.constant),
2593 reg_parm_stack_space);
2595 #ifdef ACCUMULATE_OUTGOING_ARGS
2596 /* Now mark the segment we just used. */
2597 for (i = lower_bound; i < upper_bound; i++)
2598 stack_usage_map[i] = 1;
2605 #ifndef PUSH_ARGS_REVERSED
2606 #ifdef PREFERRED_STACK_BOUNDARY
2607 /* If we pushed args in forward order, perform stack alignment
2608 after pushing the last arg. */
2610 anti_adjust_stack (GEN_INT (args_size.constant
2611 - original_args_size.constant));
2615 #ifdef PUSH_ARGS_REVERSED
2621 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2623 /* Now load any reg parms into their regs. */
2625 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2626 are to be pushed. */
2627 for (count = 0; count < nargs; count++, argnum += inc)
2629 register rtx val = argvec[argnum].value;
2630 rtx reg = argvec[argnum].reg;
2631 int partial = argvec[argnum].partial;
2633 if (reg != 0 && partial == 0)
2634 emit_move_insn (reg, val);
2638 /* For version 1.37, try deleting this entirely. */
2642 /* Any regs containing parms remain in use through the call. */
2643 for (count = 0; count < nargs; count++)
2644 if (argvec[count].reg != 0)
2645 use_reg (&call_fusage, argvec[count].reg);
2647 /* Don't allow popping to be deferred, since then
2648 cse'ing of library calls could delete a call and leave the pop. */
2651 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2652 will set inhibit_defer_pop to that value. */
2654 /* The return type is needed to decide how many bytes the function pops.
2655 Signedness plays no role in that, so for simplicity, we pretend it's
2656 always signed. We also assume that the list of arguments passed has
2657 no impact, so we pretend it is unknown. */
2660 get_identifier (XSTR (orgfun, 0)),
2661 build_function_type (outmode == VOIDmode ? void_type_node
2662 : type_for_mode (outmode, 0), NULL_TREE),
2663 args_size.constant, 0,
2664 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2665 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2666 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2670 /* Now restore inhibit_defer_pop to its actual original value. */
2673 #ifdef ACCUMULATE_OUTGOING_ARGS
2674 #ifdef REG_PARM_STACK_SPACE
/* Undo the fixed-area save made before the pushes: copy SAVE_AREA back
   over [low_to_save, high_to_save] of the register-parm stack space.  */
2677 enum machine_mode save_mode = GET_MODE (save_area);
2678 #ifdef ARGS_GROW_DOWNWARD
2680 = gen_rtx_MEM (save_mode,
2681 memory_address (save_mode,
2682 plus_constant (argblock,
2686 = gen_rtx_MEM (save_mode,
2687 memory_address (save_mode,
2688 plus_constant (argblock, low_to_save)));
2691 if (save_mode != BLKmode)
2692 emit_move_insn (stack_area, save_area);
2694 emit_block_move (stack_area, validize_mem (save_area),
2695 GEN_INT (high_to_save - low_to_save + 1),
2696 PARM_BOUNDARY / BITS_PER_UNIT);
2700 /* If we saved any argument areas, restore them. */
2701 for (count = 0; count < nargs; count++)
2702 if (argvec[count].save_area)
2704 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2706 = gen_rtx_MEM (save_mode,
2707 memory_address (save_mode,
2708 plus_constant (argblock, argvec[count].offset.constant)));
2710 emit_move_insn (stack_area, argvec[count].save_area);
2713 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2714 stack_usage_map = initial_stack_usage_map;
2718 /* Like emit_library_call except that an extra argument, VALUE,
2719 comes second and says where to store the result.
2720 (If VALUE is zero, this function chooses a convenient way
2721 to return the value.)
2723 This function returns an rtx for where the value is to be found.
2724 If VALUE is nonzero, VALUE is returned. */
2727 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2728 enum machine_mode outmode, int nargs, ...))
2730 #ifndef ANSI_PROTOTYPES
2734 enum machine_mode outmode;
2738 /* Total size in bytes of all the stack-parms scanned so far. */
2739 struct args_size args_size;
2740 /* Size of arguments before any adjustments (such as rounding). */
2741 struct args_size original_args_size;
2742 register int argnum;
2747 CUMULATIVE_ARGS args_so_far;
2748 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2749 struct args_size offset; struct args_size size; rtx save_area; };
2751 int old_inhibit_defer_pop = inhibit_defer_pop;
2752 rtx call_fusage = 0;
2754 int pcc_struct_value = 0;
2755 int struct_value_size = 0;
2757 int reg_parm_stack_space = 0;
2758 #ifdef ACCUMULATE_OUTGOING_ARGS
2762 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2763 /* Define the boundary of the register parm stack space that needs to be
2765 int low_to_save = -1, high_to_save;
2766 rtx save_area = 0; /* Place that it is saved */
2769 #ifdef ACCUMULATE_OUTGOING_ARGS
2770 /* Size of the stack reserved for parameter registers. */
2771 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2772 char *initial_stack_usage_map = stack_usage_map;
2775 #ifdef REG_PARM_STACK_SPACE
2776 #ifdef MAYBE_REG_PARM_STACK_SPACE
2777 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2779 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2783 VA_START (p, nargs);
2785 #ifndef ANSI_PROTOTYPES
2786 orgfun = va_arg (p, rtx);
2787 value = va_arg (p, rtx);
2788 no_queue = va_arg (p, int);
2789 outmode = va_arg (p, enum machine_mode);
2790 nargs = va_arg (p, int);
2793 is_const = no_queue;
2796 /* If this kind of value comes back in memory,
2797 decide where in memory it should come back. */
2798 if (aggregate_value_p (type_for_mode (outmode, 0)))
2800 #ifdef PCC_STATIC_STRUCT_RETURN
2802 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2804 mem_value = gen_rtx_MEM (outmode, pointer_reg);
2805 pcc_struct_value = 1;
2807 value = gen_reg_rtx (outmode);
2808 #else /* not PCC_STATIC_STRUCT_RETURN */
2809 struct_value_size = GET_MODE_SIZE (outmode);
2810 if (value != 0 && GET_CODE (value) == MEM)
2813 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2816 /* This call returns a big structure. */
2820 /* ??? Unfinished: must pass the memory address as an argument. */
2822 /* Copy all the libcall-arguments out of the varargs data
2823 and into a vector ARGVEC.
2825 Compute how to pass each argument. We only support a very small subset
2826 of the full argument passing conventions to limit complexity here since
2827 library functions shouldn't have many args. */
2829 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2830 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
2832 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2834 args_size.constant = 0;
2841 /* If there's a structure value address to be passed,
2842 either pass it in the special place, or pass it as an extra argument. */
2843 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2845 rtx addr = XEXP (mem_value, 0);
2848 /* Make sure it is a reasonable operand for a move or push insn. */
2849 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2850 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2851 addr = force_operand (addr, NULL_RTX);
2853 argvec[count].value = addr;
2854 argvec[count].mode = Pmode;
2855 argvec[count].partial = 0;
2857 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
2858 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2859 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
2863 locate_and_pad_parm (Pmode, NULL_TREE,
2864 argvec[count].reg && argvec[count].partial == 0,
2865 NULL_TREE, &args_size, &argvec[count].offset,
2866 &argvec[count].size);
2869 if (argvec[count].reg == 0 || argvec[count].partial != 0
2870 || reg_parm_stack_space > 0)
2871 args_size.constant += argvec[count].size.constant;
2873 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
2878 for (; count < nargs; count++)
2880 rtx val = va_arg (p, rtx);
2881 enum machine_mode mode = va_arg (p, enum machine_mode);
2883 /* We cannot convert the arg value to the mode the library wants here;
2884 must do it earlier where we know the signedness of the arg. */
2886 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2889 /* On some machines, there's no way to pass a float to a library fcn.
2890 Pass it as a double instead. */
2891 #ifdef LIBGCC_NEEDS_DOUBLE
2892 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2893 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2896 /* There's no need to call protect_from_queue, because
2897 either emit_move_insn or emit_push_insn will do that. */
2899 /* Make sure it is a reasonable operand for a move or push insn. */
2900 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2901 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2902 val = force_operand (val, NULL_RTX);
2904 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2905 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2907 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2908 be viewed as just an efficiency improvement. */
2909 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2910 emit_move_insn (slot, val);
2911 val = XEXP (slot, 0);
2916 argvec[count].value = val;
2917 argvec[count].mode = mode;
2919 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2920 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2922 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2923 argvec[count].partial
2924 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2926 argvec[count].partial = 0;
2929 locate_and_pad_parm (mode, NULL_TREE,
2930 argvec[count].reg && argvec[count].partial == 0,
2931 NULL_TREE, &args_size, &argvec[count].offset,
2932 &argvec[count].size);
2934 if (argvec[count].size.var)
2937 if (reg_parm_stack_space == 0 && argvec[count].partial)
2938 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2940 if (argvec[count].reg == 0 || argvec[count].partial != 0
2941 || reg_parm_stack_space > 0)
2942 args_size.constant += argvec[count].size.constant;
2944 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2948 #ifdef FINAL_REG_PARM_STACK_SPACE
2949 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2952 /* If this machine requires an external definition for library
2953 functions, write one out. */
2954 assemble_external_libcall (fun);
2956 original_args_size = args_size;
2957 #ifdef PREFERRED_STACK_BOUNDARY
2958 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2959 / STACK_BYTES) * STACK_BYTES);
2962 args_size.constant = MAX (args_size.constant,
2963 reg_parm_stack_space);
2965 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2966 args_size.constant -= reg_parm_stack_space;
2969 if (args_size.constant > current_function_outgoing_args_size)
2970 current_function_outgoing_args_size = args_size.constant;
2972 #ifdef ACCUMULATE_OUTGOING_ARGS
2973 /* Since the stack pointer will never be pushed, it is possible for
2974 the evaluation of a parm to clobber something we have already
2975 written to the stack. Since most function calls on RISC machines
2976 do not use the stack, this is uncommon, but must work correctly.
2978 Therefore, we save any area of the stack that was already written
2979 and that we are using. Here we set up to do this by making a new
2980 stack usage map from the old one.
2982 Another approach might be to try to reorder the argument
2983 evaluations to avoid this conflicting stack usage. */
2985 needed = args_size.constant;
2987 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2988 /* Since we will be writing into the entire argument area, the
2989 map must be allocated for its entire size, not just the part that
2990 is the responsibility of the caller. */
2991 needed += reg_parm_stack_space;
2994 #ifdef ARGS_GROW_DOWNWARD
2995 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2998 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3001 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3003 if (initial_highest_arg_in_use)
3004 bcopy (initial_stack_usage_map, stack_usage_map,
3005 initial_highest_arg_in_use);
3007 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3008 bzero (&stack_usage_map[initial_highest_arg_in_use],
3009 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3012 /* The address of the outgoing argument list must not be copied to a
3013 register here, because argblock would be left pointing to the
3014 wrong place after the call to allocate_dynamic_stack_space below.
3017 argblock = virtual_outgoing_args_rtx;
3018 #else /* not ACCUMULATE_OUTGOING_ARGS */
3019 #ifndef PUSH_ROUNDING
3020 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3024 #ifdef PUSH_ARGS_REVERSED
3025 #ifdef PREFERRED_STACK_BOUNDARY
3026 /* If we push args individually in reverse order, perform stack alignment
3027 before the first push (the last arg). */
3029 anti_adjust_stack (GEN_INT (args_size.constant
3030 - original_args_size.constant));
3034 #ifdef PUSH_ARGS_REVERSED
3042 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3043 /* The argument list is the property of the called routine and it
3044 may clobber it. If the fixed area has been used for previous
3045 parameters, we must save and restore it.
3047 Here we compute the boundary of the area that needs to be saved, if any. */
3049 #ifdef ARGS_GROW_DOWNWARD
3050 for (count = 0; count < reg_parm_stack_space + 1; count++)
3052 for (count = 0; count < reg_parm_stack_space; count++)
3055 if (count >= highest_outgoing_arg_in_use
3056 || stack_usage_map[count] == 0)
3059 if (low_to_save == -1)
3060 low_to_save = count;
3062 high_to_save = count;
3065 if (low_to_save >= 0)
3067 int num_to_save = high_to_save - low_to_save + 1;
3068 enum machine_mode save_mode
3069 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3072 /* If we don't have the required alignment, must do this in BLKmode. */
3073 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3074 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3075 save_mode = BLKmode;
3077 #ifdef ARGS_GROW_DOWNWARD
3078 stack_area = gen_rtx_MEM (save_mode,
3079 memory_address (save_mode,
3080 plus_constant (argblock,
3083 stack_area = gen_rtx_MEM (save_mode,
3084 memory_address (save_mode,
3085 plus_constant (argblock,
3088 if (save_mode == BLKmode)
3090 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3091 MEM_IN_STRUCT_P (save_area) = 0;
3092 emit_block_move (validize_mem (save_area), stack_area,
3093 GEN_INT (num_to_save),
3094 PARM_BOUNDARY / BITS_PER_UNIT);
3098 save_area = gen_reg_rtx (save_mode);
3099 emit_move_insn (save_area, stack_area);
3104 /* Push the args that need to be pushed. */
3106 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3107 are to be pushed. */
3108 for (count = 0; count < nargs; count++, argnum += inc)
3110 register enum machine_mode mode = argvec[argnum].mode;
3111 register rtx val = argvec[argnum].value;
3112 rtx reg = argvec[argnum].reg;
3113 int partial = argvec[argnum].partial;
3114 #ifdef ACCUMULATE_OUTGOING_ARGS
3115 int lower_bound, upper_bound, i;
3118 if (! (reg != 0 && partial == 0))
3120 #ifdef ACCUMULATE_OUTGOING_ARGS
3121 /* If this is being stored into a pre-allocated, fixed-size, stack
3122 area, save any previous data at that location. */
3124 #ifdef ARGS_GROW_DOWNWARD
3125 /* stack_slot is negative, but we want to index stack_usage_map
3126 with positive values. */
3127 upper_bound = -argvec[argnum].offset.constant + 1;
3128 lower_bound = upper_bound - argvec[argnum].size.constant;
3130 lower_bound = argvec[argnum].offset.constant;
3131 upper_bound = lower_bound + argvec[argnum].size.constant;
3134 for (i = lower_bound; i < upper_bound; i++)
3135 if (stack_usage_map[i]
3136 /* Don't store things in the fixed argument area at this point;
3137 it has already been saved. */
3138 && i > reg_parm_stack_space)
3141 if (i != upper_bound)
3143 /* We need to make a save area. See what mode we can make it. */
3144 enum machine_mode save_mode
3145 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3148 = gen_rtx_MEM (save_mode,
3149 memory_address (save_mode,
3150 plus_constant (argblock,
3151 argvec[argnum].offset.constant)));
3152 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3153 emit_move_insn (argvec[argnum].save_area, stack_area);
3156 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3157 argblock, GEN_INT (argvec[argnum].offset.constant),
3158 reg_parm_stack_space);
3160 #ifdef ACCUMULATE_OUTGOING_ARGS
3161 /* Now mark the segment we just used. */
3162 for (i = lower_bound; i < upper_bound; i++)
3163 stack_usage_map[i] = 1;
3170 #ifndef PUSH_ARGS_REVERSED
3171 #ifdef PREFERRED_STACK_BOUNDARY
3172 /* If we pushed args in forward order, perform stack alignment
3173 after pushing the last arg. */
3175 anti_adjust_stack (GEN_INT (args_size.constant
3176 - original_args_size.constant));
3180 #ifdef PUSH_ARGS_REVERSED
3186 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3188 /* Now load any reg parms into their regs. */
3190 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3191 are to be pushed. */
3192 for (count = 0; count < nargs; count++, argnum += inc)
3194 register rtx val = argvec[argnum].value;
3195 rtx reg = argvec[argnum].reg;
3196 int partial = argvec[argnum].partial;
3198 if (reg != 0 && partial == 0)
3199 emit_move_insn (reg, val);
3204 /* For version 1.37, try deleting this entirely. */
3209 /* Any regs containing parms remain in use through the call. */
3210 for (count = 0; count < nargs; count++)
3211 if (argvec[count].reg != 0)
3212 use_reg (&call_fusage, argvec[count].reg);
3214 /* Pass the function the address in which to return a structure value. */
3215 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3217 emit_move_insn (struct_value_rtx,
3219 force_operand (XEXP (mem_value, 0),
3221 if (GET_CODE (struct_value_rtx) == REG)
3222 use_reg (&call_fusage, struct_value_rtx);
3225 /* Don't allow popping to be deferred, since then
3226 cse'ing of library calls could delete a call and leave the pop. */
3229 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3230 will set inhibit_defer_pop to that value. */
3231 /* See the comment in emit_library_call about the function type we build
3235 get_identifier (XSTR (orgfun, 0)),
3236 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3237 args_size.constant, struct_value_size,
3238 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3239 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3240 old_inhibit_defer_pop + 1, call_fusage, is_const);
3242 /* Now restore inhibit_defer_pop to its actual original value. */
3247 /* Copy the value to the right place. */
3248 if (outmode != VOIDmode)
3254 if (value != mem_value)
3255 emit_move_insn (value, mem_value);
3257 else if (value != 0)
3258 emit_move_insn (value, hard_libcall_value (outmode));
3260 value = hard_libcall_value (outmode);
3263 #ifdef ACCUMULATE_OUTGOING_ARGS
3264 #ifdef REG_PARM_STACK_SPACE
3267 enum machine_mode save_mode = GET_MODE (save_area);
3268 #ifdef ARGS_GROW_DOWNWARD
3270 = gen_rtx_MEM (save_mode,
3271 memory_address (save_mode,
3272 plus_constant (argblock,
3276 = gen_rtx_MEM (save_mode,
3277 memory_address (save_mode,
3278 plus_constant (argblock, low_to_save)));
3280 if (save_mode != BLKmode)
3281 emit_move_insn (stack_area, save_area);
3283 emit_block_move (stack_area, validize_mem (save_area),
3284 GEN_INT (high_to_save - low_to_save + 1),
3285 PARM_BOUNDARY / BITS_PER_UNIT);
3289 /* If we saved any argument areas, restore them. */
3290 for (count = 0; count < nargs; count++)
3291 if (argvec[count].save_area)
3293 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3295 = gen_rtx_MEM (save_mode,
3296 memory_address (save_mode, plus_constant (argblock,
3297 argvec[count].offset.constant)));
3299 emit_move_insn (stack_area, argvec[count].save_area);
3302 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3303 stack_usage_map = initial_stack_usage_map;
3310 /* Return an rtx which represents a suitable home on the stack
3311 given TYPE, the type of the argument looking for a home.
3312 This is called only for BLKmode arguments.
3314 SIZE is the size needed for this target.
3315 ARGS_ADDR is the address of the bottom of the argument block for this call.
3316 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3317 if this machine uses push insns. */
/* Build a BLKmode MEM rtx addressing the stack home for one argument:
   the slot at ARGS_ADDR + OFFSET.  Per the header comment above, this is
   used only for BLKmode arguments.
   NOTE(review): this listing is incomplete — the return-type line, the
   declarations of TYPE/SIZE/ARGS_ADDR, the opening brace, the local
   `target' declaration, and the closing brace are missing between the
   visible lines.  Presumably the return type is rtx and `target' is a
   local rtx — TODO confirm against the full source.  */
3320 target_for_arg (type, size, args_addr, offset)
3324 struct args_size offset;
3327 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3329 /* We do not call memory_address if possible,
3330 because we want to address as close to the stack
3331 as possible. For non-variable sized arguments,
3332 this will be stack-pointer relative addressing. */
/* Constant offset: form the address directly with plus_constant so the
   result stays a simple base+constant (stack-pointer-relative) address.  */
3333 if (GET_CODE (offset_rtx) == CONST_INT)
3334 target = plus_constant (args_addr, INTVAL (offset_rtx));
/* NOTE(review): the `else' branch header is missing from this listing;
   the two lines below handle the non-constant-offset case, forcing the
   PLUS through memory_address to get a valid address.  */
3337 /* I have no idea how to guarantee that this
3338 will work in the presence of register parameters. */
3339 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3340 target = memory_address (QImode, target);
/* Wrap the computed address in a BLKmode MEM; callers store the
   argument's bytes through it.  */
3343 return gen_rtx_MEM (BLKmode, target);
3347 /* Store a single argument for a function call
3348 into the register or memory area where it must be passed.
3349 *ARG describes the argument value and where to pass it.
3351 ARGBLOCK is the address of the stack-block for all the arguments,
3352 or 0 on a machine where arguments are pushed individually.
3354 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3355 so must be careful about how the stack is used.
3357 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3358 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3359 that we need not worry about saving and restoring the stack.
3361 FNDECL is the declaration of the function we are calling. */
3364 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3365 reg_parm_stack_space)
3366 struct arg_data *arg;
3370 int reg_parm_stack_space;
3372 register tree pval = arg->tree_value;
3376 #ifdef ACCUMULATE_OUTGOING_ARGS
3377 int i, lower_bound, upper_bound;
3380 if (TREE_CODE (pval) == ERROR_MARK)
3383 /* Push a new temporary level for any temporaries we make for
3387 #ifdef ACCUMULATE_OUTGOING_ARGS
3388 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3389 save any previous data at that location. */
3390 if (argblock && ! variable_size && arg->stack)
3392 #ifdef ARGS_GROW_DOWNWARD
3393 /* stack_slot is negative, but we want to index stack_usage_map
3394 with positive values. */
3395 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3396 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3400 lower_bound = upper_bound - arg->size.constant;
3402 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3403 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3407 upper_bound = lower_bound + arg->size.constant;
3410 for (i = lower_bound; i < upper_bound; i++)
3411 if (stack_usage_map[i]
3412 /* Don't store things in the fixed argument area at this point;
3413 it has already been saved. */
3414 && i > reg_parm_stack_space)
3417 if (i != upper_bound)
3419 /* We need to make a save area. See what mode we can make it. */
3420 enum machine_mode save_mode
3421 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3423 = gen_rtx_MEM (save_mode,
3424 memory_address (save_mode,
3425 XEXP (arg->stack_slot, 0)));
3427 if (save_mode == BLKmode)
3429 arg->save_area = assign_stack_temp (BLKmode,
3430 arg->size.constant, 0);
3431 MEM_IN_STRUCT_P (arg->save_area)
3432 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
3433 preserve_temp_slots (arg->save_area);
3434 emit_block_move (validize_mem (arg->save_area), stack_area,
3435 GEN_INT (arg->size.constant),
3436 PARM_BOUNDARY / BITS_PER_UNIT);
3440 arg->save_area = gen_reg_rtx (save_mode);
3441 emit_move_insn (arg->save_area, stack_area);
3447 /* If this isn't going to be placed on both the stack and in registers,
3448 set up the register and number of words. */
3449 if (! arg->pass_on_stack)
3450 reg = arg->reg, partial = arg->partial;
3452 if (reg != 0 && partial == 0)
3453 /* Being passed entirely in a register. We shouldn't be called in
3457 /* If this arg needs special alignment, don't load the registers
3459 if (arg->n_aligned_regs != 0)
3462 /* If this is being passed partially in a register, we can't evaluate
3463 it directly into its stack slot. Otherwise, we can. */
3464 if (arg->value == 0)
3466 #ifdef ACCUMULATE_OUTGOING_ARGS
3467 /* stack_arg_under_construction is nonzero if a function argument is
3468 being evaluated directly into the outgoing argument list and
3469 expand_call must take special action to preserve the argument list
3470 if it is called recursively.
3472 For scalar function arguments stack_usage_map is sufficient to
3473 determine which stack slots must be saved and restored. Scalar
3474 arguments in general have pass_on_stack == 0.
3476 If this argument is initialized by a function which takes the
3477 address of the argument (a C++ constructor or a C function
3478 returning a BLKmode structure), then stack_usage_map is
3479 insufficient and expand_call must push the stack around the
3480 function call. Such arguments have pass_on_stack == 1.
3482 Note that it is always safe to set stack_arg_under_construction,
3483 but this generates suboptimal code if set when not needed. */
3485 if (arg->pass_on_stack)
3486 stack_arg_under_construction++;
3488 arg->value = expand_expr (pval,
3490 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3491 ? NULL_RTX : arg->stack,
3494 /* If we are promoting object (or for any other reason) the mode
3495 doesn't agree, convert the mode. */
3497 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3498 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3499 arg->value, arg->unsignedp);
3501 #ifdef ACCUMULATE_OUTGOING_ARGS
3502 if (arg->pass_on_stack)
3503 stack_arg_under_construction--;
3507 /* Don't allow anything left on stack from computation
3508 of argument to alloca. */
3510 do_pending_stack_adjust ();
3512 if (arg->value == arg->stack)
3514 /* If the value is already in the stack slot, we are done moving
3516 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3518 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3519 XEXP (arg->stack, 0), ptr_mode,
3520 ARGS_SIZE_RTX (arg->size),
3521 TYPE_MODE (sizetype),
3522 GEN_INT (MEMORY_USE_RW),
3523 TYPE_MODE (integer_type_node));
3526 else if (arg->mode != BLKmode)
3530 /* Argument is a scalar, not entirely passed in registers.
3531 (If part is passed in registers, arg->partial says how much
3532 and emit_push_insn will take care of putting it there.)
3534 Push it, and if its size is less than the
3535 amount of space allocated to it,
3536 also bump stack pointer by the additional space.
3537 Note that in C the default argument promotions
3538 will prevent such mismatches. */
3540 size = GET_MODE_SIZE (arg->mode);
3541 /* Compute how much space the push instruction will push.
3542 On many machines, pushing a byte will advance the stack
3543 pointer by a halfword. */
3544 #ifdef PUSH_ROUNDING
3545 size = PUSH_ROUNDING (size);
3549 /* Compute how much space the argument should get:
3550 round up to a multiple of the alignment for arguments. */
3551 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3552 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3553 / (PARM_BOUNDARY / BITS_PER_UNIT))
3554 * (PARM_BOUNDARY / BITS_PER_UNIT));
3556 /* This isn't already where we want it on the stack, so put it there.
3557 This can either be done with push or copy insns. */
3558 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3559 partial, reg, used - size, argblock,
3560 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3564 /* BLKmode, at least partly to be pushed. */
3566 register int excess;
3569 /* Pushing a nonscalar.
3570 If part is passed in registers, PARTIAL says how much
3571 and emit_push_insn will take care of putting it there. */
3573 /* Round its size up to a multiple
3574 of the allocation unit for arguments. */
3576 if (arg->size.var != 0)
3579 size_rtx = ARGS_SIZE_RTX (arg->size);
3583 /* PUSH_ROUNDING has no effect on us, because
3584 emit_push_insn for BLKmode is careful to avoid it. */
3585 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3586 + partial * UNITS_PER_WORD);
3587 size_rtx = expr_size (pval);
3590 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3591 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3592 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3593 reg_parm_stack_space);
3597 /* Unless this is a partially-in-register argument, the argument is now
3600 ??? Note that this can change arg->value from arg->stack to
3601 arg->stack_slot and it matters when they are not the same.
3602 It isn't totally clear that this is correct in all cases. */
3604 arg->value = arg->stack_slot;
3606 /* Once we have pushed something, pops can't safely
3607 be deferred during the rest of the arguments. */
3610 /* ANSI doesn't require a sequence point here,
3611 but PCC has one, so this will avoid some problems. */
3614 /* Free any temporary slots made in processing this argument. Show
3615 that we might have taken the address of something and pushed that
3617 preserve_temp_slots (NULL_RTX);
3621 #ifdef ACCUMULATE_OUTGOING_ARGS
3622 /* Now mark the segment we just used. */
3623 if (argblock && ! variable_size && arg->stack)
3624 for (i = lower_bound; i < upper_bound; i++)
3625 stack_usage_map[i] = 1;