1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
31 #include "insn-flags.h"
33 /* Decide whether a function's arguments should be processed
34 from first to last or from last to first.
36 They should if the stack and args grow in opposite directions, but
37 only if we have push insns. */
41 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
42 #define PUSH_ARGS_REVERSED /* If it's last to first */
47 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
48 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
50 /* Data structure and subroutines used within expand_call. */
54 /* Tree node for this argument. */
56 /* Mode for value; TYPE_MODE unless promoted. */
57 enum machine_mode mode;
58 /* Current RTL value for argument, or 0 if it isn't precomputed. */
60 /* Initially-computed RTL value for argument; only for const functions. */
62 /* Register to pass this argument in, 0 if passed on stack, or an
63 PARALLEL if the arg is to be copied into multiple non-contiguous
66 /* If REG was promoted from the actual mode of the argument expression,
67 indicates whether the promotion is sign- or zero-extended. */
69 /* Number of registers to use. 0 means put the whole arg in registers.
70 Also 0 if not passed in registers. */
72 /* Non-zero if argument must be passed on stack.
73 Note that some arguments may be passed on the stack
74 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
75 pass_on_stack identifies arguments that *cannot* go in registers. */
77 /* Offset of this argument from beginning of stack-args. */
78 struct args_size offset;
79 /* Similar, but offset to the start of the stack slot. Different from
80 OFFSET if this arg pads downward. */
81 struct args_size slot_offset;
82 /* Size of this argument on the stack, rounded up for any padding it gets,
83 parts of the argument passed in registers do not count.
84 If REG_PARM_STACK_SPACE is defined, then register parms
85 are counted here as well. */
86 struct args_size size;
87 /* Location on the stack at which parameter should be stored. The store
88 has already been done if STACK == VALUE. */
90 /* Location on the stack of the start of this argument slot. This can
91 differ from STACK if this arg pads downward. This location is known
92 to be aligned to FUNCTION_ARG_BOUNDARY. */
94 #ifdef ACCUMULATE_OUTGOING_ARGS
95 /* Place that this stack area has been saved, if needed. */
98 /* If an argument's alignment does not permit direct copying into registers,
99 copy in smaller-sized pieces into pseudos. These are stored in a
100 block pointed to by this field. The next field says how many
101 word-sized pseudos we made. */
106 #ifdef ACCUMULATE_OUTGOING_ARGS
107 /* A vector of one char per byte of stack space. A byte is non-zero if
108 the corresponding stack location has been used.
109 This vector is used to prevent a function call within an argument from
110 clobbering any stack already set up. */
111 static char *stack_usage_map;
113 /* Size of STACK_USAGE_MAP. */
114 static int highest_outgoing_arg_in_use;
116 /* stack_arg_under_construction is nonzero when an argument may be
117 initialized with a constructor call (including a C function that
118 returns a BLKmode struct) and expand_call must take special action
119 to make sure the object being constructed does not overlap the
120 argument list for the constructor call. */
121 int stack_arg_under_construction;
124 static int calls_function PROTO((tree, int));
125 static int calls_function_1 PROTO((tree, int));
126 static void emit_call_1 PROTO((rtx, tree, tree, int, int, rtx, rtx,
128 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
131 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
134 If WHICH is 0, return 1 if EXP contains a call to any function.
135 Actually, we only need return 1 if evaluating EXP would require pushing
136 arguments on the stack, but that is too difficult to compute, so we just
137 assume any function call might require the stack. */
139 static tree calls_function_save_exprs;
142 calls_function (exp, which)
147 calls_function_save_exprs = 0;
148 val = calls_function_1 (exp, which);
149 calls_function_save_exprs = 0;
154 calls_function_1 (exp, which)
159 enum tree_code code = TREE_CODE (exp);
160 int type = TREE_CODE_CLASS (code);
161 int length = tree_code_length[(int) code];
163 /* If this code is language-specific, we don't know what it will do. */
164 if ((int) code >= NUM_TREE_CODES)
167 /* Only expressions and references can contain calls. */
168 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
177 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
178 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
181 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
183 if ((DECL_BUILT_IN (fndecl)
184 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
185 || (DECL_SAVED_INSNS (fndecl)
186 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
187 & FUNCTION_FLAGS_CALLS_ALLOCA)))
191 /* Third operand is RTL. */
196 if (SAVE_EXPR_RTL (exp) != 0)
198 if (value_member (exp, calls_function_save_exprs))
200 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
201 calls_function_save_exprs);
202 return (TREE_OPERAND (exp, 0) != 0
203 && calls_function_1 (TREE_OPERAND (exp, 0), which));
209 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
210 if (DECL_INITIAL (local) != 0
211 && calls_function_1 (DECL_INITIAL (local), which))
215 register tree subblock;
217 for (subblock = BLOCK_SUBBLOCKS (exp);
219 subblock = TREE_CHAIN (subblock))
220 if (calls_function_1 (subblock, which))
225 case METHOD_CALL_EXPR:
229 case WITH_CLEANUP_EXPR:
237 for (i = 0; i < length; i++)
238 if (TREE_OPERAND (exp, i) != 0
239 && calls_function_1 (TREE_OPERAND (exp, i), which))
245 /* Force FUNEXP into a form suitable for the address of a CALL,
246 and return that as an rtx. Also load the static chain register
247 if FNDECL is a nested function.
249 CALL_FUSAGE points to a variable holding the prospective
250 CALL_INSN_FUNCTION_USAGE information. */
253 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
259 rtx static_chain_value = 0;
261 funexp = protect_from_queue (funexp, 0);
264 /* Get possible static chain value for nested function in C. */
265 static_chain_value = lookup_static_chain (fndecl);
267 /* Make a valid memory address and copy constants thru pseudo-regs,
268 but not for a constant address if -fno-function-cse. */
269 if (GET_CODE (funexp) != SYMBOL_REF)
271 #ifdef SMALL_REGISTER_CLASSES
272 /* If we are using registers for parameters, force the
273 function address into a register now. */
274 (SMALL_REGISTER_CLASSES && reg_parm_seen)
275 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
278 memory_address (FUNCTION_MODE, funexp);
281 #ifndef NO_FUNCTION_CSE
282 if (optimize && ! flag_no_function_cse)
283 #ifdef NO_RECURSIVE_FUNCTION_CSE
284 if (fndecl != current_function_decl)
286 funexp = force_reg (Pmode, funexp);
290 if (static_chain_value != 0)
292 emit_move_insn (static_chain_rtx, static_chain_value);
294 if (GET_CODE (static_chain_rtx) == REG)
295 use_reg (call_fusage, static_chain_rtx);
301 /* Generate instructions to call function FUNEXP,
302 and optionally pop the results.
303 The CALL_INSN is the first insn generated.
305 FNDECL is the declaration node of the function. This is given to the
306 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
308 FUNTYPE is the data type of the function. This is given to the macro
309 RETURN_POPS_ARGS to determine whether this function pops its own args.
310 We used to allow an identifier for library functions, but that doesn't
311 work when the return type is an aggregate type and the calling convention
312 says that the pointer to this aggregate is to be popped by the callee.
314 STACK_SIZE is the number of bytes of arguments on the stack,
315 rounded up to STACK_BOUNDARY; zero if the size is variable.
316 This is both to put into the call insn and
317 to generate explicit popping code if necessary.
319 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
320 It is zero if this call doesn't want a structure value.
322 NEXT_ARG_REG is the rtx that results from executing
323 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
324 just after all the args have had their registers assigned.
325 This could be whatever you like, but normally it is the first
326 arg-register beyond those used for args in this call,
327 or 0 if all the arg-registers are used in this call.
328 It is passed on to `gen_call' so you can put this info in the call insn.
330 VALREG is a hard register in which a value is returned,
331 or 0 if the call does not return a value.
333 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
334 the args to this call were processed.
335 We restore `inhibit_defer_pop' to that value.
337 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
338 denote registers used by the called function.
340 IS_CONST is true if this is a `const' call. */
343 emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
344 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
350 int struct_value_size;
353 int old_inhibit_defer_pop;
357 rtx stack_size_rtx = GEN_INT (stack_size);
358 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
360 int already_popped = 0;
362 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
363 and we don't want to load it into a register as an optimization,
364 because prepare_call_address already did it if it should be done. */
365 if (GET_CODE (funexp) != SYMBOL_REF)
366 funexp = memory_address (FUNCTION_MODE, funexp);
368 #ifndef ACCUMULATE_OUTGOING_ARGS
369 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
370 if (HAVE_call_pop && HAVE_call_value_pop
371 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
374 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
377 /* If this subroutine pops its own args, record that in the call insn
378 if possible, for the sake of frame pointer elimination. */
381 pat = gen_call_value_pop (valreg,
382 gen_rtx (MEM, FUNCTION_MODE, funexp),
383 stack_size_rtx, next_arg_reg, n_pop);
385 pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
386 stack_size_rtx, next_arg_reg, n_pop);
388 emit_call_insn (pat);
395 #if defined (HAVE_call) && defined (HAVE_call_value)
396 if (HAVE_call && HAVE_call_value)
399 emit_call_insn (gen_call_value (valreg,
400 gen_rtx (MEM, FUNCTION_MODE, funexp),
401 stack_size_rtx, next_arg_reg,
404 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
405 stack_size_rtx, next_arg_reg,
406 struct_value_size_rtx));
412 /* Find the CALL insn we just emitted. */
413 for (call_insn = get_last_insn ();
414 call_insn && GET_CODE (call_insn) != CALL_INSN;
415 call_insn = PREV_INSN (call_insn))
421 /* Put the register usage information on the CALL. If there is already
422 some usage information, put ours at the end. */
423 if (CALL_INSN_FUNCTION_USAGE (call_insn))
427 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
428 link = XEXP (link, 1))
431 XEXP (link, 1) = call_fusage;
434 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
436 /* If this is a const call, then set the insn's unchanging bit. */
438 CONST_CALL_P (call_insn) = 1;
440 /* Restore this now, so that we do defer pops for this call's args
441 if the context of the call as a whole permits. */
442 inhibit_defer_pop = old_inhibit_defer_pop;
444 #ifndef ACCUMULATE_OUTGOING_ARGS
445 /* If returning from the subroutine does not automatically pop the args,
446 we need an instruction to pop them sooner or later.
447 Perhaps do it now; perhaps just record how much space to pop later.
449 If returning from the subroutine does pop the args, indicate that the
450 stack pointer will be changed. */
452 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
455 CALL_INSN_FUNCTION_USAGE (call_insn)
456 = gen_rtx (EXPR_LIST, VOIDmode,
457 gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
458 CALL_INSN_FUNCTION_USAGE (call_insn));
459 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
460 stack_size_rtx = GEN_INT (stack_size);
465 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
466 pending_stack_adjust += stack_size;
468 adjust_stack (stack_size_rtx);
473 /* Generate all the code for a function call
474 and return an rtx for its value.
475 Store the value in TARGET (specified as an rtx) if convenient.
476 If the value is stored in TARGET then TARGET is returned.
477 If IGNORE is nonzero, then we ignore the value of the function call. */
480 expand_call (exp, target, ignore)
485 /* List of actual parameters. */
486 tree actparms = TREE_OPERAND (exp, 1);
487 /* RTX for the function to be called. */
489 /* Tree node for the function to be called (not the address!). */
491 /* Data type of the function. */
493 /* Declaration of the function being called,
494 or 0 if the function is computed (not known by name). */
498 /* Register in which non-BLKmode value will be returned,
499 or 0 if no value or if value is BLKmode. */
501 /* Address where we should return a BLKmode value;
502 0 if value not BLKmode. */
503 rtx structure_value_addr = 0;
504 /* Nonzero if that address is being passed by treating it as
505 an extra, implicit first parameter. Otherwise,
506 it is passed by being copied directly into struct_value_rtx. */
507 int structure_value_addr_parm = 0;
508 /* Size of aggregate value wanted, or zero if none wanted
509 or if we are using the non-reentrant PCC calling convention
510 or expecting the value in registers. */
511 int struct_value_size = 0;
512 /* Nonzero if called function returns an aggregate in memory PCC style,
513 by returning the address of where to find it. */
514 int pcc_struct_value = 0;
516 /* Number of actual parameters in this call, including struct value addr. */
518 /* Number of named args. Args after this are anonymous ones
519 and they must all go on the stack. */
521 /* Count arg position in order args appear. */
524 /* Vector of information about each argument.
525 Arguments are numbered in the order they will be pushed,
526 not the order they are written. */
527 struct arg_data *args;
529 /* Total size in bytes of all the stack-parms scanned so far. */
530 struct args_size args_size;
531 /* Size of arguments before any adjustments (such as rounding). */
532 struct args_size original_args_size;
533 /* Data on reg parms scanned so far. */
534 CUMULATIVE_ARGS args_so_far;
535 /* Nonzero if a reg parm has been scanned. */
537 /* Nonzero if this is an indirect function call. */
539 /* Nonzero if we must avoid push-insns in the args for this call.
540 If stack space is allocated for register parameters, but not by the
541 caller, then it is preallocated in the fixed part of the stack frame.
542 So the entire argument block must then be preallocated (i.e., we
543 ignore PUSH_ROUNDING in that case). */
545 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
546 int must_preallocate = 1;
549 int must_preallocate = 0;
551 int must_preallocate = 1;
555 /* Size of the stack reserved for parameter registers. */
556 int reg_parm_stack_space = 0;
558 /* 1 if scanning parms front to back, -1 if scanning back to front. */
560 /* Address of space preallocated for stack parms
561 (on machines that lack push insns), or 0 if space not preallocated. */
564 /* Nonzero if it is plausible that this is a call to alloca. */
566 /* Nonzero if this is a call to setjmp or a related function. */
568 /* Nonzero if this is a call to `longjmp'. */
570 /* Nonzero if this is a call to an inline function. */
571 int is_integrable = 0;
572 /* Nonzero if this is a call to a `const' function.
573 Note that only explicitly named functions are handled as `const' here. */
575 /* Nonzero if this is a call to a `volatile' function. */
577 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
578 /* Define the boundary of the register parm stack space that needs to be
580 int low_to_save = -1, high_to_save;
581 rtx save_area = 0; /* Place that it is saved */
584 #ifdef ACCUMULATE_OUTGOING_ARGS
585 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
586 char *initial_stack_usage_map = stack_usage_map;
589 rtx old_stack_level = 0;
590 int old_pending_adj = 0;
591 int old_stack_arg_under_construction;
592 int old_inhibit_defer_pop = inhibit_defer_pop;
597 /* The value of the function call can be put in a hard register. But
598 if -fcheck-memory-usage, code which invokes functions (and thus
599 damages some hard registers) can be inserted before using the value.
600 So, target is always a pseudo-register in that case. */
601 if (flag_check_memory_usage)
604 /* See if we can find a DECL-node for the actual function.
605 As a result, decide whether this is a call to an integrable function. */
607 p = TREE_OPERAND (exp, 0);
608 if (TREE_CODE (p) == ADDR_EXPR)
610 fndecl = TREE_OPERAND (p, 0);
611 if (TREE_CODE (fndecl) != FUNCTION_DECL)
616 && fndecl != current_function_decl
617 && DECL_INLINE (fndecl)
618 && DECL_SAVED_INSNS (fndecl)
619 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
621 else if (! TREE_ADDRESSABLE (fndecl))
623 /* In case this function later becomes inlinable,
624 record that there was already a non-inline call to it.
626 Use abstraction instead of setting TREE_ADDRESSABLE
628 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
631 warning_with_decl (fndecl, "can't inline call to `%s'");
632 warning ("called from here");
634 mark_addressable (fndecl);
637 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
638 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
641 if (TREE_THIS_VOLATILE (fndecl))
646 /* If we don't have specific function to call, see if we have a
647 constant or `noreturn' function from the type. */
650 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
651 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
654 #ifdef REG_PARM_STACK_SPACE
655 #ifdef MAYBE_REG_PARM_STACK_SPACE
656 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
658 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
662 /* Warn if this value is an aggregate type,
663 regardless of which calling convention we are using for it. */
664 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
665 warning ("function call has aggregate value");
667 /* Set up a place to return a structure. */
669 /* Cater to broken compilers. */
670 if (aggregate_value_p (exp))
672 /* This call returns a big structure. */
675 #ifdef PCC_STATIC_STRUCT_RETURN
677 pcc_struct_value = 1;
678 /* Easier than making that case work right. */
681 /* In case this is a static function, note that it has been
683 if (! TREE_ADDRESSABLE (fndecl))
684 mark_addressable (fndecl);
688 #else /* not PCC_STATIC_STRUCT_RETURN */
690 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
692 if (target && GET_CODE (target) == MEM)
693 structure_value_addr = XEXP (target, 0);
696 /* Assign a temporary on the stack to hold the value. */
698 /* For variable-sized objects, we must be called with a target
699 specified. If we were to allocate space on the stack here,
700 we would have no way of knowing when to free it. */
702 if (struct_value_size < 0)
706 = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
707 MEM_IN_STRUCT_P (structure_value_addr)
708 = AGGREGATE_TYPE_P (TREE_TYPE (exp));
712 #endif /* not PCC_STATIC_STRUCT_RETURN */
715 /* If called function is inline, try to integrate it. */
720 rtx before_call = get_last_insn ();
722 temp = expand_inline_function (fndecl, actparms, target,
723 ignore, TREE_TYPE (exp),
724 structure_value_addr);
726 /* If inlining succeeded, return. */
727 if ((HOST_WIDE_INT) temp != -1)
729 #ifdef ACCUMULATE_OUTGOING_ARGS
730 /* If the outgoing argument list must be preserved, push
731 the stack before executing the inlined function if it
734 for (i = reg_parm_stack_space - 1; i >= 0; i--)
735 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
738 if (stack_arg_under_construction || i >= 0)
741 = before_call ? NEXT_INSN (before_call) : get_insns ();
744 /* Look for a call in the inline function code.
745 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
746 nonzero then there is a call and it is not necessary
747 to scan the insns. */
749 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
750 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
751 if (GET_CODE (insn) == CALL_INSN)
756 /* Reserve enough stack space so that the largest
757 argument list of any function call in the inline
758 function does not overlap the argument list being
759 evaluated. This is usually an overestimate because
760 allocate_dynamic_stack_space reserves space for an
761 outgoing argument list in addition to the requested
762 space, but there is no way to ask for stack space such
763 that an argument list of a certain length can be
764 safely constructed. */
766 int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
767 #ifdef REG_PARM_STACK_SPACE
768 /* Add the stack space reserved for register arguments
769 in the inline function. What is really needed is the
770 largest value of reg_parm_stack_space in the inline
771 function, but that is not available. Using the current
772 value of reg_parm_stack_space is wrong, but gives
773 correct results on all supported machines. */
774 adjust += reg_parm_stack_space;
777 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
778 allocate_dynamic_stack_space (GEN_INT (adjust),
779 NULL_RTX, BITS_PER_UNIT);
782 emit_insns_before (seq, first_insn);
783 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
788 /* If the result is equivalent to TARGET, return TARGET to simplify
789 checks in store_expr. They can be equivalent but not equal in the
790 case of a function that returns BLKmode. */
791 if (temp != target && rtx_equal_p (temp, target))
796 /* If inlining failed, mark FNDECL as needing to be compiled
797 separately after all. If function was declared inline,
799 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
800 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
802 warning_with_decl (fndecl, "inlining failed in call to `%s'");
803 warning ("called from here");
805 mark_addressable (fndecl);
808 /* When calling a const function, we must pop the stack args right away,
809 so that the pop is deleted or moved with the call. */
813 function_call_count++;
815 if (fndecl && DECL_NAME (fndecl))
816 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
819 /* Unless it's a call to a specific function that isn't alloca,
820 if it has one argument, we must assume it might be alloca. */
823 = (!(fndecl != 0 && strcmp (name, "alloca"))
825 && TREE_CHAIN (actparms) == 0);
827 /* We assume that alloca will always be called by name. It
828 makes no sense to pass it as a pointer-to-function to
829 anything that does not understand its behavior. */
831 = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
833 && ! strcmp (name, "alloca"))
834 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
836 && ! strcmp (name, "__builtin_alloca"))));
839 /* See if this is a call to a function that can return more than once
840 or a call to longjmp. */
845 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
849 /* Disregard prefix _, __ or __x. */
852 if (name[1] == '_' && name[2] == 'x')
854 else if (name[1] == '_')
864 && (! strcmp (tname, "setjmp")
865 || ! strcmp (tname, "setjmp_syscall")))
867 && ! strcmp (tname, "sigsetjmp"))
869 && ! strcmp (tname, "savectx")));
871 && ! strcmp (tname, "siglongjmp"))
874 else if ((tname[0] == 'q' && tname[1] == 's'
875 && ! strcmp (tname, "qsetjmp"))
876 || (tname[0] == 'v' && tname[1] == 'f'
877 && ! strcmp (tname, "vfork")))
880 else if (tname[0] == 'l' && tname[1] == 'o'
881 && ! strcmp (tname, "longjmp"))
886 current_function_calls_alloca = 1;
888 /* Don't let pending stack adjusts add up to too much.
889 Also, do all pending adjustments now
890 if there is any chance this might be a call to alloca. */
892 if (pending_stack_adjust >= 32
893 || (pending_stack_adjust > 0 && may_be_alloca))
894 do_pending_stack_adjust ();
896 /* Operand 0 is a pointer-to-function; get the type of the function. */
897 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
898 if (TREE_CODE (funtype) != POINTER_TYPE)
900 funtype = TREE_TYPE (funtype);
902 /* Push the temporary stack slot level so that we can free any temporaries
906 /* Start updating where the next arg would go.
908 On some machines (such as the PA) indirect calls have a different
909 calling convention than normal calls. The last argument in
910 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
912 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
914 /* If struct_value_rtx is 0, it means pass the address
915 as if it were an extra parameter. */
916 if (structure_value_addr && struct_value_rtx == 0)
918 /* If structure_value_addr is a REG other than
919 virtual_outgoing_args_rtx, we can use always use it. If it
920 is not a REG, we must always copy it into a register.
921 If it is virtual_outgoing_args_rtx, we must copy it to another
922 register in some cases. */
923 rtx temp = (GET_CODE (structure_value_addr) != REG
924 #ifdef ACCUMULATE_OUTGOING_ARGS
925 || (stack_arg_under_construction
926 && structure_value_addr == virtual_outgoing_args_rtx)
928 ? copy_addr_to_reg (structure_value_addr)
929 : structure_value_addr);
932 = tree_cons (error_mark_node,
933 make_tree (build_pointer_type (TREE_TYPE (funtype)),
936 structure_value_addr_parm = 1;
939 /* Count the arguments and set NUM_ACTUALS. */
940 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
943 /* Compute number of named args.
944 Normally, don't include the last named arg if anonymous args follow.
945 We do include the last named arg if STRICT_ARGUMENT_NAMING is defined.
946 (If no anonymous args follow, the result of list_length is actually
947 one too large. This is harmless.)
949 If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not,
950 this machine will be able to place unnamed args that were passed in
951 registers into the stack. So treat all args as named. This allows the
952 insns emitting for a specific argument list to be independent of the
953 function declaration.
955 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
956 way to pass unnamed args in registers, so we must force them into
958 #if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING)
959 if (TYPE_ARG_TYPES (funtype) != 0)
961 = (list_length (TYPE_ARG_TYPES (funtype))
962 #ifndef STRICT_ARGUMENT_NAMING
963 /* Don't include the last named arg. */
966 /* Count the struct value address, if it is passed as a parm. */
967 + structure_value_addr_parm);
970 /* If we know nothing, treat all args as named. */
971 n_named_args = num_actuals;
973 /* Make a vector to hold all the information about each arg. */
974 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
975 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
977 args_size.constant = 0;
980 /* In this loop, we consider args in the order they are written.
981 We fill up ARGS from the front or from the back if necessary
982 so that in any case the first arg to be pushed ends up at the front. */
984 #ifdef PUSH_ARGS_REVERSED
985 i = num_actuals - 1, inc = -1;
986 /* In this case, must reverse order of args
987 so that we compute and push the last arg first. */
992 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
993 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
995 tree type = TREE_TYPE (TREE_VALUE (p));
997 enum machine_mode mode;
999 args[i].tree_value = TREE_VALUE (p);
1001 /* Replace erroneous argument with constant zero. */
1002 if (type == error_mark_node || TYPE_SIZE (type) == 0)
1003 args[i].tree_value = integer_zero_node, type = integer_type_node;
1005 /* If TYPE is a transparent union, pass things the way we would
1006 pass the first field of the union. We have already verified that
1007 the modes are the same. */
1008 if (TYPE_TRANSPARENT_UNION (type))
1009 type = TREE_TYPE (TYPE_FIELDS (type));
1011 /* Decide where to pass this arg.
1013 args[i].reg is nonzero if all or part is passed in registers.
1015 args[i].partial is nonzero if part but not all is passed in registers,
1016 and the exact value says how many words are passed in registers.
1018 args[i].pass_on_stack is nonzero if the argument must at least be
1019 computed on the stack. It may then be loaded back into registers
1020 if args[i].reg is nonzero.
1022 These decisions are driven by the FUNCTION_... macros and must agree
1023 with those made by function.c. */
1025 /* See if this argument should be passed by invisible reference. */
1026 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1027 && contains_placeholder_p (TYPE_SIZE (type)))
1028 || TREE_ADDRESSABLE (type)
1029 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1030 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1031 type, argpos < n_named_args)
1035 /* If we're compiling a thunk, pass through invisible
1036 references instead of making a copy. */
1037 if (current_function_is_thunk
1038 #ifdef FUNCTION_ARG_CALLEE_COPIES
1039 || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
1040 type, argpos < n_named_args)
1041 /* If it's in a register, we must make a copy of it too. */
1042 /* ??? Is this a sufficient test? Is there a better one? */
1043 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1044 && REG_P (DECL_RTL (args[i].tree_value)))
1045 && ! TREE_ADDRESSABLE (type))
1049 args[i].tree_value = build1 (ADDR_EXPR,
1050 build_pointer_type (type),
1051 args[i].tree_value);
1052 type = build_pointer_type (type);
1056 /* We make a copy of the object and pass the address to the
1057 function being called. */
1060 if (TYPE_SIZE (type) == 0
1061 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1062 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1063 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1064 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1065 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1067 /* This is a variable-sized object. Make space on the stack
1069 rtx size_rtx = expr_size (TREE_VALUE (p));
1071 if (old_stack_level == 0)
1073 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1074 old_pending_adj = pending_stack_adjust;
1075 pending_stack_adjust = 0;
1078 copy = gen_rtx (MEM, BLKmode,
1079 allocate_dynamic_stack_space (size_rtx,
1081 TYPE_ALIGN (type)));
1085 int size = int_size_in_bytes (type);
1086 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1089 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
1091 store_expr (args[i].tree_value, copy, 0);
1094 args[i].tree_value = build1 (ADDR_EXPR,
1095 build_pointer_type (type),
1096 make_tree (type, copy));
1097 type = build_pointer_type (type);
1101 mode = TYPE_MODE (type);
1102 unsignedp = TREE_UNSIGNED (type);
1104 #ifdef PROMOTE_FUNCTION_ARGS
1105 mode = promote_mode (type, mode, &unsignedp, 1);
1108 args[i].unsignedp = unsignedp;
1109 args[i].mode = mode;
1110 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1111 argpos < n_named_args);
1112 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1115 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1116 argpos < n_named_args);
1119 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1121 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1122 it means that we are to pass this arg in the register(s) designated
1123 by the PARALLEL, but also to pass it in the stack. */
1124 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1125 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1126 args[i].pass_on_stack = 1;
1128 /* If this is an addressable type, we must preallocate the stack
1129 since we must evaluate the object into its final location.
1131 If this is to be passed in both registers and the stack, it is simpler
1133 if (TREE_ADDRESSABLE (type)
1134 || (args[i].pass_on_stack && args[i].reg != 0))
1135 must_preallocate = 1;
1137 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1138 we cannot consider this function call constant. */
1139 if (TREE_ADDRESSABLE (type))
1142 /* Compute the stack-size of this argument. */
1143 if (args[i].reg == 0 || args[i].partial != 0
1144 #ifdef REG_PARM_STACK_SPACE
1145 || reg_parm_stack_space > 0
1147 || args[i].pass_on_stack)
1148 locate_and_pad_parm (mode, type,
1149 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1154 fndecl, &args_size, &args[i].offset,
1157 #ifndef ARGS_GROW_DOWNWARD
1158 args[i].slot_offset = args_size;
1161 #ifndef REG_PARM_STACK_SPACE
1162 /* If a part of the arg was put into registers,
1163 don't include that part in the amount pushed. */
1164 if (! args[i].pass_on_stack)
1165 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1166 / (PARM_BOUNDARY / BITS_PER_UNIT)
1167 * (PARM_BOUNDARY / BITS_PER_UNIT));
1170 /* Update ARGS_SIZE, the total stack space for args so far. */
1172 args_size.constant += args[i].size.constant;
1173 if (args[i].size.var)
1175 ADD_PARM_SIZE (args_size, args[i].size.var);
1178 /* Since the slot offset points to the bottom of the slot,
1179 we must record it after incrementing if the args grow down. */
1180 #ifdef ARGS_GROW_DOWNWARD
1181 args[i].slot_offset = args_size;
1183 args[i].slot_offset.constant = -args_size.constant;
1186 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1190 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1191 have been used, etc. */
1193 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1194 argpos < n_named_args);
1197 #ifdef FINAL_REG_PARM_STACK_SPACE
1198 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1202 /* Compute the actual size of the argument block required. The variable
1203 and constant sizes must be combined, the size may have to be rounded,
1204 and there may be a minimum required size. */
1206 original_args_size = args_size;
1209 /* If this function requires a variable-sized argument list, don't try to
1210 make a cse'able block for this call. We may be able to do this
1211 eventually, but it is too complicated to keep track of what insns go
1212 in the cse'able block and which don't. */
1215 must_preallocate = 1;
1217 args_size.var = ARGS_SIZE_TREE (args_size);
1218 args_size.constant = 0;
1220 #ifdef STACK_BOUNDARY
1221 if (STACK_BOUNDARY != BITS_PER_UNIT)
1222 args_size.var = round_up (args_size.var, STACK_BYTES);
1225 #ifdef REG_PARM_STACK_SPACE
1226 if (reg_parm_stack_space > 0)
1229 = size_binop (MAX_EXPR, args_size.var,
1230 size_int (REG_PARM_STACK_SPACE (fndecl)));
1232 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1233 /* The area corresponding to register parameters is not to count in
1234 the size of the block we need. So make the adjustment. */
1236 = size_binop (MINUS_EXPR, args_size.var,
1237 size_int (reg_parm_stack_space));
1244 #ifdef STACK_BOUNDARY
1245 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1246 / STACK_BYTES) * STACK_BYTES);
1249 #ifdef REG_PARM_STACK_SPACE
1250 args_size.constant = MAX (args_size.constant,
1251 reg_parm_stack_space);
1252 #ifdef MAYBE_REG_PARM_STACK_SPACE
1253 if (reg_parm_stack_space == 0)
1254 args_size.constant = 0;
1256 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1257 args_size.constant -= reg_parm_stack_space;
1262 /* See if we have or want to preallocate stack space.
1264 If we would have to push a partially-in-regs parm
1265 before other stack parms, preallocate stack space instead.
1267 If the size of some parm is not a multiple of the required stack
1268 alignment, we must preallocate.
1270 If the total size of arguments that would otherwise create a copy in
1271 a temporary (such as a CALL) is more than half the total argument list
1272 size, preallocation is faster.
1274 Another reason to preallocate is if we have a machine (like the m88k)
1275 where stack alignment is required to be maintained between every
1276 pair of insns, not just when the call is made. However, we assume here
1277 that such machines either do not have push insns (and hence preallocation
1278 would occur anyway) or the problem is taken care of with
1281 if (! must_preallocate)
1283 int partial_seen = 0;
1284 int copy_to_evaluate_size = 0;
1286 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1288 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1290 else if (partial_seen && args[i].reg == 0)
1291 must_preallocate = 1;
1293 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1294 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1295 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1296 || TREE_CODE (args[i].tree_value) == COND_EXPR
1297 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1298 copy_to_evaluate_size
1299 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1302 if (copy_to_evaluate_size * 2 >= args_size.constant
1303 && args_size.constant > 0)
1304 must_preallocate = 1;
1307 /* If the structure value address will reference the stack pointer, we must
1308 stabilize it. We don't need to do this if we know that we are not going
1309 to adjust the stack pointer in processing this call. */
1311 if (structure_value_addr
1312 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1313 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1315 #ifndef ACCUMULATE_OUTGOING_ARGS
1316 || args_size.constant
1319 structure_value_addr = copy_to_reg (structure_value_addr);
1321 /* If this function call is cse'able, precompute all the parameters.
1322 Note that if the parameter is constructed into a temporary, this will
1323 cause an additional copy because the parameter will be constructed
1324 into a temporary location and then copied into the outgoing arguments.
1325 If a parameter contains a call to alloca and this function uses the
1326 stack, precompute the parameter. */
1328 /* If we preallocated the stack space, and some arguments must be passed
1329 on the stack, then we must precompute any parameter which contains a
1330 function call which will store arguments on the stack.
1331 Otherwise, evaluating the parameter may clobber previous parameters
1332 which have already been stored into the stack. */
1334 for (i = 0; i < num_actuals; i++)
1336 || ((args_size.var != 0 || args_size.constant != 0)
1337 && calls_function (args[i].tree_value, 1))
1338 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1339 && calls_function (args[i].tree_value, 0)))
1341 /* If this is an addressable type, we cannot pre-evaluate it. */
1342 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1347 args[i].initial_value = args[i].value
1348 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1350 preserve_temp_slots (args[i].value);
1353 /* ANSI doesn't require a sequence point here,
1354 but PCC has one, so this will avoid some problems. */
1357 args[i].initial_value = args[i].value
1358 = protect_from_queue (args[i].initial_value, 0);
1360 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1362 = convert_modes (args[i].mode,
1363 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1364 args[i].value, args[i].unsignedp);
1367 /* Now we are about to start emitting insns that can be deleted
1368 if a libcall is deleted. */
1372 /* If we have no actual push instructions, or shouldn't use them,
1373 make space for all args right now. */
1375 if (args_size.var != 0)
1377 if (old_stack_level == 0)
1379 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1380 old_pending_adj = pending_stack_adjust;
1381 pending_stack_adjust = 0;
1382 #ifdef ACCUMULATE_OUTGOING_ARGS
1383 /* stack_arg_under_construction says whether a stack arg is
1384 being constructed at the old stack level. Pushing the stack
1385 gets a clean outgoing argument block. */
1386 old_stack_arg_under_construction = stack_arg_under_construction;
1387 stack_arg_under_construction = 0;
1390 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1394 /* Note that we must go through the motions of allocating an argument
1395 block even if the size is zero because we may be storing args
1396 in the area reserved for register arguments, which may be part of
1399 int needed = args_size.constant;
1401 /* Store the maximum argument space used. It will be pushed by
1402 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1405 if (needed > current_function_outgoing_args_size)
1406 current_function_outgoing_args_size = needed;
1408 if (must_preallocate)
1410 #ifdef ACCUMULATE_OUTGOING_ARGS
1411 /* Since the stack pointer will never be pushed, it is possible for
1412 the evaluation of a parm to clobber something we have already
1413 written to the stack. Since most function calls on RISC machines
1414 do not use the stack, this is uncommon, but must work correctly.
1416 Therefore, we save any area of the stack that was already written
1417 and that we are using. Here we set up to do this by making a new
1418 stack usage map from the old one. The actual save will be done
1421 Another approach might be to try to reorder the argument
1422 evaluations to avoid this conflicting stack usage. */
1424 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1425 /* Since we will be writing into the entire argument area, the
1426 map must be allocated for its entire size, not just the part that
1427 is the responsibility of the caller. */
1428 needed += reg_parm_stack_space;
1431 #ifdef ARGS_GROW_DOWNWARD
1432 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1435 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1438 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1440 if (initial_highest_arg_in_use)
1441 bcopy (initial_stack_usage_map, stack_usage_map,
1442 initial_highest_arg_in_use);
1444 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1445 bzero (&stack_usage_map[initial_highest_arg_in_use],
1446 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1449 /* The address of the outgoing argument list must not be copied to a
1450 register here, because argblock would be left pointing to the
1451 wrong place after the call to allocate_dynamic_stack_space below.
1454 argblock = virtual_outgoing_args_rtx;
1456 #else /* not ACCUMULATE_OUTGOING_ARGS */
1457 if (inhibit_defer_pop == 0)
1459 /* Try to reuse some or all of the pending_stack_adjust
1460 to get this space. Maybe we can avoid any pushing. */
1461 if (needed > pending_stack_adjust)
1463 needed -= pending_stack_adjust;
1464 pending_stack_adjust = 0;
1468 pending_stack_adjust -= needed;
1472 /* Special case this because overhead of `push_block' in this
1473 case is non-trivial. */
1475 argblock = virtual_outgoing_args_rtx;
1477 argblock = push_block (GEN_INT (needed), 0, 0);
1479 /* We only really need to call `copy_to_reg' in the case where push
1480 insns are going to be used to pass ARGBLOCK to a function
1481 call in ARGS. In that case, the stack pointer changes value
1482 from the allocation point to the call point, and hence
1483 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1484 But might as well always do it. */
1485 argblock = copy_to_reg (argblock);
1486 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1490 #ifdef ACCUMULATE_OUTGOING_ARGS
1491 /* The save/restore code in store_one_arg handles all cases except one:
1492 a constructor call (including a C function returning a BLKmode struct)
1493 to initialize an argument. */
1494 if (stack_arg_under_construction)
1496 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1497 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1499 rtx push_size = GEN_INT (args_size.constant);
1501 if (old_stack_level == 0)
1503 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1504 old_pending_adj = pending_stack_adjust;
1505 pending_stack_adjust = 0;
1506 /* stack_arg_under_construction says whether a stack arg is
1507 being constructed at the old stack level. Pushing the stack
1508 gets a clean outgoing argument block. */
1509 old_stack_arg_under_construction = stack_arg_under_construction;
1510 stack_arg_under_construction = 0;
1511 /* Make a new map for the new argument list. */
1512 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1513 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1514 highest_outgoing_arg_in_use = 0;
1516 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1518 /* If argument evaluation might modify the stack pointer, copy the
1519 address of the argument list to a register. */
1520 for (i = 0; i < num_actuals; i++)
1521 if (args[i].pass_on_stack)
1523 argblock = copy_addr_to_reg (argblock);
1529 /* If we preallocated stack space, compute the address of each argument.
1530 We need not ensure it is a valid memory address here; it will be
1531 validized when it is used. */
1534 rtx arg_reg = argblock;
1537 if (GET_CODE (argblock) == PLUS)
1538 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1540 for (i = 0; i < num_actuals; i++)
1542 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1543 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1546 /* Skip this parm if it will not be passed on the stack. */
1547 if (! args[i].pass_on_stack && args[i].reg != 0)
1550 if (GET_CODE (offset) == CONST_INT)
1551 addr = plus_constant (arg_reg, INTVAL (offset));
1553 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1555 addr = plus_constant (addr, arg_offset);
1556 args[i].stack = gen_rtx (MEM, args[i].mode, addr);
1557 MEM_IN_STRUCT_P (args[i].stack)
1558 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
1560 if (GET_CODE (slot_offset) == CONST_INT)
1561 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1563 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1565 addr = plus_constant (addr, arg_offset);
1566 args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
1570 #ifdef PUSH_ARGS_REVERSED
1571 #ifdef STACK_BOUNDARY
1572 /* If we push args individually in reverse order, perform stack alignment
1573 before the first push (the last arg). */
1575 anti_adjust_stack (GEN_INT (args_size.constant
1576 - original_args_size.constant));
1580 /* Don't try to defer pops if preallocating, not even from the first arg,
1581 since ARGBLOCK probably refers to the SP. */
1585 /* Get the function to call, in the form of RTL. */
1588 /* If this is the first use of the function, see if we need to
1589 make an external definition for it. */
1590 if (! TREE_USED (fndecl))
1592 assemble_external (fndecl);
1593 TREE_USED (fndecl) = 1;
1596 /* Get a SYMBOL_REF rtx for the function address. */
1597 funexp = XEXP (DECL_RTL (fndecl), 0);
1600 /* Generate an rtx (probably a pseudo-register) for the address. */
1603 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1604 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1606 /* Check the function is executable. */
1607 if (flag_check_memory_usage)
1608 emit_library_call (chkr_check_exec_libfunc, 1,
1614 /* Figure out the register where the value, if any, will come back. */
1616 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1617 && ! structure_value_addr)
1619 if (pcc_struct_value)
1620 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1623 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1626 /* Precompute all register parameters. It isn't safe to compute anything
1627 once we have started filling any specific hard regs. */
1629 for (i = 0; i < num_actuals; i++)
1630 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1634 if (args[i].value == 0)
1637 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1639 preserve_temp_slots (args[i].value);
1642 /* ANSI doesn't require a sequence point here,
1643 but PCC has one, so this will avoid some problems. */
1647 /* If we are to promote the function arg to a wider mode,
1650 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1652 = convert_modes (args[i].mode,
1653 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1654 args[i].value, args[i].unsignedp);
1656 /* If the value is expensive, and we are inside an appropriately
1657 short loop, put the value into a pseudo and then put the pseudo
1660 For small register classes, also do this if this call uses
1661 register parameters. This is to avoid reload conflicts while
1662 loading the parameters registers. */
1664 if ((! (GET_CODE (args[i].value) == REG
1665 || (GET_CODE (args[i].value) == SUBREG
1666 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1667 && args[i].mode != BLKmode
1668 && rtx_cost (args[i].value, SET) > 2
1669 #ifdef SMALL_REGISTER_CLASSES
1670 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
1671 || preserve_subexpressions_p ())
1673 && preserve_subexpressions_p ()
1676 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1679 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1680 /* The argument list is the property of the called routine and it
1681 may clobber it. If the fixed area has been used for previous
1682 parameters, we must save and restore it.
1684	 Here we compute the boundary of the area that needs to be saved, if any.  */
1686 #ifdef ARGS_GROW_DOWNWARD
1687 for (i = 0; i < reg_parm_stack_space + 1; i++)
1689 for (i = 0; i < reg_parm_stack_space; i++)
1692 if (i >= highest_outgoing_arg_in_use
1693 || stack_usage_map[i] == 0)
1696 if (low_to_save == -1)
1702 if (low_to_save >= 0)
1704 int num_to_save = high_to_save - low_to_save + 1;
1705 enum machine_mode save_mode
1706 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1709 /* If we don't have the required alignment, must do this in BLKmode. */
1710 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1711 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1712 save_mode = BLKmode;
1714 stack_area = gen_rtx (MEM, save_mode,
1715 memory_address (save_mode,
1717 #ifdef ARGS_GROW_DOWNWARD
1718 plus_constant (argblock,
1721 plus_constant (argblock,
1725 if (save_mode == BLKmode)
1727 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
1728 MEM_IN_STRUCT_P (save_area) = 0;
1729 emit_block_move (validize_mem (save_area), stack_area,
1730 GEN_INT (num_to_save),
1731 PARM_BOUNDARY / BITS_PER_UNIT);
1735 save_area = gen_reg_rtx (save_mode);
1736 emit_move_insn (save_area, stack_area);
1742 /* Now store (and compute if necessary) all non-register parms.
1743 These come before register parms, since they can require block-moves,
1744 which could clobber the registers used for register parms.
1745 Parms which have partial registers are not stored here,
1746 but we do preallocate space here if they want that. */
1748 for (i = 0; i < num_actuals; i++)
1749 if (args[i].reg == 0 || args[i].pass_on_stack)
1750 store_one_arg (&args[i], argblock, may_be_alloca,
1751 args_size.var != 0, fndecl, reg_parm_stack_space);
1753 /* If we have a parm that is passed in registers but not in memory
1754 and whose alignment does not permit a direct copy into registers,
1755 make a group of pseudos that correspond to each register that we
1758 if (STRICT_ALIGNMENT)
1759 for (i = 0; i < num_actuals; i++)
1760 if (args[i].reg != 0 && ! args[i].pass_on_stack
1761 && args[i].mode == BLKmode
1762 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1763 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1765 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1766 int big_endian_correction = 0;
1768 args[i].n_aligned_regs
1769 = args[i].partial ? args[i].partial
1770 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1772 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1773 * args[i].n_aligned_regs);
1775 /* Structures smaller than a word are aligned to the least
1776 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1777 this means we must skip the empty high order bytes when
1778 calculating the bit offset. */
1779 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1780 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
1782 for (j = 0; j < args[i].n_aligned_regs; j++)
1784 rtx reg = gen_reg_rtx (word_mode);
1785 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1786 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1789 args[i].aligned_regs[j] = reg;
1791 /* Clobber REG and move each partword into it. Ensure we don't
1792 go past the end of the structure. Note that the loop below
1793 works because we've already verified that padding
1794 and endianness are compatible.
1796	     We use to emit a clobber here but that doesn't let later
1797 passes optimize the instructions we emit. By storing 0 into
1798 the register later passes know the first AND to zero out the
1799 bitfield being set in the register is unnecessary. The store
1800 of 0 will be deleted as will at least the first AND. */
1802 emit_move_insn (reg, const0_rtx);
1805 bitpos < BITS_PER_WORD && bytes > 0;
1806 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1808 int xbitpos = bitpos + big_endian_correction;
1810 store_bit_field (reg, bitsize, xbitpos, word_mode,
1811 extract_bit_field (word, bitsize, bitpos, 1,
1812 NULL_RTX, word_mode,
1814 bitsize / BITS_PER_UNIT,
1816 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1821 /* Now store any partially-in-registers parm.
1822 This is the last place a block-move can happen. */
1824 for (i = 0; i < num_actuals; i++)
1825 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1826 store_one_arg (&args[i], argblock, may_be_alloca,
1827 args_size.var != 0, fndecl, reg_parm_stack_space);
1829 #ifndef PUSH_ARGS_REVERSED
1830 #ifdef STACK_BOUNDARY
1831 /* If we pushed args in forward order, perform stack alignment
1832 after pushing the last arg. */
1834 anti_adjust_stack (GEN_INT (args_size.constant
1835 - original_args_size.constant));
1839 /* If register arguments require space on the stack and stack space
1840 was not preallocated, allocate stack space here for arguments
1841 passed in registers. */
1842 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1843 if (must_preallocate == 0 && reg_parm_stack_space > 0)
1844 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1847 /* Pass the function the address in which to return a structure value. */
1848 if (structure_value_addr && ! structure_value_addr_parm)
1850 emit_move_insn (struct_value_rtx,
1852 force_operand (structure_value_addr,
1855 /* Mark the memory for the aggregate as write-only. */
1856 if (flag_check_memory_usage)
1857 emit_library_call (chkr_set_right_libfunc, 1,
1859 structure_value_addr, ptr_mode,
1860 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
1861 GEN_INT (MEMORY_USE_WO), QImode);
1863 if (GET_CODE (struct_value_rtx) == REG)
1864 use_reg (&call_fusage, struct_value_rtx);
1867 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
1869 /* Now do the register loads required for any wholly-register parms or any
1870 parms which are passed both on the stack and in a register. Their
1871 expressions were already evaluated.
1873 Mark all register-parms as living through the call, putting these USE
1874 insns in the CALL_INSN_FUNCTION_USAGE field. */
1876 for (i = 0; i < num_actuals; i++)
1878 rtx reg = args[i].reg;
1879 int partial = args[i].partial;
1884 /* Set to non-negative if must move a word at a time, even if just
1885	 one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1886 we just use a normal move insn. This value can be zero if the
1887 argument is a zero size structure with no fields. */
1888 nregs = (partial ? partial
1889 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1890 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1891 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1894 /* Handle calls that pass values in multiple non-contiguous
1895 locations. The Irix 6 ABI has examples of this. */
1897 if (GET_CODE (reg) == PARALLEL)
1898 emit_group_load (reg, args[i].value);
1900 /* If simple case, just do move. If normal partial, store_one_arg
1901 has already loaded the register for us. In all other cases,
1902 load the register(s) from memory. */
1904 else if (nregs == -1)
1905 emit_move_insn (reg, args[i].value);
1907 /* If we have pre-computed the values to put in the registers in
1908 the case of non-aligned structures, copy them in now. */
1910 else if (args[i].n_aligned_regs != 0)
1911 for (j = 0; j < args[i].n_aligned_regs; j++)
1912 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1913 args[i].aligned_regs[j]);
1915 else if (partial == 0 || args[i].pass_on_stack)
1916 move_block_to_reg (REGNO (reg),
1917 validize_mem (args[i].value), nregs,
1920 /* Handle calls that pass values in multiple non-contiguous
1921 locations. The Irix 6 ABI has examples of this. */
1922 if (GET_CODE (reg) == PARALLEL)
1923 use_group_regs (&call_fusage, reg);
1924 else if (nregs == -1)
1925 use_reg (&call_fusage, reg);
1927 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1931 /* Perform postincrements before actually calling the function. */
1934 /* All arguments and registers used for the call must be set up by now! */
1936 /* Generate the actual call instruction. */
1937 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
1938 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1939 valreg, old_inhibit_defer_pop, call_fusage, is_const);
1941 /* If call is cse'able, make appropriate pair of reg-notes around it.
1942 Test valreg so we don't crash; may safely ignore `const'
1943 if return type is void. Disable for PARALLEL return values, because
1944 we have no way to move such values into a pseudo register. */
1945 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
1948 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1951 /* Construct an "equal form" for the value which mentions all the
1952 arguments in order as well as the function name. */
1953 #ifdef PUSH_ARGS_REVERSED
1954 for (i = 0; i < num_actuals; i++)
1955 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1957 for (i = num_actuals - 1; i >= 0; i--)
1958 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1960 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1962 insns = get_insns ();
1965 emit_libcall_block (insns, temp, valreg, note);
1971 /* Otherwise, just write out the sequence without a note. */
1972 rtx insns = get_insns ();
1978 /* For calls to `setjmp', etc., inform flow.c it should complain
1979 if nonvolatile values are live. */
1983 emit_note (name, NOTE_INSN_SETJMP);
1984 current_function_calls_setjmp = 1;
1988 current_function_calls_longjmp = 1;
1990 /* Notice functions that cannot return.
1991 If optimizing, insns emitted below will be dead.
1992 If not optimizing, they will exist, which is useful
1993 if the user uses the `return' command in the debugger. */
1995 if (is_volatile || is_longjmp)
1998 /* If value type not void, return an rtx for the value. */
2000 /* If there are cleanups to be called, don't use a hard reg as target.
2001 We need to double check this and see if it matters anymore. */
2002 if (any_pending_cleanups ()
2003 && target && REG_P (target)
2004 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2007 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2010 target = const0_rtx;
2012 else if (structure_value_addr)
2014 if (target == 0 || GET_CODE (target) != MEM)
2016 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2017 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2018 structure_value_addr));
2019 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2022 else if (pcc_struct_value)
2026	  /* We used to leave the value in the location that it is
2027 returned in, but that causes problems if it is used more
2028 than once in one expression. Rather than trying to track
2029 when a copy is required, we always copy when TARGET is
2030 not specified. This calling sequence is only used on
2031 a few machines and TARGET is usually nonzero. */
2032 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2034 target = assign_stack_temp (BLKmode,
2035 int_size_in_bytes (TREE_TYPE (exp)),
2038 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2040 /* Save this temp slot around the pop below. */
2041 preserve_temp_slots (target);
2044 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2047 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2048 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2049 copy_to_reg (valreg)));
2051 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
2053 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2055 /* Handle calls that return values in multiple non-contiguous locations.
2056 The Irix 6 ABI has examples of this. */
2057 else if (GET_CODE (valreg) == PARALLEL)
2061 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2062 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2063 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2064 preserve_temp_slots (target);
2067 emit_group_store (target, valreg);
2069 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2070 && GET_MODE (target) == GET_MODE (valreg))
2071 /* TARGET and VALREG cannot be equal at this point because the latter
2072 would not have REG_FUNCTION_VALUE_P true, while the former would if
2073 it were referring to the same register.
2075 If they refer to the same register, this move will be a no-op, except
2076 when function inlining is being done. */
2077 emit_move_insn (target, valreg);
2078 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2080 /* Some machines (the PA for example) want to return all small
2081 structures in registers regardless of the structure's alignment.
2083 Deal with them explicitly by copying from the return registers
2084 into the target MEM locations. */
2085 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2086 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2088 enum machine_mode tmpmode;
2090 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2091 int bitpos, xbitpos, big_endian_correction = 0;
2095 target = assign_stack_temp (BLKmode, bytes, 0);
2096 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2097 preserve_temp_slots (target);
2100 /* This code assumes valreg is at least a full word. If it isn't,
2101 copy it into a new pseudo which is a full word. */
2102 if (GET_MODE (valreg) != BLKmode
2103 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
2104 valreg = convert_to_mode (word_mode, valreg,
2105 TREE_UNSIGNED (TREE_TYPE (exp)));
2107 /* Structures whose size is not a multiple of a word are aligned
2108 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2109 machine, this means we must skip the empty high order bytes when
2110 calculating the bit offset. */
2111 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2112 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2115	      /* Copy the structure BITSIZE bits at a time.
2117 We could probably emit more efficient code for machines
2118 which do not use strict alignment, but it doesn't seem
2119 worth the effort at the current time. */
2120 for (bitpos = 0, xbitpos = big_endian_correction;
2121 bitpos < bytes * BITS_PER_UNIT;
2122 bitpos += bitsize, xbitpos += bitsize)
2125 /* We need a new source operand each time xbitpos is on a
2126 word boundary and when xbitpos == big_endian_correction
2127 (the first time through). */
2128 if (xbitpos % BITS_PER_WORD == 0
2129 || xbitpos == big_endian_correction)
2130 src = operand_subword_force (valreg,
2131 xbitpos / BITS_PER_WORD,
2134 /* We need a new destination operand each time bitpos is on
2136 if (bitpos % BITS_PER_WORD == 0)
2137 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
2139 /* Use xbitpos for the source extraction (right justified) and
2140 bitpos for the destination store (left justified). */
2141 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2142 extract_bit_field (src, bitsize,
2143 xbitpos % BITS_PER_WORD, 1,
2144 NULL_RTX, word_mode,
2146 bitsize / BITS_PER_UNIT,
2148 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2152 target = copy_to_reg (valreg);
2154 #ifdef PROMOTE_FUNCTION_RETURN
2155 /* If we promoted this return value, make the proper SUBREG. TARGET
2156 might be const0_rtx here, so be careful. */
2157 if (GET_CODE (target) == REG
2158 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2159 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2161 tree type = TREE_TYPE (exp);
2162 int unsignedp = TREE_UNSIGNED (type);
2164 /* If we don't promote as expected, something is wrong. */
2165 if (GET_MODE (target)
2166 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2169 target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
2170 SUBREG_PROMOTED_VAR_P (target) = 1;
2171 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2175 /* If size of args is variable or this was a constructor call for a stack
2176 argument, restore saved stack-pointer value. */
2178 if (old_stack_level)
2180 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2181 pending_stack_adjust = old_pending_adj;
2182 #ifdef ACCUMULATE_OUTGOING_ARGS
2183 stack_arg_under_construction = old_stack_arg_under_construction;
2184 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2185 stack_usage_map = initial_stack_usage_map;
2188 #ifdef ACCUMULATE_OUTGOING_ARGS
2191 #ifdef REG_PARM_STACK_SPACE
2194 enum machine_mode save_mode = GET_MODE (save_area);
2196 = gen_rtx (MEM, save_mode,
2197 memory_address (save_mode,
2198 #ifdef ARGS_GROW_DOWNWARD
2199 plus_constant (argblock, - high_to_save)
2201 plus_constant (argblock, low_to_save)
2205 if (save_mode != BLKmode)
2206 emit_move_insn (stack_area, save_area);
2208 emit_block_move (stack_area, validize_mem (save_area),
2209 GEN_INT (high_to_save - low_to_save + 1),
2210 PARM_BOUNDARY / BITS_PER_UNIT);
2214 /* If we saved any argument areas, restore them. */
2215 for (i = 0; i < num_actuals; i++)
2216 if (args[i].save_area)
2218 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2220 = gen_rtx (MEM, save_mode,
2221 memory_address (save_mode,
2222 XEXP (args[i].stack_slot, 0)));
2224 if (save_mode != BLKmode)
2225 emit_move_insn (stack_area, args[i].save_area);
2227 emit_block_move (stack_area, validize_mem (args[i].save_area),
2228 GEN_INT (args[i].size.constant),
2229 PARM_BOUNDARY / BITS_PER_UNIT);
2232 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2233 stack_usage_map = initial_stack_usage_map;
2237 /* If this was alloca, record the new stack level for nonlocal gotos.
2238 Check for the handler slots since we might not have a save area
2239 for non-local gotos. */
2241 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
2242 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2249 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2250 (emitting the queue unless NO_QUEUE is nonzero),
2251 for a value of mode OUTMODE,
2252 with NARGS different arguments, passed as alternating rtx values
2253 and machine_modes to convert them to.
2254 The rtx values should have been passed through protect_from_queue already.
2256 NO_QUEUE will be true if and only if the library call is a `const' call
2257 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2258 to the variable is_const in expand_call.
2260 NO_QUEUE must be true for const calls, because if it isn't, then
2261 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2262 and will be lost if the libcall sequence is optimized away.
2264 NO_QUEUE must be false for non-const calls, because if it isn't, the
2265 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2266 optimized. For instance, the instruction scheduler may incorrectly
2267 move memory references across the non-const call. */
2270 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2276 enum machine_mode outmode;
2280 /* Total size in bytes of all the stack-parms scanned so far. */
2281 struct args_size args_size;
2282 /* Size of arguments before any adjustments (such as rounding). */
2283 struct args_size original_args_size;
2284 register int argnum;
2289 CUMULATIVE_ARGS args_so_far;
2290 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2291 struct args_size offset; struct args_size size; rtx save_area; };
2293 int old_inhibit_defer_pop = inhibit_defer_pop;
2294 rtx call_fusage = 0;
2295 /* Size of the stack reserved for parameter registers. */
2296 int reg_parm_stack_space = 0;
2297 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2298 /* Define the boundary of the register parm stack space that needs to be
2300 int low_to_save = -1, high_to_save;
2301 rtx save_area = 0; /* Place that it is saved */
2304 #ifdef ACCUMULATE_OUTGOING_ARGS
2305 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2306 char *initial_stack_usage_map = stack_usage_map;
2310 #ifdef REG_PARM_STACK_SPACE
2311 #ifdef MAYBE_REG_PARM_STACK_SPACE
2312 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2314 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2318 VA_START (p, nargs);
2321 orgfun = va_arg (p, rtx);
2322 no_queue = va_arg (p, int);
2323 outmode = va_arg (p, enum machine_mode);
2324 nargs = va_arg (p, int);
2329 /* Copy all the libcall-arguments out of the varargs data
2330 and into a vector ARGVEC.
2332 Compute how to pass each argument. We only support a very small subset
2333 of the full argument passing conventions to limit complexity here since
2334 library functions shouldn't have many args. */
2336 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2337 bzero ((char *) argvec, nargs * sizeof (struct arg));
2340 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2342 args_size.constant = 0;
2347 for (count = 0; count < nargs; count++)
2349 rtx val = va_arg (p, rtx);
2350 enum machine_mode mode = va_arg (p, enum machine_mode);
2352 /* We cannot convert the arg value to the mode the library wants here;
2353 must do it earlier where we know the signedness of the arg. */
2355 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2358 /* On some machines, there's no way to pass a float to a library fcn.
2359 Pass it as a double instead. */
2360 #ifdef LIBGCC_NEEDS_DOUBLE
2361 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2362 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2365 /* There's no need to call protect_from_queue, because
2366 either emit_move_insn or emit_push_insn will do that. */
2368 /* Make sure it is a reasonable operand for a move or push insn. */
2369 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2370 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2371 val = force_operand (val, NULL_RTX);
2373 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2374 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2376 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2377 be viewed as just an efficiency improvement. */
2378 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2379 emit_move_insn (slot, val);
2380 val = force_operand (XEXP (slot, 0), NULL_RTX);
2385 argvec[count].value = val;
2386 argvec[count].mode = mode;
2388 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2389 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2391 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2392 argvec[count].partial
2393 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2395 argvec[count].partial = 0;
2398 locate_and_pad_parm (mode, NULL_TREE,
2399 argvec[count].reg && argvec[count].partial == 0,
2400 NULL_TREE, &args_size, &argvec[count].offset,
2401 &argvec[count].size);
2403 if (argvec[count].size.var)
2406 #ifndef REG_PARM_STACK_SPACE
2407 if (argvec[count].partial)
2408 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2411 if (argvec[count].reg == 0 || argvec[count].partial != 0
2412 #ifdef REG_PARM_STACK_SPACE
2416 args_size.constant += argvec[count].size.constant;
2418 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2422 #ifdef FINAL_REG_PARM_STACK_SPACE
2423 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2427 /* If this machine requires an external definition for library
2428 functions, write one out. */
2429 assemble_external_libcall (fun);
2431 original_args_size = args_size;
2432 #ifdef STACK_BOUNDARY
2433 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2434 / STACK_BYTES) * STACK_BYTES);
2437 #ifdef REG_PARM_STACK_SPACE
2438 args_size.constant = MAX (args_size.constant,
2439 reg_parm_stack_space);
2440 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2441 args_size.constant -= reg_parm_stack_space;
2445 if (args_size.constant > current_function_outgoing_args_size)
2446 current_function_outgoing_args_size = args_size.constant;
2448 #ifdef ACCUMULATE_OUTGOING_ARGS
2449 /* Since the stack pointer will never be pushed, it is possible for
2450 the evaluation of a parm to clobber something we have already
2451 written to the stack. Since most function calls on RISC machines
2452 do not use the stack, this is uncommon, but must work correctly.
2454 Therefore, we save any area of the stack that was already written
2455 and that we are using. Here we set up to do this by making a new
2456 stack usage map from the old one.
2458 Another approach might be to try to reorder the argument
2459 evaluations to avoid this conflicting stack usage. */
2461 needed = args_size.constant;
2462 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2463 /* Since we will be writing into the entire argument area, the
2464 map must be allocated for its entire size, not just the part that
2465 is the responsibility of the caller. */
2466 needed += reg_parm_stack_space;
2469 #ifdef ARGS_GROW_DOWNWARD
2470 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2473 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2476 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2478 if (initial_highest_arg_in_use)
2479 bcopy (initial_stack_usage_map, stack_usage_map,
2480 initial_highest_arg_in_use);
2482 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2483 bzero (&stack_usage_map[initial_highest_arg_in_use],
2484 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2487 /* The address of the outgoing argument list must not be copied to a
2488 register here, because argblock would be left pointing to the
2489 wrong place after the call to allocate_dynamic_stack_space below.
2492 argblock = virtual_outgoing_args_rtx;
2493 #else /* not ACCUMULATE_OUTGOING_ARGS */
2494 #ifndef PUSH_ROUNDING
2495 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2499 #ifdef PUSH_ARGS_REVERSED
2500 #ifdef STACK_BOUNDARY
2501 /* If we push args individually in reverse order, perform stack alignment
2502 before the first push (the last arg). */
2504 anti_adjust_stack (GEN_INT (args_size.constant
2505 - original_args_size.constant));
2509 #ifdef PUSH_ARGS_REVERSED
2517 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2518 /* The argument list is the property of the called routine and it
2519 may clobber it. If the fixed area has been used for previous
2520 parameters, we must save and restore it.
2522 Here we compute the boundary of the that needs to be saved, if any. */
2524 #ifdef ARGS_GROW_DOWNWARD
2525 for (count = 0; count < reg_parm_stack_space + 1; count++)
2527 for (count = 0; count < reg_parm_stack_space; count++)
2530 if (count >= highest_outgoing_arg_in_use
2531 || stack_usage_map[count] == 0)
2534 if (low_to_save == -1)
2535 low_to_save = count;
2537 high_to_save = count;
2540 if (low_to_save >= 0)
2542 int num_to_save = high_to_save - low_to_save + 1;
2543 enum machine_mode save_mode
2544 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2547 /* If we don't have the required alignment, must do this in BLKmode. */
2548 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2549 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2550 save_mode = BLKmode;
2552 stack_area = gen_rtx (MEM, save_mode,
2553 memory_address (save_mode,
2555 #ifdef ARGS_GROW_DOWNWARD
2556 plus_constant (argblock,
2559 plus_constant (argblock,
2563 if (save_mode == BLKmode)
2565 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2566 MEM_IN_STRUCT_P (save_area) = 0;
2567 emit_block_move (validize_mem (save_area), stack_area,
2568 GEN_INT (num_to_save),
2569 PARM_BOUNDARY / BITS_PER_UNIT);
2573 save_area = gen_reg_rtx (save_mode);
2574 emit_move_insn (save_area, stack_area);
2579 /* Push the args that need to be pushed. */
2581 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2582 are to be pushed. */
2583 for (count = 0; count < nargs; count++, argnum += inc)
2585 register enum machine_mode mode = argvec[argnum].mode;
2586 register rtx val = argvec[argnum].value;
2587 rtx reg = argvec[argnum].reg;
2588 int partial = argvec[argnum].partial;
2589 int lower_bound, upper_bound, i;
2591 if (! (reg != 0 && partial == 0))
2593 #ifdef ACCUMULATE_OUTGOING_ARGS
2594 /* If this is being stored into a pre-allocated, fixed-size, stack
2595 area, save any previous data at that location. */
2597 #ifdef ARGS_GROW_DOWNWARD
2598 /* stack_slot is negative, but we want to index stack_usage_map
2599 with positive values. */
2600 upper_bound = -argvec[argnum].offset.constant + 1;
2601 lower_bound = upper_bound - argvec[argnum].size.constant;
2603 lower_bound = argvec[argnum].offset.constant;
2604 upper_bound = lower_bound + argvec[argnum].size.constant;
2607 for (i = lower_bound; i < upper_bound; i++)
2608 if (stack_usage_map[i]
2609 #ifdef REG_PARM_STACK_SPACE
2610 /* Don't store things in the fixed argument area at this point;
2611 it has already been saved. */
2612 && i > reg_parm_stack_space
2617 if (i != upper_bound)
2619 /* We need to make a save area. See what mode we can make it. */
2620 enum machine_mode save_mode
2621 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2624 = gen_rtx (MEM, save_mode,
2625 memory_address (save_mode, plus_constant (argblock,
2626 argvec[argnum].offset.constant)));
2627 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2628 emit_move_insn (argvec[argnum].save_area, stack_area);
2631 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2632 argblock, GEN_INT (argvec[argnum].offset.constant));
2634 #ifdef ACCUMULATE_OUTGOING_ARGS
2635 /* Now mark the segment we just used. */
2636 for (i = lower_bound; i < upper_bound; i++)
2637 stack_usage_map[i] = 1;
2644 #ifndef PUSH_ARGS_REVERSED
2645 #ifdef STACK_BOUNDARY
2646 /* If we pushed args in forward order, perform stack alignment
2647 after pushing the last arg. */
2649 anti_adjust_stack (GEN_INT (args_size.constant
2650 - original_args_size.constant));
2654 #ifdef PUSH_ARGS_REVERSED
2660 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2662 /* Now load any reg parms into their regs. */
2664 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2665 are to be pushed. */
2666 for (count = 0; count < nargs; count++, argnum += inc)
2668 register enum machine_mode mode = argvec[argnum].mode;
2669 register rtx val = argvec[argnum].value;
2670 rtx reg = argvec[argnum].reg;
2671 int partial = argvec[argnum].partial;
2673 if (reg != 0 && partial == 0)
2674 emit_move_insn (reg, val);
2678 /* For version 1.37, try deleting this entirely. */
2682 /* Any regs containing parms remain in use through the call. */
2683 for (count = 0; count < nargs; count++)
2684 if (argvec[count].reg != 0)
2685 use_reg (&call_fusage, argvec[count].reg);
2687 /* Don't allow popping to be deferred, since then
2688 cse'ing of library calls could delete a call and leave the pop. */
2691 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2692 will set inhibit_defer_pop to that value. */
2694 /* The return type is needed to decide how many bytes the function pops.
2695 Signedness plays no role in that, so for simplicity, we pretend it's
2696 always signed. We also assume that the list of arguments passed has
2697 no impact, so we pretend it is unknown. */
2700 get_identifier (XSTR (orgfun, 0)),
2701 build_function_type (outmode == VOIDmode ? void_type_node
2702 : type_for_mode (outmode, 0), NULL_TREE),
2703 args_size.constant, 0,
2704 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2705 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2706 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2710 /* Now restore inhibit_defer_pop to its actual original value. */
2713 #ifdef ACCUMULATE_OUTGOING_ARGS
2714 #ifdef REG_PARM_STACK_SPACE
2717 enum machine_mode save_mode = GET_MODE (save_area);
2719 = gen_rtx (MEM, save_mode,
2720 memory_address (save_mode,
2721 #ifdef ARGS_GROW_DOWNWARD
2722 plus_constant (argblock, - high_to_save)
2724 plus_constant (argblock, low_to_save)
2728 if (save_mode != BLKmode)
2729 emit_move_insn (stack_area, save_area);
2731 emit_block_move (stack_area, validize_mem (save_area),
2732 GEN_INT (high_to_save - low_to_save + 1),
2733 PARM_BOUNDARY / BITS_PER_UNIT);
2737 /* If we saved any argument areas, restore them. */
2738 for (count = 0; count < nargs; count++)
2739 if (argvec[count].save_area)
2741 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2743 = gen_rtx (MEM, save_mode,
2744 memory_address (save_mode, plus_constant (argblock,
2745 argvec[count].offset.constant)));
2747 emit_move_insn (stack_area, argvec[count].save_area);
2750 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2751 stack_usage_map = initial_stack_usage_map;
2756 /* Like emit_library_call except that an extra argument, VALUE,
2757 comes second and says where to store the result.
2758 (If VALUE is zero, this function chooses a convenient way
2759 to return the value.
2761 This function returns an rtx for where the value is to be found.
2762 If VALUE is nonzero, VALUE is returned. */
2765 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2766 enum machine_mode outmode, int nargs, ...))
2772 enum machine_mode outmode;
2776 /* Total size in bytes of all the stack-parms scanned so far. */
2777 struct args_size args_size;
2778 /* Size of arguments before any adjustments (such as rounding). */
2779 struct args_size original_args_size;
2780 register int argnum;
2785 CUMULATIVE_ARGS args_so_far;
2786 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2787 struct args_size offset; struct args_size size; rtx save_area; };
2789 int old_inhibit_defer_pop = inhibit_defer_pop;
2790 rtx call_fusage = 0;
2791 /* Size of the stack reserved for parameter registers. */
2792 int reg_parm_stack_space = 0;
2794 int pcc_struct_value = 0;
2795 int struct_value_size = 0;
2799 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2800 /* Define the boundary of the register parm stack space that needs to be
2802 int low_to_save = -1, high_to_save;
2803 rtx save_area = 0; /* Place that it is saved */
2806 #ifdef ACCUMULATE_OUTGOING_ARGS
2807 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2808 char *initial_stack_usage_map = stack_usage_map;
2811 #ifdef REG_PARM_STACK_SPACE
2812 #ifdef MAYBE_REG_PARM_STACK_SPACE
2813 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2815 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2819 VA_START (p, nargs);
2822 orgfun = va_arg (p, rtx);
2823 value = va_arg (p, rtx);
2824 no_queue = va_arg (p, int);
2825 outmode = va_arg (p, enum machine_mode);
2826 nargs = va_arg (p, int);
2829 is_const = no_queue;
2832 /* If this kind of value comes back in memory,
2833 decide where in memory it should come back. */
2834 if (aggregate_value_p (type_for_mode (outmode, 0)))
2836 #ifdef PCC_STATIC_STRUCT_RETURN
2838 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2840 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2841 pcc_struct_value = 1;
2843 value = gen_reg_rtx (outmode);
2844 #else /* not PCC_STATIC_STRUCT_RETURN */
2845 struct_value_size = GET_MODE_SIZE (outmode);
2846 if (value != 0 && GET_CODE (value) == MEM)
2849 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2852 /* This call returns a big structure. */
2856 /* ??? Unfinished: must pass the memory address as an argument. */
2858 /* Copy all the libcall-arguments out of the varargs data
2859 and into a vector ARGVEC.
2861 Compute how to pass each argument. We only support a very small subset
2862 of the full argument passing conventions to limit complexity here since
2863 library functions shouldn't have many args. */
2865 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2866 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
2868 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2870 args_size.constant = 0;
2877 /* If there's a structure value address to be passed,
2878 either pass it in the special place, or pass it as an extra argument. */
2879 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2881 rtx addr = XEXP (mem_value, 0);
2884 /* Make sure it is a reasonable operand for a move or push insn. */
2885 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2886 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2887 addr = force_operand (addr, NULL_RTX);
2889 argvec[count].value = addr;
2890 argvec[count].mode = Pmode;
2891 argvec[count].partial = 0;
2893 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
2894 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2895 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
2899 locate_and_pad_parm (Pmode, NULL_TREE,
2900 argvec[count].reg && argvec[count].partial == 0,
2901 NULL_TREE, &args_size, &argvec[count].offset,
2902 &argvec[count].size);
2905 if (argvec[count].reg == 0 || argvec[count].partial != 0
2906 #ifdef REG_PARM_STACK_SPACE
2910 args_size.constant += argvec[count].size.constant;
2912 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
2917 for (; count < nargs; count++)
2919 rtx val = va_arg (p, rtx);
2920 enum machine_mode mode = va_arg (p, enum machine_mode);
2922 /* We cannot convert the arg value to the mode the library wants here;
2923 must do it earlier where we know the signedness of the arg. */
2925 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2928 /* On some machines, there's no way to pass a float to a library fcn.
2929 Pass it as a double instead. */
2930 #ifdef LIBGCC_NEEDS_DOUBLE
2931 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2932 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2935 /* There's no need to call protect_from_queue, because
2936 either emit_move_insn or emit_push_insn will do that. */
2938 /* Make sure it is a reasonable operand for a move or push insn. */
2939 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2940 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2941 val = force_operand (val, NULL_RTX);
2943 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2944 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2946 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2947 be viewed as just an efficiency improvement. */
2948 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2949 emit_move_insn (slot, val);
2950 val = XEXP (slot, 0);
2955 argvec[count].value = val;
2956 argvec[count].mode = mode;
2958 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2959 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2961 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2962 argvec[count].partial
2963 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2965 argvec[count].partial = 0;
2968 locate_and_pad_parm (mode, NULL_TREE,
2969 argvec[count].reg && argvec[count].partial == 0,
2970 NULL_TREE, &args_size, &argvec[count].offset,
2971 &argvec[count].size);
2973 if (argvec[count].size.var)
2976 #ifndef REG_PARM_STACK_SPACE
2977 if (argvec[count].partial)
2978 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2981 if (argvec[count].reg == 0 || argvec[count].partial != 0
2982 #ifdef REG_PARM_STACK_SPACE
2986 args_size.constant += argvec[count].size.constant;
2988 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2992 #ifdef FINAL_REG_PARM_STACK_SPACE
2993 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2996 /* If this machine requires an external definition for library
2997 functions, write one out. */
2998 assemble_external_libcall (fun);
3000 original_args_size = args_size;
3001 #ifdef STACK_BOUNDARY
3002 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3003 / STACK_BYTES) * STACK_BYTES);
3006 #ifdef REG_PARM_STACK_SPACE
3007 args_size.constant = MAX (args_size.constant,
3008 reg_parm_stack_space);
3009 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3010 args_size.constant -= reg_parm_stack_space;
3014 if (args_size.constant > current_function_outgoing_args_size)
3015 current_function_outgoing_args_size = args_size.constant;
3017 #ifdef ACCUMULATE_OUTGOING_ARGS
3018 /* Since the stack pointer will never be pushed, it is possible for
3019 the evaluation of a parm to clobber something we have already
3020 written to the stack. Since most function calls on RISC machines
3021 do not use the stack, this is uncommon, but must work correctly.
3023 Therefore, we save any area of the stack that was already written
3024 and that we are using. Here we set up to do this by making a new
3025 stack usage map from the old one.
3027 Another approach might be to try to reorder the argument
3028 evaluations to avoid this conflicting stack usage. */
3030 needed = args_size.constant;
3031 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
3032 /* Since we will be writing into the entire argument area, the
3033 map must be allocated for its entire size, not just the part that
3034 is the responsibility of the caller. */
3035 needed += reg_parm_stack_space;
3038 #ifdef ARGS_GROW_DOWNWARD
3039 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3042 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3045 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3047 if (initial_highest_arg_in_use)
3048 bcopy (initial_stack_usage_map, stack_usage_map,
3049 initial_highest_arg_in_use);
3051 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3052 bzero (&stack_usage_map[initial_highest_arg_in_use],
3053 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3056 /* The address of the outgoing argument list must not be copied to a
3057 register here, because argblock would be left pointing to the
3058 wrong place after the call to allocate_dynamic_stack_space below.
3061 argblock = virtual_outgoing_args_rtx;
3062 #else /* not ACCUMULATE_OUTGOING_ARGS */
3063 #ifndef PUSH_ROUNDING
3064 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3068 #ifdef PUSH_ARGS_REVERSED
3069 #ifdef STACK_BOUNDARY
3070 /* If we push args individually in reverse order, perform stack alignment
3071 before the first push (the last arg). */
3073 anti_adjust_stack (GEN_INT (args_size.constant
3074 - original_args_size.constant));
3078 #ifdef PUSH_ARGS_REVERSED
3086 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3087 /* The argument list is the property of the called routine and it
3088 may clobber it. If the fixed area has been used for previous
3089 parameters, we must save and restore it.
3091 Here we compute the boundary of the area that needs to be saved, if any. */
3093 #ifdef ARGS_GROW_DOWNWARD
3094 for (count = 0; count < reg_parm_stack_space + 1; count++)
3096 for (count = 0; count < reg_parm_stack_space; count++)
3099 if (count >= highest_outgoing_arg_in_use
3100 || stack_usage_map[count] == 0)
3103 if (low_to_save == -1)
3104 low_to_save = count;
3106 high_to_save = count;
3109 if (low_to_save >= 0)
3111 int num_to_save = high_to_save - low_to_save + 1;
3112 enum machine_mode save_mode
3113 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3116 /* If we don't have the required alignment, must do this in BLKmode. */
3117 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3118 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3119 save_mode = BLKmode;
3121 stack_area = gen_rtx (MEM, save_mode,
3122 memory_address (save_mode,
3124 #ifdef ARGS_GROW_DOWNWARD
3125 plus_constant (argblock,
3128 plus_constant (argblock,
3132 if (save_mode == BLKmode)
3134 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3135 MEM_IN_STRUCT_P (save_area) = 0;
3136 emit_block_move (validize_mem (save_area), stack_area,
3137 GEN_INT (num_to_save),
3138 PARM_BOUNDARY / BITS_PER_UNIT);
3142 save_area = gen_reg_rtx (save_mode);
3143 emit_move_insn (save_area, stack_area);
3148 /* Push the args that need to be pushed. */
3150 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3151 are to be pushed. */
3152 for (count = 0; count < nargs; count++, argnum += inc)
3154 register enum machine_mode mode = argvec[argnum].mode;
3155 register rtx val = argvec[argnum].value;
3156 rtx reg = argvec[argnum].reg;
3157 int partial = argvec[argnum].partial;
3158 int lower_bound, upper_bound, i;
3160 if (! (reg != 0 && partial == 0))
3162 #ifdef ACCUMULATE_OUTGOING_ARGS
3163 /* If this is being stored into a pre-allocated, fixed-size, stack
3164 area, save any previous data at that location. */
3166 #ifdef ARGS_GROW_DOWNWARD
3167 /* stack_slot is negative, but we want to index stack_usage_map
3168 with positive values. */
3169 upper_bound = -argvec[argnum].offset.constant + 1;
3170 lower_bound = upper_bound - argvec[argnum].size.constant;
3172 lower_bound = argvec[argnum].offset.constant;
3173 upper_bound = lower_bound + argvec[argnum].size.constant;
3176 for (i = lower_bound; i < upper_bound; i++)
3177 if (stack_usage_map[i]
3178 #ifdef REG_PARM_STACK_SPACE
3179 /* Don't store things in the fixed argument area at this point;
3180 it has already been saved. */
3181 && i > reg_parm_stack_space
3186 if (i != upper_bound)
3188 /* We need to make a save area. See what mode we can make it. */
3189 enum machine_mode save_mode
3190 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3193 = gen_rtx (MEM, save_mode,
3194 memory_address (save_mode, plus_constant (argblock,
3195 argvec[argnum].offset.constant)));
3196 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3197 emit_move_insn (argvec[argnum].save_area, stack_area);
3200 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3201 argblock, GEN_INT (argvec[argnum].offset.constant));
3203 #ifdef ACCUMULATE_OUTGOING_ARGS
3204 /* Now mark the segment we just used. */
3205 for (i = lower_bound; i < upper_bound; i++)
3206 stack_usage_map[i] = 1;
3213 #ifndef PUSH_ARGS_REVERSED
3214 #ifdef STACK_BOUNDARY
3215 /* If we pushed args in forward order, perform stack alignment
3216 after pushing the last arg. */
3218 anti_adjust_stack (GEN_INT (args_size.constant
3219 - original_args_size.constant));
3223 #ifdef PUSH_ARGS_REVERSED
3229 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3231 /* Now load any reg parms into their regs. */
3233 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3234 are to be pushed. */
3235 for (count = 0; count < nargs; count++, argnum += inc)
3237 register enum machine_mode mode = argvec[argnum].mode;
3238 register rtx val = argvec[argnum].value;
3239 rtx reg = argvec[argnum].reg;
3240 int partial = argvec[argnum].partial;
3242 if (reg != 0 && partial == 0)
3243 emit_move_insn (reg, val);
3248 /* For version 1.37, try deleting this entirely. */
3253 /* Any regs containing parms remain in use through the call. */
3254 for (count = 0; count < nargs; count++)
3255 if (argvec[count].reg != 0)
3256 use_reg (&call_fusage, argvec[count].reg);
3258 /* Pass the function the address in which to return a structure value. */
3259 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3261 emit_move_insn (struct_value_rtx,
3263 force_operand (XEXP (mem_value, 0),
3265 if (GET_CODE (struct_value_rtx) == REG)
3266 use_reg (&call_fusage, struct_value_rtx);
3269 /* Don't allow popping to be deferred, since then
3270 cse'ing of library calls could delete a call and leave the pop. */
3273 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3274 will set inhibit_defer_pop to that value. */
3275 /* See the comment in emit_library_call about the function type we build
3279 get_identifier (XSTR (orgfun, 0)),
3280 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3281 args_size.constant, struct_value_size,
3282 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3283 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3284 old_inhibit_defer_pop + 1, call_fusage, is_const);
3286 /* Now restore inhibit_defer_pop to its actual original value. */
3291 /* Copy the value to the right place. */
3292 if (outmode != VOIDmode)
3298 if (value != mem_value)
3299 emit_move_insn (value, mem_value);
3301 else if (value != 0)
3302 emit_move_insn (value, hard_libcall_value (outmode));
3304 value = hard_libcall_value (outmode);
3307 #ifdef ACCUMULATE_OUTGOING_ARGS
3308 #ifdef REG_PARM_STACK_SPACE
3311 enum machine_mode save_mode = GET_MODE (save_area);
3313 = gen_rtx (MEM, save_mode,
3314 memory_address (save_mode,
3315 #ifdef ARGS_GROW_DOWNWARD
3316 plus_constant (argblock, - high_to_save)
3318 plus_constant (argblock, low_to_save)
3322 if (save_mode != BLKmode)
3323 emit_move_insn (stack_area, save_area);
3325 emit_block_move (stack_area, validize_mem (save_area),
3326 GEN_INT (high_to_save - low_to_save + 1),
3327 PARM_BOUNDARY / BITS_PER_UNIT);
3331 /* If we saved any argument areas, restore them. */
3332 for (count = 0; count < nargs; count++)
3333 if (argvec[count].save_area)
3335 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3337 = gen_rtx (MEM, save_mode,
3338 memory_address (save_mode, plus_constant (argblock,
3339 argvec[count].offset.constant)));
3341 emit_move_insn (stack_area, argvec[count].save_area);
3344 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3345 stack_usage_map = initial_stack_usage_map;
3352 /* Return an rtx which represents a suitable home on the stack
3353 given TYPE, the type of the argument looking for a home.
3354 This is called only for BLKmode arguments.
3356 SIZE is the size needed for this target.
3357 ARGS_ADDR is the address of the bottom of the argument block for this call.
3358 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3359 if this machine uses push insns. */
/* K&R-style definition with no visible return type (callers treat the
   result as an rtx -- NOTE(review): confirm against the declaration
   earlier in the file).  OFFSET is a struct args_size passed by value;
   it may have both a constant part and a variable (tree) part.  */
3362 target_for_arg (type, size, args_addr, offset)
3366 struct args_size offset;
/* Materialize the args_size struct as an RTL expression for the byte
   offset of this parameter within the argument block.  */
3369 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3371 /* We do not call memory_address if possible,
3372 because we want to address as close to the stack
3373 as possible. For non-variable sized arguments,
3374 this will be stack-pointer relative addressing. */
/* Fast path: a compile-time-constant offset is folded directly into
   the address with plus_constant, skipping address legitimization.  */
3375 if (GET_CODE (offset_rtx) == CONST_INT)
3376 target = plus_constant (args_addr, INTVAL (offset_rtx));
3379 /* I have no idea how to guarantee that this
3380 will work in the presence of register parameters. */
/* Variable offset: form an explicit PLUS in Pmode and let
   memory_address legitimize it for this target (QImode is passed as
   the access mode here).  */
3381 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
3382 target = memory_address (QImode, target);
/* Wrap the computed stack address in a BLKmode MEM -- the "home"
   requested by the caller.  */
3385 return gen_rtx (MEM, BLKmode, target);
3389 /* Store a single argument for a function call
3390 into the register or memory area where it must be passed.
3391 *ARG describes the argument value and where to pass it.
3393 ARGBLOCK is the address of the stack-block for all the arguments,
3394 or 0 on a machine where arguments are pushed individually.
3396 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3397 so must be careful about how the stack is used.
3399 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3400 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3401 that we need not worry about saving and restoring the stack.
3403 FNDECL is the declaration of the function we are calling. */
3406 store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
3407 reg_parm_stack_space)
3408 struct arg_data *arg;
3413 int reg_parm_stack_space;
3415 register tree pval = arg->tree_value;
3419 int i, lower_bound, upper_bound;
3421 if (TREE_CODE (pval) == ERROR_MARK)
3424 /* Push a new temporary level for any temporaries we make for
3428 #ifdef ACCUMULATE_OUTGOING_ARGS
3429 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3430 save any previous data at that location. */
3431 if (argblock && ! variable_size && arg->stack)
3433 #ifdef ARGS_GROW_DOWNWARD
3434 /* stack_slot is negative, but we want to index stack_usage_map
3435 with positive values. */
3436 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3437 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3441 lower_bound = upper_bound - arg->size.constant;
3443 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3444 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3448 upper_bound = lower_bound + arg->size.constant;
3451 for (i = lower_bound; i < upper_bound; i++)
3452 if (stack_usage_map[i]
3453 #ifdef REG_PARM_STACK_SPACE
3454 /* Don't store things in the fixed argument area at this point;
3455 it has already been saved. */
3456 && i > reg_parm_stack_space
3461 if (i != upper_bound)
3463 /* We need to make a save area. See what mode we can make it. */
3464 enum machine_mode save_mode
3465 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3467 = gen_rtx (MEM, save_mode,
3468 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
3470 if (save_mode == BLKmode)
3472 arg->save_area = assign_stack_temp (BLKmode,
3473 arg->size.constant, 0);
3474 MEM_IN_STRUCT_P (arg->save_area)
3475 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
3476 preserve_temp_slots (arg->save_area);
3477 emit_block_move (validize_mem (arg->save_area), stack_area,
3478 GEN_INT (arg->size.constant),
3479 PARM_BOUNDARY / BITS_PER_UNIT);
3483 arg->save_area = gen_reg_rtx (save_mode);
3484 emit_move_insn (arg->save_area, stack_area);
3490 /* If this isn't going to be placed on both the stack and in registers,
3491 set up the register and number of words. */
3492 if (! arg->pass_on_stack)
3493 reg = arg->reg, partial = arg->partial;
3495 if (reg != 0 && partial == 0)
3496 /* Being passed entirely in a register. We shouldn't be called in
3500 /* If this arg needs special alignment, don't load the registers
3502 if (arg->n_aligned_regs != 0)
3505 /* If this is being passed partially in a register, we can't evaluate
3506 it directly into its stack slot. Otherwise, we can. */
3507 if (arg->value == 0)
3509 #ifdef ACCUMULATE_OUTGOING_ARGS
3510 /* stack_arg_under_construction is nonzero if a function argument is
3511 being evaluated directly into the outgoing argument list and
3512 expand_call must take special action to preserve the argument list
3513 if it is called recursively.
3515 For scalar function arguments stack_usage_map is sufficient to
3516 determine which stack slots must be saved and restored. Scalar
3517 arguments in general have pass_on_stack == 0.
3519 If this argument is initialized by a function which takes the
3520 address of the argument (a C++ constructor or a C function
3521 returning a BLKmode structure), then stack_usage_map is
3522 insufficient and expand_call must push the stack around the
3523 function call. Such arguments have pass_on_stack == 1.
3525 Note that it is always safe to set stack_arg_under_construction,
3526 but this generates suboptimal code if set when not needed. */
3528 if (arg->pass_on_stack)
3529 stack_arg_under_construction++;
3531 arg->value = expand_expr (pval,
3533 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3534 ? NULL_RTX : arg->stack,
3537 /* If we are promoting object (or for any other reason) the mode
3538 doesn't agree, convert the mode. */
3540 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3541 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3542 arg->value, arg->unsignedp);
3544 #ifdef ACCUMULATE_OUTGOING_ARGS
3545 if (arg->pass_on_stack)
3546 stack_arg_under_construction--;
3550 /* Don't allow anything left on stack from computation
3551 of argument to alloca. */
3553 do_pending_stack_adjust ();
3555 if (arg->value == arg->stack)
3557 /* If the value is already in the stack slot, we are done. */
3558 if (flag_check_memory_usage && GET_CODE (arg->stack) == MEM)
3560 if (arg->mode == BLKmode)
3563 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3564 XEXP (arg->stack, 0), ptr_mode,
3565 GEN_INT (GET_MODE_SIZE (arg->mode)),
3566 TYPE_MODE (sizetype),
3567 GEN_INT (MEMORY_USE_RW), QImode);
3570 else if (arg->mode != BLKmode)
3574 /* Argument is a scalar, not entirely passed in registers.
3575 (If part is passed in registers, arg->partial says how much
3576 and emit_push_insn will take care of putting it there.)
3578 Push it, and if its size is less than the
3579 amount of space allocated to it,
3580 also bump stack pointer by the additional space.
3581 Note that in C the default argument promotions
3582 will prevent such mismatches. */
3584 size = GET_MODE_SIZE (arg->mode);
3585 /* Compute how much space the push instruction will push.
3586 On many machines, pushing a byte will advance the stack
3587 pointer by a halfword. */
3588 #ifdef PUSH_ROUNDING
3589 size = PUSH_ROUNDING (size);
3593 /* Compute how much space the argument should get:
3594 round up to a multiple of the alignment for arguments. */
3595 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3596 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3597 / (PARM_BOUNDARY / BITS_PER_UNIT))
3598 * (PARM_BOUNDARY / BITS_PER_UNIT));
3600 /* This isn't already where we want it on the stack, so put it there.
3601 This can either be done with push or copy insns. */
3602 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3603 0, partial, reg, used - size,
3604 argblock, ARGS_SIZE_RTX (arg->offset));
3608 /* BLKmode, at least partly to be pushed. */
3610 register int excess;
3613 /* Pushing a nonscalar.
3614 If part is passed in registers, PARTIAL says how much
3615 and emit_push_insn will take care of putting it there. */
3617 /* Round its size up to a multiple
3618 of the allocation unit for arguments. */
3620 if (arg->size.var != 0)
3623 size_rtx = ARGS_SIZE_RTX (arg->size);
3627 /* PUSH_ROUNDING has no effect on us, because
3628 emit_push_insn for BLKmode is careful to avoid it. */
3629 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3630 + partial * UNITS_PER_WORD);
3631 size_rtx = expr_size (pval);
3634 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3635 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3636 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3640 /* Unless this is a partially-in-register argument, the argument is now
3643 ??? Note that this can change arg->value from arg->stack to
3644 arg->stack_slot and it matters when they are not the same.
3645 It isn't totally clear that this is correct in all cases. */
3647 arg->value = arg->stack_slot;
3649 /* Once we have pushed something, pops can't safely
3650 be deferred during the rest of the arguments. */
3653 /* ANSI doesn't require a sequence point here,
3654 but PCC has one, so this will avoid some problems. */
3657 /* Free any temporary slots made in processing this argument. Show
3658 that we might have taken the address of something and pushed that
3660 preserve_temp_slots (NULL_RTX);
3664 #ifdef ACCUMULATE_OUTGOING_ARGS
3665 /* Now mark the segment we just used. */
3666 if (argblock && ! variable_size && arg->stack)
3667 for (i = lower_bound; i < upper_bound; i++)
3668 stack_usage_map[i] = 1;