1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
32 /* Decide whether a function's arguments should be processed
33 from first to last or from last to first.
35 They should if the stack and args grow in opposite directions, but
36 only if we have push insns. */
40 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
/* NOTE(review): the #endif matching this #if is not visible in this
   excerpt -- confirm it is present in the full file.  */
41 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
47 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
49 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): this region documents the members of `struct arg_data';
   the `struct arg_data {' opener and several of the field declarations
   described by the comments below are not visible in this excerpt.  */
53 /* Tree node for this argument. */
55 /* Mode for value; TYPE_MODE unless promoted. */
56 enum machine_mode mode;
57 /* Current RTL value for argument, or 0 if it isn't precomputed. */
59 /* Initially-computed RTL value for argument; only for const functions. */
61 /* Register to pass this argument in, 0 if passed on stack, or a
62 PARALLEL if the arg is to be copied into multiple non-contiguous
65 /* If REG was promoted from the actual mode of the argument expression,
66 indicates whether the promotion is sign- or zero-extended. */
68 /* Number of registers to use. 0 means put the whole arg in registers.
69 Also 0 if not passed in registers. */
71 /* Non-zero if argument must be passed on stack.
72 Note that some arguments may be passed on the stack
73 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
74 pass_on_stack identifies arguments that *cannot* go in registers. */
76 /* Offset of this argument from beginning of stack-args. */
77 struct args_size offset;
78 /* Similar, but offset to the start of the stack slot. Different from
79 OFFSET if this arg pads downward. */
80 struct args_size slot_offset;
81 /* Size of this argument on the stack, rounded up for any padding it gets,
82 parts of the argument passed in registers do not count.
83 If REG_PARM_STACK_SPACE is defined, then register parms
84 are counted here as well. */
85 struct args_size size;
86 /* Location on the stack at which parameter should be stored. The store
87 has already been done if STACK == VALUE. */
89 /* Location on the stack of the start of this argument slot. This can
90 differ from STACK if this arg pads downward. This location is known
91 to be aligned to FUNCTION_ARG_BOUNDARY. */
93 #ifdef ACCUMULATE_OUTGOING_ARGS
94 /* Place that this stack area has been saved, if needed. */
97 /* If an argument's alignment does not permit direct copying into registers,
98 copy in smaller-sized pieces into pseudos. These are stored in a
99 block pointed to by this field. The next field says how many
100 word-sized pseudos we made. */
105 #ifdef ACCUMULATE_OUTGOING_ARGS
106 /* A vector of one char per byte of stack space. A byte is non-zero if
107 the corresponding stack location has been used.
108 This vector is used to prevent a function call within an argument from
109 clobbering any stack already set up. */
110 static char *stack_usage_map;
112 /* Size of STACK_USAGE_MAP. */
113 static int highest_outgoing_arg_in_use;
/* NOTE(review): the #endif closing the ACCUMULATE_OUTGOING_ARGS section
   above is not visible in this excerpt.  */
115 /* stack_arg_under_construction is nonzero when an argument may be
116 initialized with a constructor call (including a C function that
117 returns a BLKmode struct) and expand_call must take special action
118 to make sure the object being constructed does not overlap the
119 argument list for the constructor call. */
120 int stack_arg_under_construction;
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): the closing portions of the emit_call_1 and store_one_arg
   prototypes are not visible in this excerpt.  */
123 static int calls_function PROTO((tree, int));
124 static int calls_function_1 PROTO((tree, int));
125 static void emit_call_1 PROTO((rtx, tree, tree, HOST_WIDE_INT,
126 HOST_WIDE_INT, rtx, rtx,
128 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
131 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
134 If WHICH is 0, return 1 if EXP contains a call to any function.
135 Actually, we only need return 1 if evaluating EXP would require pushing
136 arguments on the stack, but that is too difficult to compute, so we just
137 assume any function call might require the stack. */
/* List of SAVE_EXPRs already visited during a calls_function_1 walk; used
   to avoid processing the same SAVE_EXPR twice (see value_member check in
   calls_function_1).  Cleared by calls_function before and after each walk. */
139 static tree calls_function_save_exprs;
/* Entry point for the EXP-contains-a-call query described above: reset the
   SAVE_EXPR memo list, delegate the recursive tree walk to calls_function_1,
   then clear the list again so no stale tree nodes are retained between
   queries.  NOTE(review): the declaration line, parameter declarations and
   the return of VAL are not visible in this excerpt.  */
142 calls_function (exp, which)
147 calls_function_save_exprs = 0;
148 val = calls_function_1 (exp, which);
149 calls_function_save_exprs = 0;
/* Recursive worker for calls_function.  Walks the tree EXP looking for a
   function call (or, when WHICH is 1, a call to alloca -- see the comment
   above calls_function_save_exprs).  NOTE(review): many interior lines
   (case labels, returns, braces) are not visible in this excerpt.  */
154 calls_function_1 (exp, which)
159 enum tree_code code = TREE_CODE (exp);
160 int type = TREE_CODE_CLASS (code);
161 int length = tree_code_length[(int) code];
163 /* If this code is language-specific, we don't know what it will do. */
164 if ((int) code >= NUM_TREE_CODES)
167 /* Only expressions and references can contain calls. */
168 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* A CALL_EXPR whose callee is the address of a known FUNCTION_DECL:
   check whether the callee is alloca (built-in, or a saved function
   whose flags say it calls alloca).  */
177 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
178 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
181 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
183 if ((DECL_BUILT_IN (fndecl)
184 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
185 || (DECL_SAVED_INSNS (fndecl)
186 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
187 & FUNCTION_FLAGS_CALLS_ALLOCA)))
191 /* Third operand is RTL. */
/* SAVE_EXPR: already expanded to RTL, or already visited on this walk,
   needs no further examination; otherwise record it in the memo list and
   recurse into its operand.  */
196 if (SAVE_EXPR_RTL (exp) != 0)
198 if (value_member (exp, calls_function_save_exprs))
200 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
201 calls_function_save_exprs);
202 return (TREE_OPERAND (exp, 0) != 0
203 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK: scan the initializers of its variables, then recurse into each
   subblock.  */
209 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
210 if (DECL_INITIAL (local) != 0
211 && calls_function_1 (DECL_INITIAL (local), which))
215 register tree subblock;
217 for (subblock = BLOCK_SUBBLOCKS (exp);
219 subblock = TREE_CHAIN (subblock))
220 if (calls_function_1 (subblock, which))
225 case METHOD_CALL_EXPR:
229 case WITH_CLEANUP_EXPR:
/* Default: recurse into every operand of EXP.  */
240 for (i = 0; i < length; i++)
241 if (TREE_OPERAND (exp, i) != 0
242 && calls_function_1 (TREE_OPERAND (exp, i), which))
248 /* Force FUNEXP into a form suitable for the address of a CALL,
249 and return that as an rtx. Also load the static chain register
250 if FNDECL is a nested function.
252 CALL_FUSAGE points to a variable holding the prospective
253 CALL_INSN_FUNCTION_USAGE information.
   NOTE(review): the parameter declarations and the final return of FUNEXP
   are not visible in this excerpt.  */
256 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
262 rtx static_chain_value = 0;
264 funexp = protect_from_queue (funexp, 0);
267 /* Get possible static chain value for nested function in C. */
268 static_chain_value = lookup_static_chain (fndecl);
270 /* Make a valid memory address and copy constants thru pseudo-regs,
271 but not for a constant address if -fno-function-cse. */
272 if (GET_CODE (funexp) != SYMBOL_REF)
273 /* If we are using registers for parameters, force the
274 function address into a register now. */
275 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
276 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
277 : memory_address (FUNCTION_MODE, funexp));
280 #ifndef NO_FUNCTION_CSE
281 if (optimize && ! flag_no_function_cse)
282 #ifdef NO_RECURSIVE_FUNCTION_CSE
283 if (fndecl != current_function_decl)
285 funexp = force_reg (Pmode, funexp);
/* If the callee needs a static chain, load it into the static chain
   register and record the register's use in CALL_FUSAGE.  */
289 if (static_chain_value != 0)
291 emit_move_insn (static_chain_rtx, static_chain_value);
293 if (GET_CODE (static_chain_rtx) == REG)
294 use_reg (call_fusage, static_chain_rtx);
300 /* Generate instructions to call function FUNEXP,
301 and optionally pop the results.
302 The CALL_INSN is the first insn generated.
304 FNDECL is the declaration node of the function. This is given to the
305 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
307 FUNTYPE is the data type of the function. This is given to the macro
308 RETURN_POPS_ARGS to determine whether this function pops its own args.
309 We used to allow an identifier for library functions, but that doesn't
310 work when the return type is an aggregate type and the calling convention
311 says that the pointer to this aggregate is to be popped by the callee.
313 STACK_SIZE is the number of bytes of arguments on the stack,
314 rounded up to STACK_BOUNDARY; zero if the size is variable.
315 This is both to put into the call insn and
316 to generate explicit popping code if necessary.
318 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
319 It is zero if this call doesn't want a structure value.
321 NEXT_ARG_REG is the rtx that results from executing
322 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
323 just after all the args have had their registers assigned.
324 This could be whatever you like, but normally it is the first
325 arg-register beyond those used for args in this call,
326 or 0 if all the arg-registers are used in this call.
327 It is passed on to `gen_call' so you can put this info in the call insn.
329 VALREG is a hard register in which a value is returned,
330 or 0 if the call does not return a value.
332 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
333 the args to this call were processed.
334 We restore `inhibit_defer_pop' to that value.
336 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
337 denote registers used by the called function.
339 IS_CONST is true if this is a `const' call.
   NOTE(review): several interior lines of this function (braces, some
   conditionals and #endif lines) are not visible in this excerpt.  */
342 emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
343 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
346 tree fndecl ATTRIBUTE_UNUSED;
347 tree funtype ATTRIBUTE_UNUSED;
348 HOST_WIDE_INT stack_size;
349 HOST_WIDE_INT struct_value_size;
352 int old_inhibit_defer_pop;
356 rtx stack_size_rtx = GEN_INT (stack_size);
357 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
359 #ifndef ACCUMULATE_OUTGOING_ARGS
360 int already_popped = 0;
363 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
364 and we don't want to load it into a register as an optimization,
365 because prepare_call_address already did it if it should be done. */
366 if (GET_CODE (funexp) != SYMBOL_REF)
367 funexp = memory_address (FUNCTION_MODE, funexp)
369 #ifndef ACCUMULATE_OUTGOING_ARGS
370 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
/* Prefer the call_pop/call_value_pop patterns when the target has them
   and the callee pops its own args.  */
371 if (HAVE_call_pop && HAVE_call_value_pop
372 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
375 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
378 /* If this subroutine pops its own args, record that in the call insn
379 if possible, for the sake of frame pointer elimination. */
382 pat = gen_call_value_pop (valreg,
383 gen_rtx_MEM (FUNCTION_MODE, funexp),
384 stack_size_rtx, next_arg_reg, n_pop);
386 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
387 stack_size_rtx, next_arg_reg, n_pop);
389 emit_call_insn (pat);
396 #if defined (HAVE_call) && defined (HAVE_call_value)
/* Otherwise fall back to the plain call/call_value patterns; use
   call_value when VALREG wants the returned value.  */
397 if (HAVE_call && HAVE_call_value)
400 emit_call_insn (gen_call_value (valreg,
401 gen_rtx_MEM (FUNCTION_MODE, funexp),
402 stack_size_rtx, next_arg_reg,
405 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
406 stack_size_rtx, next_arg_reg,
407 struct_value_size_rtx));
413 /* Find the CALL insn we just emitted. */
414 for (call_insn = get_last_insn ();
415 call_insn && GET_CODE (call_insn) != CALL_INSN;
416 call_insn = PREV_INSN (call_insn))
422 /* Put the register usage information on the CALL. If there is already
423 some usage information, put ours at the end. */
424 if (CALL_INSN_FUNCTION_USAGE (call_insn))
428 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
429 link = XEXP (link, 1))
432 XEXP (link, 1) = call_fusage;
435 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
437 /* If this is a const call, then set the insn's unchanging bit. */
439 CONST_CALL_P (call_insn) = 1;
441 /* Restore this now, so that we do defer pops for this call's args
442 if the context of the call as a whole permits. */
443 inhibit_defer_pop = old_inhibit_defer_pop;
445 #ifndef ACCUMULATE_OUTGOING_ARGS
446 /* If returning from the subroutine does not automatically pop the args,
447 we need an instruction to pop them sooner or later.
448 Perhaps do it now; perhaps just record how much space to pop later.
450 If returning from the subroutine does pop the args, indicate that the
451 stack pointer will be changed. */
453 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
456 CALL_INSN_FUNCTION_USAGE (call_insn)
457 = gen_rtx_EXPR_LIST (VOIDmode,
458 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
459 CALL_INSN_FUNCTION_USAGE (call_insn));
460 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
461 stack_size_rtx = GEN_INT (stack_size);
/* Otherwise, either defer the pop by accumulating it into
   pending_stack_adjust, or adjust the stack immediately.  */
466 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
467 pending_stack_adjust += stack_size;
469 adjust_stack (stack_size_rtx);
474 /* Generate all the code for a function call
475 and return an rtx for its value.
476 Store the value in TARGET (specified as an rtx) if convenient.
477 If the value is stored in TARGET then TARGET is returned.
478 If IGNORE is nonzero, then we ignore the value of the function call. */
481 expand_call (exp, target, ignore)
486 /* List of actual parameters. */
487 tree actparms = TREE_OPERAND (exp, 1);
488 /* RTX for the function to be called. */
490 /* Data type of the function. */
492 /* Declaration of the function being called,
493 or 0 if the function is computed (not known by name). */
497 /* Register in which non-BLKmode value will be returned,
498 or 0 if no value or if value is BLKmode. */
500 /* Address where we should return a BLKmode value;
501 0 if value not BLKmode. */
502 rtx structure_value_addr = 0;
503 /* Nonzero if that address is being passed by treating it as
504 an extra, implicit first parameter. Otherwise,
505 it is passed by being copied directly into struct_value_rtx. */
506 int structure_value_addr_parm = 0;
507 /* Size of aggregate value wanted, or zero if none wanted
508 or if we are using the non-reentrant PCC calling convention
509 or expecting the value in registers. */
510 HOST_WIDE_INT struct_value_size = 0;
511 /* Nonzero if called function returns an aggregate in memory PCC style,
512 by returning the address of where to find it. */
513 int pcc_struct_value = 0;
515 /* Number of actual parameters in this call, including struct value addr. */
517 /* Number of named args. Args after this are anonymous ones
518 and they must all go on the stack. */
520 /* Count arg position in order args appear. */
523 /* Vector of information about each argument.
524 Arguments are numbered in the order they will be pushed,
525 not the order they are written. */
526 struct arg_data *args;
528 /* Total size in bytes of all the stack-parms scanned so far. */
529 struct args_size args_size;
530 /* Size of arguments before any adjustments (such as rounding). */
531 struct args_size original_args_size;
532 /* Data on reg parms scanned so far. */
533 CUMULATIVE_ARGS args_so_far;
534 /* Nonzero if a reg parm has been scanned. */
536 /* Nonzero if this is an indirect function call. */
538 /* Nonzero if we must avoid push-insns in the args for this call.
539 If stack space is allocated for register parameters, but not by the
540 caller, then it is preallocated in the fixed part of the stack frame.
541 So the entire argument block must then be preallocated (i.e., we
542 ignore PUSH_ROUNDING in that case). */
545 int must_preallocate = 0;
547 int must_preallocate = 1;
550 /* Size of the stack reserved for parameter registers. */
551 int reg_parm_stack_space = 0;
553 /* 1 if scanning parms front to back, -1 if scanning back to front. */
555 /* Address of space preallocated for stack parms
556 (on machines that lack push insns), or 0 if space not preallocated. */
559 /* Nonzero if it is plausible that this is a call to alloca. */
561 /* Nonzero if this is a call to malloc or a related function. */
563 /* Nonzero if this is a call to setjmp or a related function. */
565 /* Nonzero if this is a call to `longjmp'. */
567 /* Nonzero if this is a call to an inline function. */
568 int is_integrable = 0;
569 /* Nonzero if this is a call to a `const' function.
570 Note that only explicitly named functions are handled as `const' here. */
572 /* Nonzero if this is a call to a `volatile' function. */
574 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
575 /* Define the boundary of the register parm stack space that needs to be
577 int low_to_save = -1, high_to_save;
578 rtx save_area = 0; /* Place that it is saved */
581 #ifdef ACCUMULATE_OUTGOING_ARGS
582 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
583 char *initial_stack_usage_map = stack_usage_map;
584 int old_stack_arg_under_construction;
587 rtx old_stack_level = 0;
588 int old_pending_adj = 0;
589 int old_inhibit_defer_pop = inhibit_defer_pop;
594 /* The value of the function call can be put in a hard register. But
595 if -fcheck-memory-usage, code which invokes functions (and thus
596 damages some hard registers) can be inserted before using the value.
597 So, target is always a pseudo-register in that case. */
598 if (current_function_check_memory_usage)
601 /* See if we can find a DECL-node for the actual function.
602 As a result, decide whether this is a call to an integrable function. */
604 p = TREE_OPERAND (exp, 0);
605 if (TREE_CODE (p) == ADDR_EXPR)
607 fndecl = TREE_OPERAND (p, 0);
608 if (TREE_CODE (fndecl) != FUNCTION_DECL)
613 && fndecl != current_function_decl
614 && DECL_INLINE (fndecl)
615 && DECL_SAVED_INSNS (fndecl)
616 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
618 else if (! TREE_ADDRESSABLE (fndecl))
620 /* In case this function later becomes inlinable,
621 record that there was already a non-inline call to it.
623 Use abstraction instead of setting TREE_ADDRESSABLE
625 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
628 warning_with_decl (fndecl, "can't inline call to `%s'");
629 warning ("called from here");
631 mark_addressable (fndecl);
634 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
635 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
638 if (TREE_THIS_VOLATILE (fndecl))
643 /* If we don't have specific function to call, see if we have a
644 constant or `noreturn' function from the type. */
647 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
648 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
651 #ifdef REG_PARM_STACK_SPACE
652 #ifdef MAYBE_REG_PARM_STACK_SPACE
653 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
655 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
659 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
660 if (reg_parm_stack_space > 0)
661 must_preallocate = 1;
664 /* Warn if this value is an aggregate type,
665 regardless of which calling convention we are using for it. */
666 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
667 warning ("function call has aggregate value");
669 /* Set up a place to return a structure. */
671 /* Cater to broken compilers. */
672 if (aggregate_value_p (exp))
674 /* This call returns a big structure. */
677 #ifdef PCC_STATIC_STRUCT_RETURN
679 pcc_struct_value = 1;
680 /* Easier than making that case work right. */
683 /* In case this is a static function, note that it has been
685 if (! TREE_ADDRESSABLE (fndecl))
686 mark_addressable (fndecl);
690 #else /* not PCC_STATIC_STRUCT_RETURN */
692 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
694 if (target && GET_CODE (target) == MEM)
695 structure_value_addr = XEXP (target, 0);
698 /* Assign a temporary to hold the value. */
701 /* For variable-sized objects, we must be called with a target
702 specified. If we were to allocate space on the stack here,
703 we would have no way of knowing when to free it. */
705 if (struct_value_size < 0)
708 /* This DECL is just something to feed to mark_addressable;
709 it doesn't get pushed. */
710 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
711 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
712 mark_addressable (d);
713 structure_value_addr = XEXP (DECL_RTL (d), 0);
718 #endif /* not PCC_STATIC_STRUCT_RETURN */
721 /* If called function is inline, try to integrate it. */
726 #ifdef ACCUMULATE_OUTGOING_ARGS
727 rtx before_call = get_last_insn ();
730 temp = expand_inline_function (fndecl, actparms, target,
731 ignore, TREE_TYPE (exp),
732 structure_value_addr);
734 /* If inlining succeeded, return. */
735 if (temp != (rtx) (HOST_WIDE_INT) -1)
737 #ifdef ACCUMULATE_OUTGOING_ARGS
738 /* If the outgoing argument list must be preserved, push
739 the stack before executing the inlined function if it
742 for (i = reg_parm_stack_space - 1; i >= 0; i--)
743 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
746 if (stack_arg_under_construction || i >= 0)
749 = before_call ? NEXT_INSN (before_call) : get_insns ();
752 /* Look for a call in the inline function code.
753 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
754 nonzero then there is a call and it is not necessary
755 to scan the insns. */
757 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
758 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
759 if (GET_CODE (insn) == CALL_INSN)
764 /* Reserve enough stack space so that the largest
765 argument list of any function call in the inline
766 function does not overlap the argument list being
767 evaluated. This is usually an overestimate because
768 allocate_dynamic_stack_space reserves space for an
769 outgoing argument list in addition to the requested
770 space, but there is no way to ask for stack space such
771 that an argument list of a certain length can be
774 Add the stack space reserved for register arguments, if
775 any, in the inline function. What is really needed is the
776 largest value of reg_parm_stack_space in the inline
777 function, but that is not available. Using the current
778 value of reg_parm_stack_space is wrong, but gives
779 correct results on all supported machines. */
781 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
782 + reg_parm_stack_space);
785 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
786 allocate_dynamic_stack_space (GEN_INT (adjust),
787 NULL_RTX, BITS_PER_UNIT);
790 emit_insns_before (seq, first_insn);
791 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
796 /* If the result is equivalent to TARGET, return TARGET to simplify
797 checks in store_expr. They can be equivalent but not equal in the
798 case of a function that returns BLKmode. */
799 if (temp != target && rtx_equal_p (temp, target))
804 /* If inlining failed, mark FNDECL as needing to be compiled
805 separately after all. If function was declared inline,
807 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
808 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
810 warning_with_decl (fndecl, "inlining failed in call to `%s'");
811 warning ("called from here");
813 mark_addressable (fndecl);
816 /* When calling a const function, we must pop the stack args right away,
817 so that the pop is deleted or moved with the call. */
821 function_call_count++;
823 if (fndecl && DECL_NAME (fndecl))
824 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
827 /* Unless it's a call to a specific function that isn't alloca,
828 if it has one argument, we must assume it might be alloca. */
831 = (!(fndecl != 0 && strcmp (name, "alloca"))
833 && TREE_CHAIN (actparms) == 0);
835 /* We assume that alloca will always be called by name. It
836 makes no sense to pass it as a pointer-to-function to
837 anything that does not understand its behavior. */
839 = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
841 && ! strcmp (name, "alloca"))
842 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
844 && ! strcmp (name, "__builtin_alloca"))));
847 /* See if this is a call to a function that can return more than once
848 or a call to longjmp. */
854 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
855 /* Exclude functions not at the file scope, or not `extern',
856 since they are not the magic functions we would otherwise
858 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
862 /* Disregard prefix _, __ or __x. */
865 if (name[1] == '_' && name[2] == 'x')
867 else if (name[1] == '_')
877 && (! strcmp (tname, "setjmp")
878 || ! strcmp (tname, "setjmp_syscall")))
880 && ! strcmp (tname, "sigsetjmp"))
882 && ! strcmp (tname, "savectx")));
884 && ! strcmp (tname, "siglongjmp"))
887 else if ((tname[0] == 'q' && tname[1] == 's'
888 && ! strcmp (tname, "qsetjmp"))
889 || (tname[0] == 'v' && tname[1] == 'f'
890 && ! strcmp (tname, "vfork")))
893 else if (tname[0] == 'l' && tname[1] == 'o'
894 && ! strcmp (tname, "longjmp"))
896 /* XXX should have "malloc" attribute on functions instead
897 of recognizing them by name. */
898 else if (! strcmp (tname, "malloc")
899 || ! strcmp (tname, "calloc")
900 || ! strcmp (tname, "realloc")
901 /* Note use of NAME rather than TNAME here. These functions
902 are only reserved when preceded with __. */
903 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
904 || ! strcmp (name, "__nw") /* mangled __builtin_new */
905 || ! strcmp (name, "__builtin_new")
906 || ! strcmp (name, "__builtin_vec_new"))
911 current_function_calls_alloca = 1;
913 /* Don't let pending stack adjusts add up to too much.
914 Also, do all pending adjustments now
915 if there is any chance this might be a call to alloca. */
917 if (pending_stack_adjust >= 32
918 || (pending_stack_adjust > 0 && may_be_alloca))
919 do_pending_stack_adjust ();
921 /* Operand 0 is a pointer-to-function; get the type of the function. */
922 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
923 if (TREE_CODE (funtype) != POINTER_TYPE)
925 funtype = TREE_TYPE (funtype);
927 /* Push the temporary stack slot level so that we can free any temporaries
931 /* Start updating where the next arg would go.
933 On some machines (such as the PA) indirect calls have a different
934 calling convention than normal calls. The last argument in
935 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
937 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
939 /* If struct_value_rtx is 0, it means pass the address
940 as if it were an extra parameter. */
941 if (structure_value_addr && struct_value_rtx == 0)
943 /* If structure_value_addr is a REG other than
944 virtual_outgoing_args_rtx, we can always use it. If it
945 is not a REG, we must always copy it into a register.
946 If it is virtual_outgoing_args_rtx, we must copy it to another
947 register in some cases. */
948 rtx temp = (GET_CODE (structure_value_addr) != REG
949 #ifdef ACCUMULATE_OUTGOING_ARGS
950 || (stack_arg_under_construction
951 && structure_value_addr == virtual_outgoing_args_rtx)
953 ? copy_addr_to_reg (structure_value_addr)
954 : structure_value_addr);
957 = tree_cons (error_mark_node,
958 make_tree (build_pointer_type (TREE_TYPE (funtype)),
961 structure_value_addr_parm = 1;
964 /* Count the arguments and set NUM_ACTUALS. */
965 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
968 /* Compute number of named args.
969 Normally, don't include the last named arg if anonymous args follow.
970 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
971 (If no anonymous args follow, the result of list_length is actually
972 one too large. This is harmless.)
974 If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is zero,
975 this machine will be able to place unnamed args that were passed in
976 registers into the stack. So treat all args as named. This allows the
977 insns emitting for a specific argument list to be independent of the
978 function declaration.
980 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
981 way to pass unnamed args in registers, so we must force them into
984 if ((STRICT_ARGUMENT_NAMING
985 #ifndef SETUP_INCOMING_VARARGS
989 && TYPE_ARG_TYPES (funtype) != 0)
991 = (list_length (TYPE_ARG_TYPES (funtype))
992 /* Don't include the last named arg. */
993 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
994 /* Count the struct value address, if it is passed as a parm. */
995 + structure_value_addr_parm);
997 /* If we know nothing, treat all args as named. */
998 n_named_args = num_actuals;
1000 /* Make a vector to hold all the information about each arg. */
1001 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1002 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1004 args_size.constant = 0;
1007 /* In this loop, we consider args in the order they are written.
1008 We fill up ARGS from the front or from the back if necessary
1009 so that in any case the first arg to be pushed ends up at the front. */
1011 #ifdef PUSH_ARGS_REVERSED
1012 i = num_actuals - 1, inc = -1;
1013 /* In this case, must reverse order of args
1014 so that we compute and push the last arg first. */
1019 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1020 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1022 tree type = TREE_TYPE (TREE_VALUE (p));
1024 enum machine_mode mode;
1026 args[i].tree_value = TREE_VALUE (p);
1028 /* Replace erroneous argument with constant zero. */
1029 if (type == error_mark_node || TYPE_SIZE (type) == 0)
1030 args[i].tree_value = integer_zero_node, type = integer_type_node;
1032 /* If TYPE is a transparent union, pass things the way we would
1033 pass the first field of the union. We have already verified that
1034 the modes are the same. */
1035 if (TYPE_TRANSPARENT_UNION (type))
1036 type = TREE_TYPE (TYPE_FIELDS (type));
1038 /* Decide where to pass this arg.
1040 args[i].reg is nonzero if all or part is passed in registers.
1042 args[i].partial is nonzero if part but not all is passed in registers,
1043 and the exact value says how many words are passed in registers.
1045 args[i].pass_on_stack is nonzero if the argument must at least be
1046 computed on the stack. It may then be loaded back into registers
1047 if args[i].reg is nonzero.
1049 These decisions are driven by the FUNCTION_... macros and must agree
1050 with those made by function.c. */
1052 /* See if this argument should be passed by invisible reference. */
1053 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1054 && contains_placeholder_p (TYPE_SIZE (type)))
1055 || TREE_ADDRESSABLE (type)
1056 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1057 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1058 type, argpos < n_named_args)
1062 /* If we're compiling a thunk, pass through invisible
1063 references instead of making a copy. */
1064 if (current_function_is_thunk
1065 #ifdef FUNCTION_ARG_CALLEE_COPIES
1066 || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
1067 type, argpos < n_named_args)
1068 /* If it's in a register, we must make a copy of it too. */
1069 /* ??? Is this a sufficient test? Is there a better one? */
1070 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1071 && REG_P (DECL_RTL (args[i].tree_value)))
1072 && ! TREE_ADDRESSABLE (type))
1076 args[i].tree_value = build1 (ADDR_EXPR,
1077 build_pointer_type (type),
1078 args[i].tree_value);
1079 type = build_pointer_type (type);
1083 /* We make a copy of the object and pass the address to the
1084 function being called. */
1087 if (TYPE_SIZE (type) == 0
1088 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1089 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1090 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1091 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1092 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1094 /* This is a variable-sized object. Make space on the stack
1096 rtx size_rtx = expr_size (TREE_VALUE (p));
1098 if (old_stack_level == 0)
1100 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1101 old_pending_adj = pending_stack_adjust;
1102 pending_stack_adjust = 0;
1105 copy = gen_rtx_MEM (BLKmode,
1106 allocate_dynamic_stack_space (size_rtx,
1108 TYPE_ALIGN (type)));
1112 int size = int_size_in_bytes (type);
1113 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1116 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
1118 store_expr (args[i].tree_value, copy, 0);
1121 args[i].tree_value = build1 (ADDR_EXPR,
1122 build_pointer_type (type),
1123 make_tree (type, copy));
1124 type = build_pointer_type (type);
1128 mode = TYPE_MODE (type);
1129 unsignedp = TREE_UNSIGNED (type);
1131 #ifdef PROMOTE_FUNCTION_ARGS
1132 mode = promote_mode (type, mode, &unsignedp, 1);
1135 args[i].unsignedp = unsignedp;
1136 args[i].mode = mode;
1137 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1138 argpos < n_named_args);
1139 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1142 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1143 argpos < n_named_args);
1146 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1148 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1149 it means that we are to pass this arg in the register(s) designated
1150 by the PARALLEL, but also to pass it in the stack. */
1151 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1152 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1153 args[i].pass_on_stack = 1;
1155 /* If this is an addressable type, we must preallocate the stack
1156 since we must evaluate the object into its final location.
1158 If this is to be passed in both registers and the stack, it is simpler
1160 if (TREE_ADDRESSABLE (type)
1161 || (args[i].pass_on_stack && args[i].reg != 0))
1162 must_preallocate = 1;
1164 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1165 we cannot consider this function call constant. */
1166 if (TREE_ADDRESSABLE (type))
1169 /* Compute the stack-size of this argument. */
1170 if (args[i].reg == 0 || args[i].partial != 0
1171 || reg_parm_stack_space > 0
1172 || args[i].pass_on_stack)
1173 locate_and_pad_parm (mode, type,
1174 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1179 fndecl, &args_size, &args[i].offset,
1182 #ifndef ARGS_GROW_DOWNWARD
1183 args[i].slot_offset = args_size;
1186 /* If a part of the arg was put into registers,
1187 don't include that part in the amount pushed. */
1188 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1189 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1190 / (PARM_BOUNDARY / BITS_PER_UNIT)
1191 * (PARM_BOUNDARY / BITS_PER_UNIT));
1193 /* Update ARGS_SIZE, the total stack space for args so far. */
1195 args_size.constant += args[i].size.constant;
1196 if (args[i].size.var)
1198 ADD_PARM_SIZE (args_size, args[i].size.var);
1201 /* Since the slot offset points to the bottom of the slot,
1202 we must record it after incrementing if the args grow down. */
1203 #ifdef ARGS_GROW_DOWNWARD
1204 args[i].slot_offset = args_size;
1206 args[i].slot_offset.constant = -args_size.constant;
1209 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1213 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1214 have been used, etc. */
1216 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1217 argpos < n_named_args);
1220 #ifdef FINAL_REG_PARM_STACK_SPACE
1221 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1225 /* Compute the actual size of the argument block required. The variable
1226 and constant sizes must be combined, the size may have to be rounded,
1227 and there may be a minimum required size. */
1229 original_args_size = args_size;
1232 /* If this function requires a variable-sized argument list, don't try to
1233 make a cse'able block for this call. We may be able to do this
1234 eventually, but it is too complicated to keep track of what insns go
1235 in the cse'able block and which don't. */
1238 must_preallocate = 1;
1240 args_size.var = ARGS_SIZE_TREE (args_size);
1241 args_size.constant = 0;
1243 #ifdef STACK_BOUNDARY
1244 if (STACK_BOUNDARY != BITS_PER_UNIT)
1245 args_size.var = round_up (args_size.var, STACK_BYTES);
1248 if (reg_parm_stack_space > 0)
1251 = size_binop (MAX_EXPR, args_size.var,
1252 size_int (reg_parm_stack_space));
1254 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1255 /* The area corresponding to register parameters is not to count in
1256 the size of the block we need. So make the adjustment. */
1258 = size_binop (MINUS_EXPR, args_size.var,
1259 size_int (reg_parm_stack_space));
1265 #ifdef STACK_BOUNDARY
1266 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1267 / STACK_BYTES) * STACK_BYTES);
1270 args_size.constant = MAX (args_size.constant,
1271 reg_parm_stack_space);
1273 #ifdef MAYBE_REG_PARM_STACK_SPACE
1274 if (reg_parm_stack_space == 0)
1275 args_size.constant = 0;
1278 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1279 args_size.constant -= reg_parm_stack_space;
1283 /* See if we have or want to preallocate stack space.
1285 If we would have to push a partially-in-regs parm
1286 before other stack parms, preallocate stack space instead.
1288 If the size of some parm is not a multiple of the required stack
1289 alignment, we must preallocate.
1291 If the total size of arguments that would otherwise create a copy in
1292 a temporary (such as a CALL) is more than half the total argument list
1293 size, preallocation is faster.
1295 Another reason to preallocate is if we have a machine (like the m88k)
1296 where stack alignment is required to be maintained between every
1297 pair of insns, not just when the call is made. However, we assume here
1298 that such machines either do not have push insns (and hence preallocation
1299 would occur anyway) or the problem is taken care of with
1302 if (! must_preallocate)
1304 int partial_seen = 0;
1305 int copy_to_evaluate_size = 0;
1307 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1309 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1311 else if (partial_seen && args[i].reg == 0)
1312 must_preallocate = 1;
1314 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1315 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1316 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1317 || TREE_CODE (args[i].tree_value) == COND_EXPR
1318 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1319 copy_to_evaluate_size
1320 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1323 if (copy_to_evaluate_size * 2 >= args_size.constant
1324 && args_size.constant > 0)
1325 must_preallocate = 1;
1328 /* If the structure value address will reference the stack pointer, we must
1329 stabilize it. We don't need to do this if we know that we are not going
1330 to adjust the stack pointer in processing this call. */
1332 if (structure_value_addr
1333 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1334 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1336 #ifndef ACCUMULATE_OUTGOING_ARGS
1337 || args_size.constant
1340 structure_value_addr = copy_to_reg (structure_value_addr);
1342 /* If this function call is cse'able, precompute all the parameters.
1343 Note that if the parameter is constructed into a temporary, this will
1344 cause an additional copy because the parameter will be constructed
1345 into a temporary location and then copied into the outgoing arguments.
1346 If a parameter contains a call to alloca and this function uses the
1347 stack, precompute the parameter. */
1349 /* If we preallocated the stack space, and some arguments must be passed
1350 on the stack, then we must precompute any parameter which contains a
1351 function call which will store arguments on the stack.
1352 Otherwise, evaluating the parameter may clobber previous parameters
1353 which have already been stored into the stack. */
1355 for (i = 0; i < num_actuals; i++)
1357 || ((args_size.var != 0 || args_size.constant != 0)
1358 && calls_function (args[i].tree_value, 1))
1359 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1360 && calls_function (args[i].tree_value, 0)))
1362 /* If this is an addressable type, we cannot pre-evaluate it. */
1363 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1368 args[i].initial_value = args[i].value
1369 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1371 preserve_temp_slots (args[i].value);
1374 /* ANSI doesn't require a sequence point here,
1375 but PCC has one, so this will avoid some problems. */
1378 args[i].initial_value = args[i].value
1379 = protect_from_queue (args[i].initial_value, 0);
1381 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1383 = convert_modes (args[i].mode,
1384 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1385 args[i].value, args[i].unsignedp);
1388 /* Now we are about to start emitting insns that can be deleted
1389 if a libcall is deleted. */
1390 if (is_const || is_malloc)
1393 /* If we have no actual push instructions, or shouldn't use them,
1394 make space for all args right now. */
1396 if (args_size.var != 0)
1398 if (old_stack_level == 0)
1400 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1401 old_pending_adj = pending_stack_adjust;
1402 pending_stack_adjust = 0;
1403 #ifdef ACCUMULATE_OUTGOING_ARGS
1404 /* stack_arg_under_construction says whether a stack arg is
1405 being constructed at the old stack level. Pushing the stack
1406 gets a clean outgoing argument block. */
1407 old_stack_arg_under_construction = stack_arg_under_construction;
1408 stack_arg_under_construction = 0;
1411 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1415 /* Note that we must go through the motions of allocating an argument
1416 block even if the size is zero because we may be storing args
1417 in the area reserved for register arguments, which may be part of
1420 int needed = args_size.constant;
1422 /* Store the maximum argument space used. It will be pushed by
1423 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1426 if (needed > current_function_outgoing_args_size)
1427 current_function_outgoing_args_size = needed;
1429 if (must_preallocate)
1431 #ifdef ACCUMULATE_OUTGOING_ARGS
1432 /* Since the stack pointer will never be pushed, it is possible for
1433 the evaluation of a parm to clobber something we have already
1434 written to the stack. Since most function calls on RISC machines
1435 do not use the stack, this is uncommon, but must work correctly.
1437 Therefore, we save any area of the stack that was already written
1438 and that we are using. Here we set up to do this by making a new
1439 stack usage map from the old one. The actual save will be done
1442 Another approach might be to try to reorder the argument
1443 evaluations to avoid this conflicting stack usage. */
1445 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1446 /* Since we will be writing into the entire argument area, the
1447 map must be allocated for its entire size, not just the part that
1448 is the responsibility of the caller. */
1449 needed += reg_parm_stack_space;
1452 #ifdef ARGS_GROW_DOWNWARD
1453 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1456 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1459 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1461 if (initial_highest_arg_in_use)
1462 bcopy (initial_stack_usage_map, stack_usage_map,
1463 initial_highest_arg_in_use);
1465 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1466 bzero (&stack_usage_map[initial_highest_arg_in_use],
1467 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1470 /* The address of the outgoing argument list must not be copied to a
1471 register here, because argblock would be left pointing to the
1472 wrong place after the call to allocate_dynamic_stack_space below.
1475 argblock = virtual_outgoing_args_rtx;
1477 #else /* not ACCUMULATE_OUTGOING_ARGS */
1478 if (inhibit_defer_pop == 0)
1480 /* Try to reuse some or all of the pending_stack_adjust
1481 to get this space. Maybe we can avoid any pushing. */
1482 if (needed > pending_stack_adjust)
1484 needed -= pending_stack_adjust;
1485 pending_stack_adjust = 0;
1489 pending_stack_adjust -= needed;
1493 /* Special case this because overhead of `push_block' in this
1494 case is non-trivial. */
1496 argblock = virtual_outgoing_args_rtx;
1498 argblock = push_block (GEN_INT (needed), 0, 0);
1500 /* We only really need to call `copy_to_reg' in the case where push
1501 insns are going to be used to pass ARGBLOCK to a function
1502 call in ARGS. In that case, the stack pointer changes value
1503 from the allocation point to the call point, and hence
1504 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1505 But might as well always do it. */
1506 argblock = copy_to_reg (argblock);
1507 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1511 #ifdef ACCUMULATE_OUTGOING_ARGS
1512 /* The save/restore code in store_one_arg handles all cases except one:
1513 a constructor call (including a C function returning a BLKmode struct)
1514 to initialize an argument. */
1515 if (stack_arg_under_construction)
1517 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1518 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1520 rtx push_size = GEN_INT (args_size.constant);
1522 if (old_stack_level == 0)
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1525 old_pending_adj = pending_stack_adjust;
1526 pending_stack_adjust = 0;
1527 /* stack_arg_under_construction says whether a stack arg is
1528 being constructed at the old stack level. Pushing the stack
1529 gets a clean outgoing argument block. */
1530 old_stack_arg_under_construction = stack_arg_under_construction;
1531 stack_arg_under_construction = 0;
1532 /* Make a new map for the new argument list. */
1533 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1534 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1535 highest_outgoing_arg_in_use = 0;
1537 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1539 /* If argument evaluation might modify the stack pointer, copy the
1540 address of the argument list to a register. */
1541 for (i = 0; i < num_actuals; i++)
1542 if (args[i].pass_on_stack)
1544 argblock = copy_addr_to_reg (argblock);
1550 /* If we preallocated stack space, compute the address of each argument.
1551 We need not ensure it is a valid memory address here; it will be
1552 validized when it is used. */
1555 rtx arg_reg = argblock;
1558 if (GET_CODE (argblock) == PLUS)
1559 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1561 for (i = 0; i < num_actuals; i++)
1563 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1564 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1567 /* Skip this parm if it will not be passed on the stack. */
1568 if (! args[i].pass_on_stack && args[i].reg != 0)
1571 if (GET_CODE (offset) == CONST_INT)
1572 addr = plus_constant (arg_reg, INTVAL (offset));
1574 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1576 addr = plus_constant (addr, arg_offset);
1577 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1578 MEM_IN_STRUCT_P (args[i].stack)
1579 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
1581 if (GET_CODE (slot_offset) == CONST_INT)
1582 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1584 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1586 addr = plus_constant (addr, arg_offset);
1587 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1591 #ifdef PUSH_ARGS_REVERSED
1592 #ifdef STACK_BOUNDARY
1593 /* If we push args individually in reverse order, perform stack alignment
1594 before the first push (the last arg). */
1596 anti_adjust_stack (GEN_INT (args_size.constant
1597 - original_args_size.constant));
1601 /* Don't try to defer pops if preallocating, not even from the first arg,
1602 since ARGBLOCK probably refers to the SP. */
1606 /* Get the function to call, in the form of RTL. */
1609 /* If this is the first use of the function, see if we need to
1610 make an external definition for it. */
1611 if (! TREE_USED (fndecl))
1613 assemble_external (fndecl);
1614 TREE_USED (fndecl) = 1;
1617 /* Get a SYMBOL_REF rtx for the function address. */
1618 funexp = XEXP (DECL_RTL (fndecl), 0);
1621 /* Generate an rtx (probably a pseudo-register) for the address. */
1624 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1625 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1627 /* Check the function is executable. */
1628 if (current_function_check_memory_usage)
1629 emit_library_call (chkr_check_exec_libfunc, 1,
1635 /* Figure out the register where the value, if any, will come back. */
1637 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1638 && ! structure_value_addr)
1640 if (pcc_struct_value)
1641 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1644 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1647 /* Precompute all register parameters. It isn't safe to compute anything
1648 once we have started filling any specific hard regs. */
1650 for (i = 0; i < num_actuals; i++)
1651 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1655 if (args[i].value == 0)
1658 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1660 preserve_temp_slots (args[i].value);
1663 /* ANSI doesn't require a sequence point here,
1664 but PCC has one, so this will avoid some problems. */
1668 /* If we are to promote the function arg to a wider mode,
1671 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1673 = convert_modes (args[i].mode,
1674 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1675 args[i].value, args[i].unsignedp);
1677 /* If the value is expensive, and we are inside an appropriately
1678 short loop, put the value into a pseudo and then put the pseudo
1681 For small register classes, also do this if this call uses
1682 register parameters. This is to avoid reload conflicts while
1683 loading the parameters registers. */
1685 if ((! (GET_CODE (args[i].value) == REG
1686 || (GET_CODE (args[i].value) == SUBREG
1687 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1688 && args[i].mode != BLKmode
1689 && rtx_cost (args[i].value, SET) > 2
1690 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
1691 || preserve_subexpressions_p ()))
1692 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1695 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1697 /* The argument list is the property of the called routine and it
1698 may clobber it. If the fixed area has been used for previous
1699 parameters, we must save and restore it.
1701 Here we compute the boundary of the area that needs to be saved, if any. */
1703 #ifdef ARGS_GROW_DOWNWARD
1704 for (i = 0; i < reg_parm_stack_space + 1; i++)
1706 for (i = 0; i < reg_parm_stack_space; i++)
1709 if (i >= highest_outgoing_arg_in_use
1710 || stack_usage_map[i] == 0)
1713 if (low_to_save == -1)
1719 if (low_to_save >= 0)
1721 int num_to_save = high_to_save - low_to_save + 1;
1722 enum machine_mode save_mode
1723 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1726 /* If we don't have the required alignment, must do this in BLKmode. */
1727 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1728 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1729 save_mode = BLKmode;
1731 #ifdef ARGS_GROW_DOWNWARD
1732 stack_area = gen_rtx_MEM (save_mode,
1733 memory_address (save_mode,
1734 plus_constant (argblock,
1737 stack_area = gen_rtx_MEM (save_mode,
1738 memory_address (save_mode,
1739 plus_constant (argblock,
1742 if (save_mode == BLKmode)
1744 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
1745 MEM_IN_STRUCT_P (save_area) = 0;
1746 emit_block_move (validize_mem (save_area), stack_area,
1747 GEN_INT (num_to_save),
1748 PARM_BOUNDARY / BITS_PER_UNIT);
1752 save_area = gen_reg_rtx (save_mode);
1753 emit_move_insn (save_area, stack_area);
1759 /* Now store (and compute if necessary) all non-register parms.
1760 These come before register parms, since they can require block-moves,
1761 which could clobber the registers used for register parms.
1762 Parms which have partial registers are not stored here,
1763 but we do preallocate space here if they want that. */
1765 for (i = 0; i < num_actuals; i++)
1766 if (args[i].reg == 0 || args[i].pass_on_stack)
1767 store_one_arg (&args[i], argblock, may_be_alloca,
1768 args_size.var != 0, reg_parm_stack_space);
1770 /* If we have a parm that is passed in registers but not in memory
1771 and whose alignment does not permit a direct copy into registers,
1772 make a group of pseudos that correspond to each register that we
1775 if (STRICT_ALIGNMENT)
1776 for (i = 0; i < num_actuals; i++)
1777 if (args[i].reg != 0 && ! args[i].pass_on_stack
1778 && args[i].mode == BLKmode
1779 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1780 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1782 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1783 int big_endian_correction = 0;
1785 args[i].n_aligned_regs
1786 = args[i].partial ? args[i].partial
1787 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1789 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1790 * args[i].n_aligned_regs);
1792 /* Structures smaller than a word are aligned to the least
1793 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1794 this means we must skip the empty high order bytes when
1795 calculating the bit offset. */
1796 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1797 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
1799 for (j = 0; j < args[i].n_aligned_regs; j++)
1801 rtx reg = gen_reg_rtx (word_mode);
1802 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1803 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1804 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1806 args[i].aligned_regs[j] = reg;
1808 /* There is no need to restrict this code to loading items
1809 in TYPE_ALIGN sized hunks. The bitfield instructions can
1810 load up entire word sized registers efficiently.
1812 ??? This may not be needed anymore.
1813 We used to emit a clobber here but that doesn't let later
1814 passes optimize the instructions we emit. By storing 0 into
1815 the register later passes know the first AND to zero out the
1816 bitfield being set in the register is unnecessary. The store
1817 of 0 will be deleted as will at least the first AND. */
1819 emit_move_insn (reg, const0_rtx);
1821 bytes -= bitsize / BITS_PER_UNIT;
1822 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1823 extract_bit_field (word, bitsize, 0, 1,
1824 NULL_RTX, word_mode,
1826 bitalign / BITS_PER_UNIT,
1828 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
1832 /* Now store any partially-in-registers parm.
1833 This is the last place a block-move can happen. */
1835 for (i = 0; i < num_actuals; i++)
1836 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1837 store_one_arg (&args[i], argblock, may_be_alloca,
1838 args_size.var != 0, reg_parm_stack_space);
1840 #ifndef PUSH_ARGS_REVERSED
1841 #ifdef STACK_BOUNDARY
1842 /* If we pushed args in forward order, perform stack alignment
1843 after pushing the last arg. */
1845 anti_adjust_stack (GEN_INT (args_size.constant
1846 - original_args_size.constant));
1850 /* If register arguments require space on the stack and stack space
1851 was not preallocated, allocate stack space here for arguments
1852 passed in registers. */
1853 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1854 if (must_preallocate == 0 && reg_parm_stack_space > 0)
1855 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1858 /* Pass the function the address in which to return a structure value. */
1859 if (structure_value_addr && ! structure_value_addr_parm)
1861 emit_move_insn (struct_value_rtx,
1863 force_operand (structure_value_addr,
1866 /* Mark the memory for the aggregate as write-only. */
1867 if (current_function_check_memory_usage)
1868 emit_library_call (chkr_set_right_libfunc, 1,
1870 structure_value_addr, ptr_mode,
1871 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
1872 GEN_INT (MEMORY_USE_WO),
1873 TYPE_MODE (integer_type_node));
1875 if (GET_CODE (struct_value_rtx) == REG)
1876 use_reg (&call_fusage, struct_value_rtx);
1879 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
1881 /* Now do the register loads required for any wholly-register parms or any
1882 parms which are passed both on the stack and in a register. Their
1883 expressions were already evaluated.
1885 Mark all register-parms as living through the call, putting these USE
1886 insns in the CALL_INSN_FUNCTION_USAGE field. */
1888 #ifdef LOAD_ARGS_REVERSED
1889 for (i = num_actuals - 1; i >= 0; i--)
1891 for (i = 0; i < num_actuals; i++)
1894 rtx reg = args[i].reg;
1895 int partial = args[i].partial;
1900 /* Set to non-negative if must move a word at a time, even if just
1901 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1902 we just use a normal move insn. This value can be zero if the
1903 argument is a zero size structure with no fields. */
1904 nregs = (partial ? partial
1905 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1906 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1907 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1910 /* Handle calls that pass values in multiple non-contiguous
1911 locations. The Irix 6 ABI has examples of this. */
1913 if (GET_CODE (reg) == PARALLEL)
1915 emit_group_load (reg, args[i].value,
1916 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1917 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1921 /* If simple case, just do move. If normal partial, store_one_arg
1922 has already loaded the register for us. In all other cases,
1923 load the register(s) from memory. */
1925 else if (nregs == -1)
1926 emit_move_insn (reg, args[i].value);
1928 /* If we have pre-computed the values to put in the registers in
1929 the case of non-aligned structures, copy them in now. */
1931 else if (args[i].n_aligned_regs != 0)
1932 for (j = 0; j < args[i].n_aligned_regs; j++)
1933 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1934 args[i].aligned_regs[j]);
1936 else if (partial == 0 || args[i].pass_on_stack)
1937 move_block_to_reg (REGNO (reg),
1938 validize_mem (args[i].value), nregs,
1941 /* Handle calls that pass values in multiple non-contiguous
1942 locations. The Irix 6 ABI has examples of this. */
1943 if (GET_CODE (reg) == PARALLEL)
1944 use_group_regs (&call_fusage, reg);
1945 else if (nregs == -1)
1946 use_reg (&call_fusage, reg);
1948 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1952 /* Perform postincrements before actually calling the function. */
1955 /* All arguments and registers used for the call must be set up by now! */
1957 /* Generate the actual call instruction. */
1958 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
1959 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1960 valreg, old_inhibit_defer_pop, call_fusage, is_const);
1962 /* If call is cse'able, make appropriate pair of reg-notes around it.
1963 Test valreg so we don't crash; may safely ignore `const'
1964 if return type is void. Disable for PARALLEL return values, because
1965 we have no way to move such values into a pseudo register. */
1966 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
1969 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1972 /* Mark the return value as a pointer if needed. */
1973 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
1975 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
1976 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
1979 /* Construct an "equal form" for the value which mentions all the
1980 arguments in order as well as the function name. */
1981 #ifdef PUSH_ARGS_REVERSED
1982 for (i = 0; i < num_actuals; i++)
1983 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
1985 for (i = num_actuals - 1; i >= 0; i--)
1986 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
1988 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
1990 insns = get_insns ();
1993 emit_libcall_block (insns, temp, valreg, note);
1999 /* Otherwise, just write out the sequence without a note. */
2000 rtx insns = get_insns ();
2007 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2010 /* The return value from a malloc-like function is a pointer. */
2011 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2012 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2014 emit_move_insn (temp, valreg);
2016 /* The return value from a malloc-like function can not alias
2018 last = get_last_insn ();
2020 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2022 /* Write out the sequence. */
2023 insns = get_insns ();
2029 /* For calls to `setjmp', etc., inform flow.c it should complain
2030 if nonvolatile values are live. */
2034 emit_note (name, NOTE_INSN_SETJMP);
2035 current_function_calls_setjmp = 1;
2039 current_function_calls_longjmp = 1;
2041 /* Notice functions that cannot return.
2042 If optimizing, insns emitted below will be dead.
2043 If not optimizing, they will exist, which is useful
2044 if the user uses the `return' command in the debugger. */
2046 if (is_volatile || is_longjmp)
2049 /* If value type not void, return an rtx for the value. */
2051 /* If there are cleanups to be called, don't use a hard reg as target.
2052 We need to double check this and see if it matters anymore. */
2053 if (any_pending_cleanups (1)
2054 && target && REG_P (target)
2055 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2058 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2061 target = const0_rtx;
2063 else if (structure_value_addr)
2065 if (target == 0 || GET_CODE (target) != MEM)
2067 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2068 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2069 structure_value_addr));
2070 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2073 else if (pcc_struct_value)
2075 /* This is the special C++ case where we need to
2076 know what the true target was. We take care to
2077 never use this value more than once in one expression. */
2078 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2079 copy_to_reg (valreg));
2080 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2082 /* Handle calls that return values in multiple non-contiguous locations.
2083 The Irix 6 ABI has examples of this. */
2084 else if (GET_CODE (valreg) == PARALLEL)
2086 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2090 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2091 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2092 preserve_temp_slots (target);
2095 emit_group_store (target, valreg, bytes,
2096 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2098 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2099 && GET_MODE (target) == GET_MODE (valreg))
2100 /* TARGET and VALREG cannot be equal at this point because the latter
2101 would not have REG_FUNCTION_VALUE_P true, while the former would if
2102 it were referring to the same register.
2104 If they refer to the same register, this move will be a no-op, except
2105 when function inlining is being done. */
2106 emit_move_insn (target, valreg);
2107 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2108 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2110 target = copy_to_reg (valreg);
2112 #ifdef PROMOTE_FUNCTION_RETURN
2113 /* If we promoted this return value, make the proper SUBREG. TARGET
2114 might be const0_rtx here, so be careful. */
2115 if (GET_CODE (target) == REG
2116 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2117 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2119 tree type = TREE_TYPE (exp);
2120 int unsignedp = TREE_UNSIGNED (type);
2122 /* If we don't promote as expected, something is wrong. */
2123 if (GET_MODE (target)
2124 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2127 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2128 SUBREG_PROMOTED_VAR_P (target) = 1;
2129 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2133 /* If size of args is variable or this was a constructor call for a stack
2134 argument, restore saved stack-pointer value. */
2136 if (old_stack_level)
2138 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2139 pending_stack_adjust = old_pending_adj;
2140 #ifdef ACCUMULATE_OUTGOING_ARGS
2141 stack_arg_under_construction = old_stack_arg_under_construction;
2142 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2143 stack_usage_map = initial_stack_usage_map;
2146 #ifdef ACCUMULATE_OUTGOING_ARGS
2149 #ifdef REG_PARM_STACK_SPACE
2152 enum machine_mode save_mode = GET_MODE (save_area);
2153 #ifdef ARGS_GROW_DOWNWARD
2155 = gen_rtx_MEM (save_mode,
2156 memory_address (save_mode,
2157 plus_constant (argblock,
2161 = gen_rtx_MEM (save_mode,
2162 memory_address (save_mode,
2163 plus_constant (argblock,
2167 if (save_mode != BLKmode)
2168 emit_move_insn (stack_area, save_area);
2170 emit_block_move (stack_area, validize_mem (save_area),
2171 GEN_INT (high_to_save - low_to_save + 1),
2172 PARM_BOUNDARY / BITS_PER_UNIT);
2176 /* If we saved any argument areas, restore them. */
2177 for (i = 0; i < num_actuals; i++)
2178 if (args[i].save_area)
2180 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2182 = gen_rtx_MEM (save_mode,
2183 memory_address (save_mode,
2184 XEXP (args[i].stack_slot, 0)));
2186 if (save_mode != BLKmode)
2187 emit_move_insn (stack_area, args[i].save_area);
2189 emit_block_move (stack_area, validize_mem (args[i].save_area),
2190 GEN_INT (args[i].size.constant),
2191 PARM_BOUNDARY / BITS_PER_UNIT);
2194 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2195 stack_usage_map = initial_stack_usage_map;
2199 /* If this was alloca, record the new stack level for nonlocal gotos.
2200 Check for the handler slots since we might not have a save area
2201 for non-local gotos. */
2203 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
2204 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2211 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2212 (emitting the queue unless NO_QUEUE is nonzero),
2213 for a value of mode OUTMODE,
2214 with NARGS different arguments, passed as alternating rtx values
2215 and machine_modes to convert them to.
2216 The rtx values should have been passed through protect_from_queue already.
2218 NO_QUEUE will be true if and only if the library call is a `const' call
2219 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2220 to the variable is_const in expand_call.
2222 NO_QUEUE must be true for const calls, because if it isn't, then
2223 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2224 and will be lost if the libcall sequence is optimized away.
2226 NO_QUEUE must be false for non-const calls, because if it isn't, the
2227 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2228 optimized. For instance, the instruction scheduler may incorrectly
2229 move memory references across the non-const call. */
2232 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2235 #ifndef ANSI_PROTOTYPES
2238 enum machine_mode outmode;
2242 /* Total size in bytes of all the stack-parms scanned so far. */
2243 struct args_size args_size;
2244 /* Size of arguments before any adjustments (such as rounding). */
2245 struct args_size original_args_size;
2246 register int argnum;
2251 CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the value, its machine mode, the register it is
   passed in (0 if on the stack), the number of registers holding a
   partially-in-register arg, its stack offset and size, and any save
   area for pre-existing stack data that the push would clobber.  */
2252 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2253 struct args_size offset; struct args_size size; rtx save_area; };
2255 int old_inhibit_defer_pop = inhibit_defer_pop;
2256 rtx call_fusage = 0;
2257 int reg_parm_stack_space = 0;
2258 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2259 /* Define the boundary of the register parm stack space that needs to be
2261 int low_to_save = -1, high_to_save;
2262 rtx save_area = 0; /* Place that it is saved */
2265 #ifdef ACCUMULATE_OUTGOING_ARGS
2266 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2267 char *initial_stack_usage_map = stack_usage_map;
2271 #ifdef REG_PARM_STACK_SPACE
2272 /* Size of the stack reserved for parameter registers. */
2273 #ifdef MAYBE_REG_PARM_STACK_SPACE
2274 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2276 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2280 VA_START (p, nargs);
/* With traditional (non-prototype) C, the named parameters also arrive
   through the varargs mechanism and must be fetched here.  */
2282 #ifndef ANSI_PROTOTYPES
2283 orgfun = va_arg (p, rtx);
2284 no_queue = va_arg (p, int);
2285 outmode = va_arg (p, enum machine_mode);
2286 nargs = va_arg (p, int);
2291 /* Copy all the libcall-arguments out of the varargs data
2292 and into a vector ARGVEC.
2294 Compute how to pass each argument. We only support a very small subset
2295 of the full argument passing conventions to limit complexity here since
2296 library functions shouldn't have many args. */
2298 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2299 bzero ((char *) argvec, nargs * sizeof (struct arg));
2302 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2304 args_size.constant = 0;
2309 for (count = 0; count < nargs; count++)
2311 rtx val = va_arg (p, rtx);
2312 enum machine_mode mode = va_arg (p, enum machine_mode);
2314 /* We cannot convert the arg value to the mode the library wants here;
2315 must do it earlier where we know the signedness of the arg. */
2317 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2320 /* On some machines, there's no way to pass a float to a library fcn.
2321 Pass it as a double instead. */
2322 #ifdef LIBGCC_NEEDS_DOUBLE
2323 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2324 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2327 /* There's no need to call protect_from_queue, because
2328 either emit_move_insn or emit_push_insn will do that. */
2330 /* Make sure it is a reasonable operand for a move or push insn. */
2331 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2332 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2333 val = force_operand (val, NULL_RTX);
2335 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2336 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2338 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2339 be viewed as just an efficiency improvement. */
2340 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2341 emit_move_insn (slot, val);
/* Pass the address of the temporary slot rather than the value itself.  */
2342 val = force_operand (XEXP (slot, 0), NULL_RTX);
2347 argvec[count].value = val;
2348 argvec[count].mode = mode;
2350 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
/* Multiple-location (PARALLEL) argument passing is not supported for
   libcall arguments.  */
2351 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2353 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2354 argvec[count].partial
2355 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2357 argvec[count].partial = 0;
2360 locate_and_pad_parm (mode, NULL_TREE,
2361 argvec[count].reg && argvec[count].partial == 0,
2362 NULL_TREE, &args_size, &argvec[count].offset,
2363 &argvec[count].size);
/* Variable-sized libcall arguments are not supported.  */
2365 if (argvec[count].size.var)
2368 if (reg_parm_stack_space == 0 && argvec[count].partial)
2369 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
/* Count stack space only for arguments at least partly on the stack
   (or when the target always reserves stack for register parms).  */
2371 if (argvec[count].reg == 0 || argvec[count].partial != 0
2372 || reg_parm_stack_space > 0)
2373 args_size.constant += argvec[count].size.constant;
2375 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2379 #ifdef FINAL_REG_PARM_STACK_SPACE
2380 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2384 /* If this machine requires an external definition for library
2385 functions, write one out. */
2386 assemble_external_libcall (fun);
2388 original_args_size = args_size;
/* Round the total argument-block size up to a multiple of STACK_BYTES so
   the stack pointer stays aligned across the call.  */
2389 #ifdef STACK_BOUNDARY
2390 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2391 / STACK_BYTES) * STACK_BYTES);
2394 args_size.constant = MAX (args_size.constant,
2395 reg_parm_stack_space);
2397 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2398 args_size.constant -= reg_parm_stack_space;
/* Record the largest outgoing-argument area this function needs, for
   frame layout.  */
2401 if (args_size.constant > current_function_outgoing_args_size)
2402 current_function_outgoing_args_size = args_size.constant;
2404 #ifdef ACCUMULATE_OUTGOING_ARGS
2405 /* Since the stack pointer will never be pushed, it is possible for
2406 the evaluation of a parm to clobber something we have already
2407 written to the stack. Since most function calls on RISC machines
2408 do not use the stack, this is uncommon, but must work correctly.
2410 Therefore, we save any area of the stack that was already written
2411 and that we are using. Here we set up to do this by making a new
2412 stack usage map from the old one.
2414 Another approach might be to try to reorder the argument
2415 evaluations to avoid this conflicting stack usage. */
2417 needed = args_size.constant;
2419 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2420 /* Since we will be writing into the entire argument area, the
2421 map must be allocated for its entire size, not just the part that
2422 is the responsibility of the caller. */
2423 needed += reg_parm_stack_space;
2426 #ifdef ARGS_GROW_DOWNWARD
2427 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2430 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2433 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
/* Carry over the caller's usage map, then clear the newly covered tail.  */
2435 if (initial_highest_arg_in_use)
2436 bcopy (initial_stack_usage_map, stack_usage_map,
2437 initial_highest_arg_in_use);
2439 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2440 bzero (&stack_usage_map[initial_highest_arg_in_use],
2441 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2444 /* The address of the outgoing argument list must not be copied to a
2445 register here, because argblock would be left pointing to the
2446 wrong place after the call to allocate_dynamic_stack_space below.
2449 argblock = virtual_outgoing_args_rtx;
2450 #else /* not ACCUMULATE_OUTGOING_ARGS */
2451 #ifndef PUSH_ROUNDING
2452 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2456 #ifdef PUSH_ARGS_REVERSED
2457 #ifdef STACK_BOUNDARY
2458 /* If we push args individually in reverse order, perform stack alignment
2459 before the first push (the last arg). */
2461 anti_adjust_stack (GEN_INT (args_size.constant
2462 - original_args_size.constant));
2466 #ifdef PUSH_ARGS_REVERSED
2474 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2475 /* The argument list is the property of the called routine and it
2476 may clobber it. If the fixed area has been used for previous
2477 parameters, we must save and restore it.
2479 Here we compute the boundary of the area that needs to be saved, if any. */
2481 #ifdef ARGS_GROW_DOWNWARD
2482 for (count = 0; count < reg_parm_stack_space + 1; count++)
2484 for (count = 0; count < reg_parm_stack_space; count++)
2487 if (count >= highest_outgoing_arg_in_use
2488 || stack_usage_map[count] == 0)
2491 if (low_to_save == -1)
2492 low_to_save = count;
2494 high_to_save = count;
2497 if (low_to_save >= 0)
2499 int num_to_save = high_to_save - low_to_save + 1;
2500 enum machine_mode save_mode
2501 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2504 /* If we don't have the required alignment, must do this in BLKmode. */
2505 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2506 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2507 save_mode = BLKmode;
2509 #ifdef ARGS_GROW_DOWNWARD
2510 stack_area = gen_rtx_MEM (save_mode,
2511 memory_address (save_mode,
2512 plus_constant (argblock,
2515 stack_area = gen_rtx_MEM (save_mode,
2516 memory_address (save_mode,
2517 plus_constant (argblock,
/* Save the fixed area either as a block copy (BLKmode) or a single
   register move, depending on the mode chosen above.  */
2520 if (save_mode == BLKmode)
2522 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2523 MEM_IN_STRUCT_P (save_area) = 0;
2524 emit_block_move (validize_mem (save_area), stack_area,
2525 GEN_INT (num_to_save),
2526 PARM_BOUNDARY / BITS_PER_UNIT);
2530 save_area = gen_reg_rtx (save_mode);
2531 emit_move_insn (save_area, stack_area);
2536 /* Push the args that need to be pushed. */
2538 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2539 are to be pushed. */
2540 for (count = 0; count < nargs; count++, argnum += inc)
2542 register enum machine_mode mode = argvec[argnum].mode;
2543 register rtx val = argvec[argnum].value;
2544 rtx reg = argvec[argnum].reg;
2545 int partial = argvec[argnum].partial;
2546 #ifdef ACCUMULATE_OUTGOING_ARGS
2547 int lower_bound, upper_bound, i;
/* Only args passed (at least partly) on the stack are pushed here;
   pure register args are loaded later, just before the call.  */
2550 if (! (reg != 0 && partial == 0))
2552 #ifdef ACCUMULATE_OUTGOING_ARGS
2553 /* If this is being stored into a pre-allocated, fixed-size, stack
2554 area, save any previous data at that location. */
2556 #ifdef ARGS_GROW_DOWNWARD
2557 /* stack_slot is negative, but we want to index stack_usage_map
2558 with positive values. */
2559 upper_bound = -argvec[argnum].offset.constant + 1;
2560 lower_bound = upper_bound - argvec[argnum].size.constant;
2562 lower_bound = argvec[argnum].offset.constant;
2563 upper_bound = lower_bound + argvec[argnum].size.constant;
2566 for (i = lower_bound; i < upper_bound; i++)
2567 if (stack_usage_map[i]
2568 /* Don't store things in the fixed argument area at this point;
2569 it has already been saved. */
2570 && i > reg_parm_stack_space)
/* I stopped short of upper_bound only if some in-use byte was found.  */
2573 if (i != upper_bound)
2575 /* We need to make a save area. See what mode we can make it. */
2576 enum machine_mode save_mode
2577 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2580 = gen_rtx_MEM (save_mode,
2581 memory_address (save_mode,
2582 plus_constant (argblock, argvec[argnum].offset.constant)));
2583 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2584 emit_move_insn (argvec[argnum].save_area, stack_area);
2587 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2588 argblock, GEN_INT (argvec[argnum].offset.constant),
2589 reg_parm_stack_space);
2591 #ifdef ACCUMULATE_OUTGOING_ARGS
2592 /* Now mark the segment we just used. */
2593 for (i = lower_bound; i < upper_bound; i++)
2594 stack_usage_map[i] = 1;
2601 #ifndef PUSH_ARGS_REVERSED
2602 #ifdef STACK_BOUNDARY
2603 /* If we pushed args in forward order, perform stack alignment
2604 after pushing the last arg. */
2606 anti_adjust_stack (GEN_INT (args_size.constant
2607 - original_args_size.constant))
2611 #ifdef PUSH_ARGS_REVERSED
/* Make FUN a legitimate call address, noting any registers it uses.  */
2617 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2619 /* Now load any reg parms into their regs. */
2621 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2622 are to be pushed. */
2623 for (count = 0; count < nargs; count++, argnum += inc)
2625 register rtx val = argvec[argnum].value;
2626 rtx reg = argvec[argnum].reg;
2627 int partial = argvec[argnum].partial;
2629 if (reg != 0 && partial == 0)
2630 emit_move_insn (reg, val);
2634 /* For version 1.37, try deleting this entirely. */
2638 /* Any regs containing parms remain in use through the call. */
2639 for (count = 0; count < nargs; count++)
2640 if (argvec[count].reg != 0)
2641 use_reg (&call_fusage, argvec[count].reg);
2643 /* Don't allow popping to be deferred, since then
2644 cse'ing of library calls could delete a call and leave the pop. */
2647 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2648 will set inhibit_defer_pop to that value. */
2650 /* The return type is needed to decide how many bytes the function pops.
2651 Signedness plays no role in that, so for simplicity, we pretend it's
2652 always signed. We also assume that the list of arguments passed has
2653 no impact, so we pretend it is unknown. */
2656 get_identifier (XSTR (orgfun, 0)),
2657 build_function_type (outmode == VOIDmode ? void_type_node
2658 : type_for_mode (outmode, 0), NULL_TREE),
2659 args_size.constant, 0,
2660 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2661 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2662 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2666 /* Now restore inhibit_defer_pop to its actual original value. */
/* Restore any fixed register-parameter stack area we saved above.  */
2669 #ifdef ACCUMULATE_OUTGOING_ARGS
2670 #ifdef REG_PARM_STACK_SPACE
2673 enum machine_mode save_mode = GET_MODE (save_area);
2674 #ifdef ARGS_GROW_DOWNWARD
2676 = gen_rtx_MEM (save_mode,
2677 memory_address (save_mode,
2678 plus_constant (argblock,
2682 = gen_rtx_MEM (save_mode,
2683 memory_address (save_mode,
2684 plus_constant (argblock, low_to_save)));
2687 if (save_mode != BLKmode)
2688 emit_move_insn (stack_area, save_area);
2690 emit_block_move (stack_area, validize_mem (save_area),
2691 GEN_INT (high_to_save - low_to_save + 1),
2692 PARM_BOUNDARY / BITS_PER_UNIT);
2696 /* If we saved any argument areas, restore them. */
2697 for (count = 0; count < nargs; count++)
2698 if (argvec[count].save_area)
2700 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2702 = gen_rtx_MEM (save_mode,
2703 memory_address (save_mode,
2704 plus_constant (argblock, argvec[count].offset.constant)));
2706 emit_move_insn (stack_area, argvec[count].save_area);
/* Put the stack-usage bookkeeping back the way the caller left it.  */
2709 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2710 stack_usage_map = initial_stack_usage_map;
2714 /* Like emit_library_call except that an extra argument, VALUE,
2715 comes second and says where to store the result.
2716 (If VALUE is zero, this function chooses a convenient way
2717 to return the value.)
2719 This function returns an rtx for where the value is to be found.
2720 If VALUE is nonzero, VALUE is returned. */
2723 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2724 enum machine_mode outmode, int nargs, ...))
2726 #ifndef ANSI_PROTOTYPES
2730 enum machine_mode outmode;
2734 /* Total size in bytes of all the stack-parms scanned so far. */
2735 struct args_size args_size;
2736 /* Size of arguments before any adjustments (such as rounding). */
2737 struct args_size original_args_size;
2738 register int argnum;
2743 CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the value, its machine mode, the register it is
   passed in (0 if on the stack), the number of registers holding a
   partially-in-register arg, its stack offset and size, and any save
   area for pre-existing stack data that the push would clobber.  */
2744 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2745 struct args_size offset; struct args_size size; rtx save_area; };
2747 int old_inhibit_defer_pop = inhibit_defer_pop;
2748 rtx call_fusage = 0;
2750 int pcc_struct_value = 0;
2751 int struct_value_size = 0;
2753 int reg_parm_stack_space = 0;
2754 #ifdef ACCUMULATE_OUTGOING_ARGS
2758 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2759 /* Define the boundary of the register parm stack space that needs to be
2761 int low_to_save = -1, high_to_save;
2762 rtx save_area = 0; /* Place that it is saved */
2765 #ifdef ACCUMULATE_OUTGOING_ARGS
2766 /* Size of the stack reserved for parameter registers. */
2767 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2768 char *initial_stack_usage_map = stack_usage_map;
2771 #ifdef REG_PARM_STACK_SPACE
2772 #ifdef MAYBE_REG_PARM_STACK_SPACE
2773 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2775 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2779 VA_START (p, nargs);
/* With traditional (non-prototype) C, the named parameters also arrive
   through the varargs mechanism and must be fetched here.  */
2781 #ifndef ANSI_PROTOTYPES
2782 orgfun = va_arg (p, rtx);
2783 value = va_arg (p, rtx);
2784 no_queue = va_arg (p, int);
2785 outmode = va_arg (p, enum machine_mode);
2786 nargs = va_arg (p, int);
2789 is_const = no_queue;
2792 /* If this kind of value comes back in memory,
2793 decide where in memory it should come back. */
2794 if (aggregate_value_p (type_for_mode (outmode, 0)))
2796 #ifdef PCC_STATIC_STRUCT_RETURN
2798 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2800 mem_value = gen_rtx_MEM (outmode, pointer_reg);
2801 pcc_struct_value = 1;
2803 value = gen_reg_rtx (outmode);
2804 #else /* not PCC_STATIC_STRUCT_RETURN */
2805 struct_value_size = GET_MODE_SIZE (outmode);
/* Reuse VALUE itself as the return slot when it is already memory;
   otherwise get a fresh stack temporary.  */
2806 if (value != 0 && GET_CODE (value) == MEM)
2809 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2812 /* This call returns a big structure. */
2816 /* ??? Unfinished: must pass the memory address as an argument. */
2818 /* Copy all the libcall-arguments out of the varargs data
2819 and into a vector ARGVEC.
2821 Compute how to pass each argument. We only support a very small subset
2822 of the full argument passing conventions to limit complexity here since
2823 library functions shouldn't have many args. */
/* One extra slot: the structure-value address may be passed as a
   leading hidden argument.  */
2825 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2826 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
2828 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2830 args_size.constant = 0;
2837 /* If there's a structure value address to be passed,
2838 either pass it in the special place, or pass it as an extra argument. */
2839 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2841 rtx addr = XEXP (mem_value, 0);
2844 /* Make sure it is a reasonable operand for a move or push insn. */
2845 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2846 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2847 addr = force_operand (addr, NULL_RTX);
2849 argvec[count].value = addr;
2850 argvec[count].mode = Pmode;
2851 argvec[count].partial = 0;
2853 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
2854 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2855 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
2859 locate_and_pad_parm (Pmode, NULL_TREE,
2860 argvec[count].reg && argvec[count].partial == 0,
2861 NULL_TREE, &args_size, &argvec[count].offset,
2862 &argvec[count].size);
2865 if (argvec[count].reg == 0 || argvec[count].partial != 0
2866 || reg_parm_stack_space > 0)
2867 args_size.constant += argvec[count].size.constant;
2869 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
/* COUNT continues from the hidden argument, if any, so the user args
   fill the remaining ARGVEC slots.  */
2874 for (; count < nargs; count++)
2876 rtx val = va_arg (p, rtx);
2877 enum machine_mode mode = va_arg (p, enum machine_mode);
2879 /* We cannot convert the arg value to the mode the library wants here;
2880 must do it earlier where we know the signedness of the arg. */
2882 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2885 /* On some machines, there's no way to pass a float to a library fcn.
2886 Pass it as a double instead. */
2887 #ifdef LIBGCC_NEEDS_DOUBLE
2888 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2889 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2892 /* There's no need to call protect_from_queue, because
2893 either emit_move_insn or emit_push_insn will do that. */
2895 /* Make sure it is a reasonable operand for a move or push insn. */
2896 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2897 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2898 val = force_operand (val, NULL_RTX);
2900 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2901 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2903 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2904 be viewed as just an efficiency improvement. */
2905 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2906 emit_move_insn (slot, val);
/* NOTE(review): unlike emit_library_call, the slot address is not run
   through force_operand here -- confirm this is intentional.  */
2907 val = XEXP (slot, 0);
2912 argvec[count].value = val;
2913 argvec[count].mode = mode;
2915 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
/* Multiple-location (PARALLEL) argument passing is not supported for
   libcall arguments.  */
2916 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2918 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2919 argvec[count].partial
2920 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2922 argvec[count].partial = 0;
2925 locate_and_pad_parm (mode, NULL_TREE,
2926 argvec[count].reg && argvec[count].partial == 0,
2927 NULL_TREE, &args_size, &argvec[count].offset,
2928 &argvec[count].size);
/* Variable-sized libcall arguments are not supported.  */
2930 if (argvec[count].size.var)
2933 if (reg_parm_stack_space == 0 && argvec[count].partial)
2934 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
/* Count stack space only for arguments at least partly on the stack
   (or when the target always reserves stack for register parms).  */
2936 if (argvec[count].reg == 0 || argvec[count].partial != 0
2937 || reg_parm_stack_space > 0)
2938 args_size.constant += argvec[count].size.constant;
2940 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2944 #ifdef FINAL_REG_PARM_STACK_SPACE
2945 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2948 /* If this machine requires an external definition for library
2949 functions, write one out. */
2950 assemble_external_libcall (fun);
2952 original_args_size = args_size;
/* Round the total argument-block size up to a multiple of STACK_BYTES so
   the stack pointer stays aligned across the call.  */
2953 #ifdef STACK_BOUNDARY
2954 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2955 / STACK_BYTES) * STACK_BYTES);
2958 args_size.constant = MAX (args_size.constant,
2959 reg_parm_stack_space);
2961 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2962 args_size.constant -= reg_parm_stack_space;
/* Record the largest outgoing-argument area this function needs, for
   frame layout.  */
2965 if (args_size.constant > current_function_outgoing_args_size)
2966 current_function_outgoing_args_size = args_size.constant;
2968 #ifdef ACCUMULATE_OUTGOING_ARGS
2969 /* Since the stack pointer will never be pushed, it is possible for
2970 the evaluation of a parm to clobber something we have already
2971 written to the stack. Since most function calls on RISC machines
2972 do not use the stack, this is uncommon, but must work correctly.
2974 Therefore, we save any area of the stack that was already written
2975 and that we are using. Here we set up to do this by making a new
2976 stack usage map from the old one.
2978 Another approach might be to try to reorder the argument
2979 evaluations to avoid this conflicting stack usage. */
2981 needed = args_size.constant;
2983 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2984 /* Since we will be writing into the entire argument area, the
2985 map must be allocated for its entire size, not just the part that
2986 is the responsibility of the caller. */
2987 needed += reg_parm_stack_space;
2990 #ifdef ARGS_GROW_DOWNWARD
2991 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2994 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2997 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
/* Carry over the caller's usage map, then clear the newly covered tail.  */
2999 if (initial_highest_arg_in_use)
3000 bcopy (initial_stack_usage_map, stack_usage_map,
3001 initial_highest_arg_in_use);
3003 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3004 bzero (&stack_usage_map[initial_highest_arg_in_use],
3005 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3008 /* The address of the outgoing argument list must not be copied to a
3009 register here, because argblock would be left pointing to the
3010 wrong place after the call to allocate_dynamic_stack_space below.
3013 argblock = virtual_outgoing_args_rtx;
3014 #else /* not ACCUMULATE_OUTGOING_ARGS */
3015 #ifndef PUSH_ROUNDING
3016 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3020 #ifdef PUSH_ARGS_REVERSED
3021 #ifdef STACK_BOUNDARY
3022 /* If we push args individually in reverse order, perform stack alignment
3023 before the first push (the last arg). */
3025 anti_adjust_stack (GEN_INT (args_size.constant
3026 - original_args_size.constant));
3030 #ifdef PUSH_ARGS_REVERSED
3038 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3039 /* The argument list is the property of the called routine and it
3040 may clobber it. If the fixed area has been used for previous
3041 parameters, we must save and restore it.
3043 Here we compute the boundary of the area that needs to be saved, if any. */
3045 #ifdef ARGS_GROW_DOWNWARD
3046 for (count = 0; count < reg_parm_stack_space + 1; count++)
3048 for (count = 0; count < reg_parm_stack_space; count++)
3051 if (count >= highest_outgoing_arg_in_use
3052 || stack_usage_map[count] == 0)
3055 if (low_to_save == -1)
3056 low_to_save = count;
3058 high_to_save = count;
3061 if (low_to_save >= 0)
3063 int num_to_save = high_to_save - low_to_save + 1;
3064 enum machine_mode save_mode
3065 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3068 /* If we don't have the required alignment, must do this in BLKmode. */
3069 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3070 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3071 save_mode = BLKmode;
3073 #ifdef ARGS_GROW_DOWNWARD
3074 stack_area = gen_rtx_MEM (save_mode,
3075 memory_address (save_mode,
3076 plus_constant (argblock,
3079 stack_area = gen_rtx_MEM (save_mode,
3080 memory_address (save_mode,
3081 plus_constant (argblock,
/* Save the fixed area either as a block copy (BLKmode) or a single
   register move, depending on the mode chosen above.  */
3084 if (save_mode == BLKmode)
3086 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3087 MEM_IN_STRUCT_P (save_area) = 0;
3088 emit_block_move (validize_mem (save_area), stack_area,
3089 GEN_INT (num_to_save),
3090 PARM_BOUNDARY / BITS_PER_UNIT);
3094 save_area = gen_reg_rtx (save_mode);
3095 emit_move_insn (save_area, stack_area);
3100 /* Push the args that need to be pushed. */
3102 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3103 are to be pushed. */
3104 for (count = 0; count < nargs; count++, argnum += inc)
3106 register enum machine_mode mode = argvec[argnum].mode;
3107 register rtx val = argvec[argnum].value;
3108 rtx reg = argvec[argnum].reg;
3109 int partial = argvec[argnum].partial;
3110 #ifdef ACCUMULATE_OUTGOING_ARGS
3111 int lower_bound, upper_bound, i;
/* Only args passed (at least partly) on the stack are pushed here;
   pure register args are loaded later, just before the call.  */
3114 if (! (reg != 0 && partial == 0))
3116 #ifdef ACCUMULATE_OUTGOING_ARGS
3117 /* If this is being stored into a pre-allocated, fixed-size, stack
3118 area, save any previous data at that location. */
3120 #ifdef ARGS_GROW_DOWNWARD
3121 /* stack_slot is negative, but we want to index stack_usage_map
3122 with positive values. */
3123 upper_bound = -argvec[argnum].offset.constant + 1;
3124 lower_bound = upper_bound - argvec[argnum].size.constant;
3126 lower_bound = argvec[argnum].offset.constant;
3127 upper_bound = lower_bound + argvec[argnum].size.constant;
3130 for (i = lower_bound; i < upper_bound; i++)
3131 if (stack_usage_map[i]
3132 /* Don't store things in the fixed argument area at this point;
3133 it has already been saved. */
3134 && i > reg_parm_stack_space)
/* I stopped short of upper_bound only if some in-use byte was found.  */
3137 if (i != upper_bound)
3139 /* We need to make a save area. See what mode we can make it. */
3140 enum machine_mode save_mode
3141 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3144 = gen_rtx_MEM (save_mode,
3145 memory_address (save_mode,
3146 plus_constant (argblock,
3147 argvec[argnum].offset.constant)));
3148 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3149 emit_move_insn (argvec[argnum].save_area, stack_area);
3152 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3153 argblock, GEN_INT (argvec[argnum].offset.constant),
3154 reg_parm_stack_space);
3156 #ifdef ACCUMULATE_OUTGOING_ARGS
3157 /* Now mark the segment we just used. */
3158 for (i = lower_bound; i < upper_bound; i++)
3159 stack_usage_map[i] = 1;
3166 #ifndef PUSH_ARGS_REVERSED
3167 #ifdef STACK_BOUNDARY
3168 /* If we pushed args in forward order, perform stack alignment
3169 after pushing the last arg. */
3171 anti_adjust_stack (GEN_INT (args_size.constant
3172 - original_args_size.constant));
3176 #ifdef PUSH_ARGS_REVERSED
/* Make FUN a legitimate call address, noting any registers it uses.  */
3182 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3184 /* Now load any reg parms into their regs. */
3186 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3187 are to be pushed. */
3188 for (count = 0; count < nargs; count++, argnum += inc)
3190 register rtx val = argvec[argnum].value;
3191 rtx reg = argvec[argnum].reg;
3192 int partial = argvec[argnum].partial;
3194 if (reg != 0 && partial == 0)
3195 emit_move_insn (reg, val);
3200 /* For version 1.37, try deleting this entirely. */
3205 /* Any regs containing parms remain in use through the call. */
3206 for (count = 0; count < nargs; count++)
3207 if (argvec[count].reg != 0)
3208 use_reg (&call_fusage, argvec[count].reg);
3210 /* Pass the function the address in which to return a structure value. */
3211 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3213 emit_move_insn (struct_value_rtx,
3215 force_operand (XEXP (mem_value, 0),
3217 if (GET_CODE (struct_value_rtx) == REG)
3218 use_reg (&call_fusage, struct_value_rtx);
3221 /* Don't allow popping to be deferred, since then
3222 cse'ing of library calls could delete a call and leave the pop. */
3225 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3226 will set inhibit_defer_pop to that value. */
3227 /* See the comment in emit_library_call about the function type we build
3231 get_identifier (XSTR (orgfun, 0)),
3232 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3233 args_size.constant, struct_value_size,
3234 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3235 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3236 old_inhibit_defer_pop + 1, call_fusage, is_const);
3238 /* Now restore inhibit_defer_pop to its actual original value. */
3243 /* Copy the value to the right place. */
3244 if (outmode != VOIDmode)
/* Memory-returned: copy from the return slot to VALUE if they differ.  */
3250 if (value != mem_value)
3251 emit_move_insn (value, mem_value);
/* Register-returned: move the hard return register into VALUE, or just
   report the hard register when the caller gave no destination.  */
3253 else if (value != 0)
3254 emit_move_insn (value, hard_libcall_value (outmode));
3256 value = hard_libcall_value (outmode);
/* Restore any fixed register-parameter stack area we saved above.  */
3259 #ifdef ACCUMULATE_OUTGOING_ARGS
3260 #ifdef REG_PARM_STACK_SPACE
3263 enum machine_mode save_mode = GET_MODE (save_area);
3264 #ifdef ARGS_GROW_DOWNWARD
3266 = gen_rtx_MEM (save_mode,
3267 memory_address (save_mode,
3268 plus_constant (argblock,
3272 = gen_rtx_MEM (save_mode,
3273 memory_address (save_mode,
3274 plus_constant (argblock, low_to_save)));
3276 if (save_mode != BLKmode)
3277 emit_move_insn (stack_area, save_area);
3279 emit_block_move (stack_area, validize_mem (save_area),
3280 GEN_INT (high_to_save - low_to_save + 1),
3281 PARM_BOUNDARY / BITS_PER_UNIT);
3285 /* If we saved any argument areas, restore them. */
3286 for (count = 0; count < nargs; count++)
3287 if (argvec[count].save_area)
3289 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3291 = gen_rtx_MEM (save_mode,
3292 memory_address (save_mode, plus_constant (argblock,
3293 argvec[count].offset.constant)));
3295 emit_move_insn (stack_area, argvec[count].save_area);
/* Put the stack-usage bookkeeping back the way the caller left it.  */
3298 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3299 stack_usage_map = initial_stack_usage_map;
3306 /* Return an rtx which represents a suitable home on the stack
3307 given TYPE, the type of the argument looking for a home.
3308 This is called only for BLKmode arguments.
3310 SIZE is the size needed for this target.
3311 ARGS_ADDR is the address of the bottom of the argument block for this call.
3312 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3313 if this machine uses push insns. */
/* NOTE(review): old-style (K&R) definition; the storage class/return type
   and the declarations of TYPE, SIZE and ARGS_ADDR are not visible in this
   excerpt.  The MEM built at the end implies the function returns an rtx.  */
3316 target_for_arg (type, size, args_addr, offset)
3320 struct args_size offset;
3323 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3325 /* We do not call memory_address if possible,
3326 because we want to address as close to the stack
3327 as possible. For non-variable sized arguments,
3328 this will be stack-pointer relative addressing. */
/* Constant offset: build the address directly with plus_constant,
   bypassing memory_address so the address stays in its raw
   stack-pointer-relative form.  */
3329 if (GET_CODE (offset_rtx) == CONST_INT)
3330 target = plus_constant (args_addr, INTVAL (offset_rtx));
3333 /* I have no idea how to guarantee that this
3334 will work in the presence of register parameters. */
/* Variable offset: form args_addr + offset explicitly and then
   legitimize it as a memory address.  */
3335 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3336 target = memory_address (QImode, target);
/* Hand back a BLKmode MEM at the chosen address; the caller stores the
   argument's bytes through it.  */
3339 return gen_rtx_MEM (BLKmode, target);
3343 /* Store a single argument for a function call
3344 into the register or memory area where it must be passed.
3345 *ARG describes the argument value and where to pass it.
3347 ARGBLOCK is the address of the stack-block for all the arguments,
3348 or 0 on a machine where arguments are pushed individually.
3350 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3351 so we must be careful about how the stack is used.
3353 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3354 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is defined,
3355 to indicate that we need not worry about saving and restoring the stack.
3357 FNDECL is the declaration of the function we are calling. */
3360 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3361 reg_parm_stack_space)
3362 struct arg_data *arg;
3366 int reg_parm_stack_space;
3368 register tree pval = arg->tree_value;
3372 #ifdef ACCUMULATE_OUTGOING_ARGS
3373 int i, lower_bound, upper_bound;
3376 if (TREE_CODE (pval) == ERROR_MARK)
3379 /* Push a new temporary level for any temporaries we make for
3383 #ifdef ACCUMULATE_OUTGOING_ARGS
3384 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3385 save any previous data at that location. */
3386 if (argblock && ! variable_size && arg->stack)
3388 #ifdef ARGS_GROW_DOWNWARD
3389 /* stack_slot is negative, but we want to index stack_usage_map
3390 with positive values. */
3391 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3392 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3396 lower_bound = upper_bound - arg->size.constant;
3398 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3399 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3403 upper_bound = lower_bound + arg->size.constant;
3406 for (i = lower_bound; i < upper_bound; i++)
3407 if (stack_usage_map[i]
3408 /* Don't store things in the fixed argument area at this point;
3409 it has already been saved. */
3410 && i > reg_parm_stack_space)
3413 if (i != upper_bound)
3415 /* We need to make a save area. See what mode we can make it. */
3416 enum machine_mode save_mode
3417 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3419 = gen_rtx_MEM (save_mode,
3420 memory_address (save_mode,
3421 XEXP (arg->stack_slot, 0)));
3423 if (save_mode == BLKmode)
3425 arg->save_area = assign_stack_temp (BLKmode,
3426 arg->size.constant, 0);
3427 MEM_IN_STRUCT_P (arg->save_area)
3428 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
3429 preserve_temp_slots (arg->save_area);
3430 emit_block_move (validize_mem (arg->save_area), stack_area,
3431 GEN_INT (arg->size.constant),
3432 PARM_BOUNDARY / BITS_PER_UNIT);
3436 arg->save_area = gen_reg_rtx (save_mode);
3437 emit_move_insn (arg->save_area, stack_area);
3443 /* If this isn't going to be placed on both the stack and in registers,
3444 set up the register and number of words. */
3445 if (! arg->pass_on_stack)
3446 reg = arg->reg, partial = arg->partial;
3448 if (reg != 0 && partial == 0)
3449 /* Being passed entirely in a register. We shouldn't be called in
3453 /* If this arg needs special alignment, don't load the registers
3455 if (arg->n_aligned_regs != 0)
3458 /* If this is being passed partially in a register, we can't evaluate
3459 it directly into its stack slot. Otherwise, we can. */
3460 if (arg->value == 0)
3462 #ifdef ACCUMULATE_OUTGOING_ARGS
3463 /* stack_arg_under_construction is nonzero if a function argument is
3464 being evaluated directly into the outgoing argument list and
3465 expand_call must take special action to preserve the argument list
3466 if it is called recursively.
3468 For scalar function arguments stack_usage_map is sufficient to
3469 determine which stack slots must be saved and restored. Scalar
3470 arguments in general have pass_on_stack == 0.
3472 If this argument is initialized by a function which takes the
3473 address of the argument (a C++ constructor or a C function
3474 returning a BLKmode structure), then stack_usage_map is
3475 insufficient and expand_call must push the stack around the
3476 function call. Such arguments have pass_on_stack == 1.
3478 Note that it is always safe to set stack_arg_under_construction,
3479 but this generates suboptimal code if set when not needed. */
3481 if (arg->pass_on_stack)
3482 stack_arg_under_construction++;
3484 arg->value = expand_expr (pval,
3486 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3487 ? NULL_RTX : arg->stack,
3490 /* If we are promoting object (or for any other reason) the mode
3491 doesn't agree, convert the mode. */
3493 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3494 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3495 arg->value, arg->unsignedp);
3497 #ifdef ACCUMULATE_OUTGOING_ARGS
3498 if (arg->pass_on_stack)
3499 stack_arg_under_construction--;
3503 /* Don't allow anything left on stack from computation
3504 of argument to alloca. */
3506 do_pending_stack_adjust ();
3508 if (arg->value == arg->stack)
3510 /* If the value is already in the stack slot, we are done moving
3512 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3514 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3515 XEXP (arg->stack, 0), ptr_mode,
3516 ARGS_SIZE_RTX (arg->size),
3517 TYPE_MODE (sizetype),
3518 GEN_INT (MEMORY_USE_RW),
3519 TYPE_MODE (integer_type_node));
3522 else if (arg->mode != BLKmode)
3526 /* Argument is a scalar, not entirely passed in registers.
3527 (If part is passed in registers, arg->partial says how much
3528 and emit_push_insn will take care of putting it there.)
3530 Push it, and if its size is less than the
3531 amount of space allocated to it,
3532 also bump stack pointer by the additional space.
3533 Note that in C the default argument promotions
3534 will prevent such mismatches. */
3536 size = GET_MODE_SIZE (arg->mode);
3537 /* Compute how much space the push instruction will push.
3538 On many machines, pushing a byte will advance the stack
3539 pointer by a halfword. */
3540 #ifdef PUSH_ROUNDING
3541 size = PUSH_ROUNDING (size);
3545 /* Compute how much space the argument should get:
3546 round up to a multiple of the alignment for arguments. */
3547 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3548 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3549 / (PARM_BOUNDARY / BITS_PER_UNIT))
3550 * (PARM_BOUNDARY / BITS_PER_UNIT));
3552 /* This isn't already where we want it on the stack, so put it there.
3553 This can either be done with push or copy insns. */
3554 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3555 partial, reg, used - size, argblock,
3556 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3560 /* BLKmode, at least partly to be pushed. */
3562 register int excess;
3565 /* Pushing a nonscalar.
3566 If part is passed in registers, PARTIAL says how much
3567 and emit_push_insn will take care of putting it there. */
3569 /* Round its size up to a multiple
3570 of the allocation unit for arguments. */
3572 if (arg->size.var != 0)
3575 size_rtx = ARGS_SIZE_RTX (arg->size);
3579 /* PUSH_ROUNDING has no effect on us, because
3580 emit_push_insn for BLKmode is careful to avoid it. */
3581 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3582 + partial * UNITS_PER_WORD);
3583 size_rtx = expr_size (pval);
3586 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3587 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3588 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3589 reg_parm_stack_space);
3593 /* Unless this is a partially-in-register argument, the argument is now
3596 ??? Note that this can change arg->value from arg->stack to
3597 arg->stack_slot and it matters when they are not the same.
3598 It isn't totally clear that this is correct in all cases. */
3600 arg->value = arg->stack_slot;
3602 /* Once we have pushed something, pops can't safely
3603 be deferred during the rest of the arguments. */
3606 /* ANSI doesn't require a sequence point here,
3607 but PCC has one, so this will avoid some problems. */
3610 /* Free any temporary slots made in processing this argument. Show
3611 that we might have taken the address of something and pushed that
3613 preserve_temp_slots (NULL_RTX);
3617 #ifdef ACCUMULATE_OUTGOING_ARGS
3618 /* Now mark the segment we just used. */
3619 if (argblock && ! variable_size && arg->stack)
3620 for (i = lower_bound; i < upper_bound; i++)
3621 stack_usage_map[i] = 1;