1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
32 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
33 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
51 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
53 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): this excerpt is non-contiguous -- the `struct arg_data'
   opening line and several member declarations fall in the gaps between the
   visible lines.  Code below is left byte-identical; only comments were
   touched.  */
57 /* Tree node for this argument. */
59 /* Mode for value; TYPE_MODE unless promoted. */
60 enum machine_mode mode;
61 /* Current RTL value for argument, or 0 if it isn't precomputed. */
63 /* Initially-computed RTL value for argument; only for const functions. */
65 /* Register to pass this argument in, 0 if passed on stack, or a
66 PARALLEL if the arg is to be copied into multiple non-contiguous
69 /* If REG was promoted from the actual mode of the argument expression,
70 indicates whether the promotion is sign- or zero-extended. */
72 /* Number of registers to use. 0 means put the whole arg in registers.
73 Also 0 if not passed in registers. */
75 /* Non-zero if argument must be passed on stack.
76 Note that some arguments may be passed on the stack
77 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
78 pass_on_stack identifies arguments that *cannot* go in registers. */
80 /* Offset of this argument from beginning of stack-args. */
81 struct args_size offset;
82 /* Similar, but offset to the start of the stack slot. Different from
83 OFFSET if this arg pads downward. */
84 struct args_size slot_offset;
85 /* Size of this argument on the stack, rounded up for any padding it gets,
86 parts of the argument passed in registers do not count.
87 If REG_PARM_STACK_SPACE is defined, then register parms
88 are counted here as well. */
89 struct args_size size;
90 /* Location on the stack at which parameter should be stored. The store
91 has already been done if STACK == VALUE. */
93 /* Location on the stack of the start of this argument slot. This can
94 differ from STACK if this arg pads downward. This location is known
95 to be aligned to FUNCTION_ARG_BOUNDARY. */
97 #ifdef ACCUMULATE_OUTGOING_ARGS
98 /* Place that this stack area has been saved, if needed. */
101 /* If an argument's alignment does not permit direct copying into registers,
102 copy in smaller-sized pieces into pseudos. These are stored in a
103 block pointed to by this field. The next field says how many
104 word-sized pseudos we made. */
109 #ifdef ACCUMULATE_OUTGOING_ARGS
110 /* A vector of one char per byte of stack space. A byte is non-zero if
111 the corresponding stack location has been used.
112 This vector is used to prevent a function call within an argument from
113 clobbering any stack already set up. */
114 static char *stack_usage_map;
116 /* Size of STACK_USAGE_MAP. */
117 static int highest_outgoing_arg_in_use;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 int stack_arg_under_construction;
/* Forward declarations for the static helpers defined below.  The PROTO
   macro expands to an ANSI prototype or a K&R-style empty parameter list
   depending on the host compiler.  NOTE(review): several continuation lines
   of these declarations are missing from this non-contiguous excerpt.  */
127 static int calls_function PROTO ((tree, int));
128 static int calls_function_1 PROTO ((tree, int));
129 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
130 HOST_WIDE_INT, rtx, rtx,
132 static void special_function_p PROTO ((char *, tree, int *, int *,
134 static void precompute_register_parameters PROTO ((int, struct arg_data *,
136 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
138 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
140 static int finalize_must_preallocate PROTO ((int, int,
142 struct args_size *));
143 static void precompute_arguments PROTO ((int, int, int,
145 struct args_size *));
146 static int compute_argument_block_size PROTO ((int,
147 struct args_size *));
148 static void initialize_argument_information PROTO ((int,
155 static void compute_argument_addresses PROTO ((struct arg_data *,
157 static rtx rtx_for_function_call PROTO ((tree, tree));
/* The save/restore pair below only exists when outgoing args accumulate
   and a fixed register-parameter stack area is reserved.  */
159 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
160 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
161 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
164 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
167 If WHICH is 0, return 1 if EXP contains a call to any function.
168 Actually, we only need return 1 if evaluating EXP would require pushing
169 arguments on the stack, but that is too difficult to compute, so we just
170 assume any function call might require the stack. */
/* List of SAVE_EXPRs already visited during the current walk; used by
   calls_function_1 to avoid scanning the same SAVE_EXPR twice.  */
172 static tree calls_function_save_exprs;
/* Entry point: walk EXP and report whether it contains a function call
   (WHICH == 0) or an alloca-like call (WHICH == 1).  NOTE(review):
   declaration and return lines are missing from this excerpt.  */
175 calls_function (exp, which)
/* Reset the visited-SAVE_EXPR list before and after the walk so no stale
   tree nodes leak between independent queries.  */
180 calls_function_save_exprs = 0;
181 val = calls_function_1 (exp, which);
182 calls_function_save_exprs = 0;
/* Recursive worker for calls_function; WHICH has the same meaning as there.
   NOTE(review): this excerpt is non-contiguous -- switch labels, returns and
   some braces fall in the gaps; code left byte-identical.  */
187 calls_function_1 (exp, which)
192 enum tree_code code = TREE_CODE (exp);
193 int type = TREE_CODE_CLASS (code);
194 int length = tree_code_length[(int) code];
196 /* If this code is language-specific, we don't know what it will do. */
197 if ((int) code >= NUM_TREE_CODES)
200 /* Only expressions and references can contain calls. */
201 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* For a direct call through the address of a FUNCTION_DECL, when WHICH is 1
   only alloca (or a callee already compiled and known to call alloca)
   counts as interesting.  */
210 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
211 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
214 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
216 if ((DECL_BUILT_IN (fndecl)
217 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
218 || (DECL_SAVED_INSNS (fndecl)
219 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
220 & FUNCTION_FLAGS_CALLS_ALLOCA)))
224 /* Third operand is RTL. */
/* A SAVE_EXPR whose RTL is already emitted cannot push new args; and each
   SAVE_EXPR is visited at most once via calls_function_save_exprs.  */
229 if (SAVE_EXPR_RTL (exp) != 0)
231 if (value_member (exp, calls_function_save_exprs))
233 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
234 calls_function_save_exprs);
235 return (TREE_OPERAND (exp, 0) != 0
236 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* For a BLOCK, scan the initializers of its variables, then recurse into
   each subblock.  */
242 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
243 if (DECL_INITIAL (local) != 0
244 && calls_function_1 (DECL_INITIAL (local), which))
248 register tree subblock;
250 for (subblock = BLOCK_SUBBLOCKS (exp);
252 subblock = TREE_CHAIN (subblock))
253 if (calls_function_1 (subblock, which))
258 case METHOD_CALL_EXPR:
262 case WITH_CLEANUP_EXPR:
/* Default case: recurse into every operand of the expression.  */
273 for (i = 0; i < length; i++)
274 if (TREE_OPERAND (exp, i) != 0
275 && calls_function_1 (TREE_OPERAND (exp, i), which))
281 /* Force FUNEXP into a form suitable for the address of a CALL,
282 and return that as an rtx. Also load the static chain register
283 if FNDECL is a nested function.
285 CALL_FUSAGE points to a variable holding the prospective
286 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): non-contiguous excerpt -- the parameter declarations,
   #endif lines and return statement fall in the gaps.  */
289 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
295 rtx static_chain_value = 0;
297 funexp = protect_from_queue (funexp, 0);
300 /* Get possible static chain value for nested function in C. */
301 static_chain_value = lookup_static_chain (fndecl);
303 /* Make a valid memory address and copy constants thru pseudo-regs,
304 but not for a constant address if -fno-function-cse. */
305 if (GET_CODE (funexp) != SYMBOL_REF)
306 /* If we are using registers for parameters, force the
307 function address into a register now. */
308 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
309 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
310 : memory_address (FUNCTION_MODE, funexp));
/* Under function-cse (and unless the call is recursive on targets that
   forbid cse of recursive calls), copy the address to a register so the
   cse pass can share it between calls.  */
313 #ifndef NO_FUNCTION_CSE
314 if (optimize && ! flag_no_function_cse)
315 #ifdef NO_RECURSIVE_FUNCTION_CSE
316 if (fndecl != current_function_decl)
318 funexp = force_reg (Pmode, funexp);
/* If a static chain is needed, load it and record the register in
   CALL_FUSAGE so later passes keep the load alive.  */
322 if (static_chain_value != 0)
324 emit_move_insn (static_chain_rtx, static_chain_value);
326 if (GET_CODE (static_chain_rtx) == REG)
327 use_reg (call_fusage, static_chain_rtx);
333 /* Generate instructions to call function FUNEXP,
334 and optionally pop the results.
335 The CALL_INSN is the first insn generated.
337 FNDECL is the declaration node of the function. This is given to the
338 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
340 FUNTYPE is the data type of the function. This is given to the macro
341 RETURN_POPS_ARGS to determine whether this function pops its own args.
342 We used to allow an identifier for library functions, but that doesn't
343 work when the return type is an aggregate type and the calling convention
344 says that the pointer to this aggregate is to be popped by the callee.
346 STACK_SIZE is the number of bytes of arguments on the stack,
347 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
348 This is both to put into the call insn and
349 to generate explicit popping code if necessary.
351 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
352 It is zero if this call doesn't want a structure value.
354 NEXT_ARG_REG is the rtx that results from executing
355 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
356 just after all the args have had their registers assigned.
357 This could be whatever you like, but normally it is the first
358 arg-register beyond those used for args in this call,
359 or 0 if all the arg-registers are used in this call.
360 It is passed on to `gen_call' so you can put this info in the call insn.
362 VALREG is a hard register in which a value is returned,
363 or 0 if the call does not return a value.
365 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
366 the args to this call were processed.
367 We restore `inhibit_defer_pop' to that value.
369 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
370 denote registers used by the called function.
372 IS_CONST is true if this is a `const' call. */
/* NOTE(review): non-contiguous excerpt -- parameter declarations, several
   conditionals, abort() calls and #endif lines fall in the gaps; code left
   byte-identical.  */
375 emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
376 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
379 tree fndecl ATTRIBUTE_UNUSED;
380 tree funtype ATTRIBUTE_UNUSED;
381 HOST_WIDE_INT stack_size;
382 HOST_WIDE_INT struct_value_size;
385 int old_inhibit_defer_pop;
389 rtx stack_size_rtx = GEN_INT (stack_size);
390 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
392 #ifndef ACCUMULATE_OUTGOING_ARGS
393 int already_popped = 0;
396 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
397 and we don't want to load it into a register as an optimization,
398 because prepare_call_address already did it if it should be done. */
399 if (GET_CODE (funexp) != SYMBOL_REF)
400 funexp = memory_address (FUNCTION_MODE, funexp);
/* Prefer the target's call_pop / call_value_pop patterns when the callee
   pops its own arguments, so the pop is encoded in the call insn itself.  */
402 #ifndef ACCUMULATE_OUTGOING_ARGS
403 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
404 if (HAVE_call_pop && HAVE_call_value_pop
405 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
408 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
411 /* If this subroutine pops its own args, record that in the call insn
412 if possible, for the sake of frame pointer elimination. */
415 pat = gen_call_value_pop (valreg,
416 gen_rtx_MEM (FUNCTION_MODE, funexp),
417 stack_size_rtx, next_arg_reg, n_pop);
419 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
420 stack_size_rtx, next_arg_reg, n_pop);
422 emit_call_insn (pat);
/* Otherwise fall back to the plain call / call_value patterns.  */
429 #if defined (HAVE_call) && defined (HAVE_call_value)
430 if (HAVE_call && HAVE_call_value)
433 emit_call_insn (gen_call_value (valreg,
434 gen_rtx_MEM (FUNCTION_MODE, funexp),
435 stack_size_rtx, next_arg_reg,
438 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
439 stack_size_rtx, next_arg_reg,
440 struct_value_size_rtx));
446 /* Find the CALL insn we just emitted. */
447 for (call_insn = get_last_insn ();
448 call_insn && GET_CODE (call_insn) != CALL_INSN;
449 call_insn = PREV_INSN (call_insn))
455 /* Put the register usage information on the CALL. If there is already
456 some usage information, put ours at the end. */
457 if (CALL_INSN_FUNCTION_USAGE (call_insn))
461 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
462 link = XEXP (link, 1))
465 XEXP (link, 1) = call_fusage;
468 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
470 /* If this is a const call, then set the insn's unchanging bit. */
472 CONST_CALL_P (call_insn) = 1;
474 /* Restore this now, so that we do defer pops for this call's args
475 if the context of the call as a whole permits. */
476 inhibit_defer_pop = old_inhibit_defer_pop;
478 #ifndef ACCUMULATE_OUTGOING_ARGS
479 /* If returning from the subroutine does not automatically pop the args,
480 we need an instruction to pop them sooner or later.
481 Perhaps do it now; perhaps just record how much space to pop later.
483 If returning from the subroutine does pop the args, indicate that the
484 stack pointer will be changed. */
486 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
/* Record the callee's pop as a stack-pointer clobber and reduce the
   amount this caller still needs to pop.  */
489 CALL_INSN_FUNCTION_USAGE (call_insn)
490 = gen_rtx_EXPR_LIST (VOIDmode,
491 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
492 CALL_INSN_FUNCTION_USAGE (call_insn));
493 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
494 stack_size_rtx = GEN_INT (stack_size);
/* Either defer the pop (batching adjacent pops) or adjust the stack now.  */
499 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
500 pending_stack_adjust += stack_size;
502 adjust_stack (stack_size_rtx);
507 /* Determine if the function identified by NAME and FNDECL is one with
508 special properties we wish to know about.
510 For example, if the function might return more than one time (setjmp), then
511 set RETURNS_TWICE to a nonzero value.
513 Similarly set IS_LONGJMP if the function is in the longjmp family.
515 Set IS_MALLOC for any of the standard memory allocation functions which
516 allocate from the heap.
518 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
519 space from the stack such as alloca. */
/* NOTE(review): non-contiguous excerpt -- the output-flag assignments and
   several string-prefix tests fall in the gaps; code left byte-identical.  */
522 special_function_p (name, fndecl, returns_twice, is_longjmp,
523 is_malloc, may_be_alloca)
/* Only consider short, file-scope, extern functions -- anything else
   cannot be one of the reserved "magic" library names.  */
536 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
537 /* Exclude functions not at the file scope, or not `extern',
538 since they are not the magic functions we would otherwise
540 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
544 /* We assume that alloca will always be called by name. It
545 makes no sense to pass it as a pointer-to-function to
546 anything that does not understand its behavior. */
548 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
550 && ! strcmp (name, "alloca"))
551 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
553 && ! strcmp (name, "__builtin_alloca"))));
555 /* Disregard prefix _, __ or __x. */
558 if (name[1] == '_' && name[2] == 'x')
560 else if (name[1] == '_')
/* setjmp family: may return twice.  */
570 && (! strcmp (tname, "setjmp")
571 || ! strcmp (tname, "setjmp_syscall")))
573 && ! strcmp (tname, "sigsetjmp"))
575 && ! strcmp (tname, "savectx")));
577 && ! strcmp (tname, "siglongjmp"))
580 else if ((tname[0] == 'q' && tname[1] == 's'
581 && ! strcmp (tname, "qsetjmp"))
582 || (tname[0] == 'v' && tname[1] == 'f'
583 && ! strcmp (tname, "vfork")))
586 else if (tname[0] == 'l' && tname[1] == 'o'
587 && ! strcmp (tname, "longjmp"))
589 /* XXX should have "malloc" attribute on functions instead
590 of recognizing them by name. */
591 else if (! strcmp (tname, "malloc")
592 || ! strcmp (tname, "calloc")
593 || ! strcmp (tname, "realloc")
594 /* Note use of NAME rather than TNAME here. These functions
595 are only reserved when preceded with __. */
596 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
597 || ! strcmp (name, "__nw") /* mangled __builtin_new */
598 || ! strcmp (name, "__builtin_new")
599 || ! strcmp (name, "__builtin_vec_new"))
604 /* Precompute all register parameters as described by ARGS, storing values
605 into fields within the ARGS array.
607 NUM_ACTUALS indicates the total number of elements in the ARGS array.
609 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): non-contiguous excerpt -- parameter declarations, the
   *reg_parm_seen store and some expand_expr arguments fall in the gaps.  */
612 precompute_register_parameters (num_actuals, args, reg_parm_seen)
614 struct arg_data *args;
621 for (i = 0; i < num_actuals; i++)
622 if (args[i].reg != 0 && ! args[i].pass_on_stack)
/* Expand the argument now if it was not precomputed earlier.  */
626 if (args[i].value == 0)
629 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
631 preserve_temp_slots (args[i].value);
634 /* ANSI doesn't require a sequence point here,
635 but PCC has one, so this will avoid some problems. */
639 /* If we are to promote the function arg to a wider mode,
642 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
644 = convert_modes (args[i].mode,
645 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
646 args[i].value, args[i].unsignedp);
648 /* If the value is expensive, and we are inside an appropriately
649 short loop, put the value into a pseudo and then put the pseudo
652 For small register classes, also do this if this call uses
653 register parameters. This is to avoid reload conflicts while
654 loading the parameters registers. */
656 if ((! (GET_CODE (args[i].value) == REG
657 || (GET_CODE (args[i].value) == SUBREG
658 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
659 && args[i].mode != BLKmode
660 && rtx_cost (args[i].value, SET) > 2
661 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
662 || preserve_subexpressions_p ()))
663 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
667 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
669 /* The argument list is the property of the called routine and it
670 may clobber it. If the fixed area has been used for previous
671 parameters, we must save and restore it. */
/* Save the in-use portion of the register-parameter stack area into a
   pseudo or stack temporary and return it; *LOW_TO_SAVE / *HIGH_TO_SAVE
   receive the saved byte range.  NOTE(review): non-contiguous excerpt --
   some declarations, loop bodies and the return statement fall in gaps.  */
673 save_fixed_argument_area (reg_parm_stack_space, argblock,
674 low_to_save, high_to_save)
675 int reg_parm_stack_space;
681 rtx save_area = NULL_RTX;
683 /* Compute the boundary of the area that needs to be saved, if any. */
684 #ifdef ARGS_GROW_DOWNWARD
685 for (i = 0; i < reg_parm_stack_space + 1; i++)
687 for (i = 0; i < reg_parm_stack_space; i++)
690 if (i >= highest_outgoing_arg_in_use
691 || stack_usage_map[i] == 0)
694 if (*low_to_save == -1)
700 if (*low_to_save >= 0)
702 int num_to_save = *high_to_save - *low_to_save + 1;
703 enum machine_mode save_mode
704 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
707 /* If we don't have the required alignment, must do this in BLKmode. */
708 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
709 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
712 #ifdef ARGS_GROW_DOWNWARD
713 stack_area = gen_rtx_MEM (save_mode,
714 memory_address (save_mode,
715 plus_constant (argblock,
718 stack_area = gen_rtx_MEM (save_mode,
719 memory_address (save_mode,
720 plus_constant (argblock,
/* Block-sized saves go to a stack temporary; word-sized ones to a pseudo.  */
723 if (save_mode == BLKmode)
725 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
726 emit_block_move (validize_mem (save_area), stack_area,
727 GEN_INT (num_to_save),
728 PARM_BOUNDARY / BITS_PER_UNIT);
732 save_area = gen_reg_rtx (save_mode);
733 emit_move_insn (save_area, stack_area);
/* Restore the register-parameter stack area previously captured by
   save_fixed_argument_area.  SAVE_AREA is what that function returned;
   ARGBLOCK, HIGH_TO_SAVE and LOW_TO_SAVE describe where it goes back.
   NOTE(review): non-contiguous excerpt -- the parameter declarations and
   the stack_area declaration line fall in the gaps.  */
740 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
746 enum machine_mode save_mode = GET_MODE (save_area);
747 #ifdef ARGS_GROW_DOWNWARD
749 = gen_rtx_MEM (save_mode,
750 memory_address (save_mode,
751 plus_constant (argblock,
755 = gen_rtx_MEM (save_mode,
756 memory_address (save_mode,
757 plus_constant (argblock,
/* Word-sized saves restore with a single move; BLKmode with a block move.  */
761 if (save_mode != BLKmode)
762 emit_move_insn (stack_area, save_area);
764 emit_block_move (stack_area, validize_mem (save_area),
765 GEN_INT (high_to_save - low_to_save + 1),
766 PARM_BOUNDARY / BITS_PER_UNIT);
770 /* If any elements in ARGS refer to parameters that are to be passed in
771 registers, but not in memory, and whose alignment does not permit a
772 direct copy into registers. Copy the values into a group of pseudos
773 which we will later copy into the appropriate hard registers.
775 Pseudos for each unaligned argument will be stored into the array
776 args[argnum].aligned_regs. The caller is responsible for deallocating
777 the aligned_regs array if it is nonzero. */
/* NOTE(review): non-contiguous excerpt -- parameter declarations and a few
   store_bit_field/extract_bit_field argument lines fall in the gaps.  */
780 store_unaligned_arguments_into_pseudos (args, num_actuals)
781 struct arg_data *args;
786 for (i = 0; i < num_actuals; i++)
787 if (args[i].reg != 0 && ! args[i].pass_on_stack
788 && args[i].mode == BLKmode
789 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
790 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
792 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
793 int big_endian_correction = 0;
/* One pseudo per word, unless only a partial set of registers is used.  */
795 args[i].n_aligned_regs
796 = args[i].partial ? args[i].partial
797 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
799 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
800 * args[i].n_aligned_regs);
802 /* Structures smaller than a word are aligned to the least
803 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
804 this means we must skip the empty high order bytes when
805 calculating the bit offset. */
806 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
807 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
809 for (j = 0; j < args[i].n_aligned_regs; j++)
811 rtx reg = gen_reg_rtx (word_mode);
812 rtx word = operand_subword_force (args[i].value, j, BLKmode);
813 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
814 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
816 args[i].aligned_regs[j] = reg;
818 /* There is no need to restrict this code to loading items
819 in TYPE_ALIGN sized hunks. The bitfield instructions can
820 load up entire word sized registers efficiently.
822 ??? This may not be needed anymore.
823 We use to emit a clobber here but that doesn't let later
824 passes optimize the instructions we emit. By storing 0 into
825 the register later passes know the first AND to zero out the
826 bitfield being set in the register is unnecessary. The store
827 of 0 will be deleted as will at least the first AND. */
829 emit_move_insn (reg, const0_rtx)
831 bytes -= bitsize / BITS_PER_UNIT;
832 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
833 extract_bit_field (word, bitsize, 0, 1,
836 bitalign / BITS_PER_UNIT,
838 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
843 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
846 NUM_ACTUALS is the total number of parameters.
848 N_NAMED_ARGS is the total number of named arguments.
850 FNDECL is the tree code for the target of this call (if known)
852 ARGS_SO_FAR holds state needed by the target to know where to place
855 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
856 for arguments which are passed in registers.
858 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
859 and may be modified by this routine.
861 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
862 flags which may be modified by this routine. */
/* NOTE(review): non-contiguous excerpt -- many declaration, brace, #else and
   #endif lines fall in the gaps; code below is left byte-identical.  */
865 initialize_argument_information (num_actuals, args, args_size, n_named_args,
866 actparms, fndecl, args_so_far,
867 reg_parm_stack_space, old_stack_level,
868 old_pending_adj, must_preallocate, is_const)
870 struct arg_data *args;
871 struct args_size *args_size;
875 CUMULATIVE_ARGS *args_so_far;
876 int reg_parm_stack_space;
877 rtx *old_stack_level;
878 int *old_pending_adj;
879 int *must_preallocate;
882 /* 1 if scanning parms front to back, -1 if scanning back to front. */
885 /* Count arg position in order args appear. */
891 args_size->constant = 0;
894 /* In this loop, we consider args in the order they are written.
895 We fill up ARGS from the front or from the back if necessary
896 so that in any case the first arg to be pushed ends up at the front. */
898 #ifdef PUSH_ARGS_REVERSED
899 i = num_actuals - 1, inc = -1;
900 /* In this case, must reverse order of args
901 so that we compute and push the last arg first. */
906 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
907 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
909 tree type = TREE_TYPE (TREE_VALUE (p));
911 enum machine_mode mode;
913 args[i].tree_value = TREE_VALUE (p);
915 /* Replace erroneous argument with constant zero. */
916 if (type == error_mark_node || TYPE_SIZE (type) == 0)
917 args[i].tree_value = integer_zero_node, type = integer_type_node;
919 /* If TYPE is a transparent union, pass things the way we would
920 pass the first field of the union. We have already verified that
921 the modes are the same. */
922 if (TYPE_TRANSPARENT_UNION (type))
923 type = TREE_TYPE (TYPE_FIELDS (type));
925 /* Decide where to pass this arg.
927 args[i].reg is nonzero if all or part is passed in registers.
929 args[i].partial is nonzero if part but not all is passed in registers,
930 and the exact value says how many words are passed in registers.
932 args[i].pass_on_stack is nonzero if the argument must at least be
933 computed on the stack. It may then be loaded back into registers
934 if args[i].reg is nonzero.
936 These decisions are driven by the FUNCTION_... macros and must agree
937 with those made by function.c. */
939 /* See if this argument should be passed by invisible reference. */
940 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
941 && contains_placeholder_p (TYPE_SIZE (type)))
942 || TREE_ADDRESSABLE (type)
943 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
944 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
945 type, argpos < n_named_args)
949 /* If we're compiling a thunk, pass through invisible
950 references instead of making a copy. */
951 if (current_function_is_thunk
952 #ifdef FUNCTION_ARG_CALLEE_COPIES
953 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
954 type, argpos < n_named_args)
955 /* If it's in a register, we must make a copy of it too. */
956 /* ??? Is this a sufficient test? Is there a better one? */
957 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
958 && REG_P (DECL_RTL (args[i].tree_value)))
959 && ! TREE_ADDRESSABLE (type))
963 /* C++ uses a TARGET_EXPR to indicate that we want to make a
964 new object from the argument. If we are passing by
965 invisible reference, the callee will do that for us, so we
966 can strip off the TARGET_EXPR. This is not always safe,
967 but it is safe in the only case where this is a useful
968 optimization; namely, when the argument is a plain object.
969 In that case, the frontend is just asking the backend to
970 make a bitwise copy of the argument. */
972 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
973 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
974 (args[i].tree_value, 1)))
976 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
977 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
/* Pass the address of the object instead of the object itself.  */
979 args[i].tree_value = build1 (ADDR_EXPR,
980 build_pointer_type (type),
982 type = build_pointer_type (type);
986 /* We make a copy of the object and pass the address to the
987 function being called. */
990 if (TYPE_SIZE (type) == 0
991 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
992 || (flag_stack_check && ! STACK_CHECK_BUILTIN
993 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
994 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
995 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
997 /* This is a variable-sized object. Make space on the stack
999 rtx size_rtx = expr_size (TREE_VALUE (p));
/* Save the stack level (and pending adjustment) once, the first time a
   dynamically-sized copy is made, so the caller can restore it later.  */
1001 if (*old_stack_level == 0)
1003 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1004 *old_pending_adj = pending_stack_adjust;
1005 pending_stack_adjust = 0;
1008 copy = gen_rtx_MEM (BLKmode,
1009 allocate_dynamic_stack_space (size_rtx,
1011 TYPE_ALIGN (type)));
1015 int size = int_size_in_bytes (type);
1016 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1019 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1021 store_expr (args[i].tree_value, copy, 0);
/* Replace the argument with the address of the fresh copy.  */
1024 args[i].tree_value = build1 (ADDR_EXPR,
1025 build_pointer_type (type),
1026 make_tree (type, copy));
1027 type = build_pointer_type (type);
1031 mode = TYPE_MODE (type);
1032 unsignedp = TREE_UNSIGNED (type);
1034 #ifdef PROMOTE_FUNCTION_ARGS
1035 mode = promote_mode (type, mode, &unsignedp, 1);
1038 args[i].unsignedp = unsignedp;
1039 args[i].mode = mode;
1040 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1041 argpos < n_named_args);
1042 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1045 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1046 argpos < n_named_args);
1049 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1051 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1052 it means that we are to pass this arg in the register(s) designated
1053 by the PARALLEL, but also to pass it in the stack. */
1054 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1055 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1056 args[i].pass_on_stack = 1;
1058 /* If this is an addressable type, we must preallocate the stack
1059 since we must evaluate the object into its final location.
1061 If this is to be passed in both registers and the stack, it is simpler
1063 if (TREE_ADDRESSABLE (type)
1064 || (args[i].pass_on_stack && args[i].reg != 0))
1065 *must_preallocate = 1;
1067 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1068 we cannot consider this function call constant. */
1069 if (TREE_ADDRESSABLE (type))
1072 /* Compute the stack-size of this argument. */
1073 if (args[i].reg == 0 || args[i].partial != 0
1074 || reg_parm_stack_space > 0
1075 || args[i].pass_on_stack)
1076 locate_and_pad_parm (mode, type,
1077 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1082 fndecl, args_size, &args[i].offset,
1085 #ifndef ARGS_GROW_DOWNWARD
1086 args[i].slot_offset = *args_size;
1089 /* If a part of the arg was put into registers,
1090 don't include that part in the amount pushed. */
1091 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1092 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1093 / (PARM_BOUNDARY / BITS_PER_UNIT)
1094 * (PARM_BOUNDARY / BITS_PER_UNIT));
1096 /* Update ARGS_SIZE, the total stack space for args so far. */
1098 args_size->constant += args[i].size.constant;
1099 if (args[i].size.var)
1101 ADD_PARM_SIZE (*args_size, args[i].size.var);
1104 /* Since the slot offset points to the bottom of the slot,
1105 we must record it after incrementing if the args grow down. */
1106 #ifdef ARGS_GROW_DOWNWARD
1107 args[i].slot_offset = *args_size;
1109 args[i].slot_offset.constant = -args_size->constant;
1112 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1116 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1117 have been used, etc. */
1119 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1120 argpos < n_named_args);
1124 /* Update ARGS_SIZE to contain the total size for the argument block.
1125 Return the original constant component of the argument block's size.
1127 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1128 for arguments passed in registers. */
/* NOTE(review): non-contiguous excerpt -- the enclosing if/else around the
   variable-size vs constant-size paths and several #else/#endif lines fall
   in the gaps; code left byte-identical.  */
1131 compute_argument_block_size (reg_parm_stack_space, args_size)
1132 int reg_parm_stack_space;
1133 struct args_size *args_size;
1135 int unadjusted_args_size = args_size->constant;
1137 /* Compute the actual size of the argument block required. The variable
1138 and constant sizes must be combined, the size may have to be rounded,
1139 and there may be a minimum required size. */
/* Variable-size path: fold the constant part into the variable tree.  */
1143 args_size->var = ARGS_SIZE_TREE (*args_size);
1144 args_size->constant = 0;
1146 #ifdef PREFERRED_STACK_BOUNDARY
1147 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1148 args_size->var = round_up (args_size->var, STACK_BYTES);
1151 if (reg_parm_stack_space > 0)
1154 = size_binop (MAX_EXPR, args_size->var,
1155 size_int (reg_parm_stack_space));
1157 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1158 /* The area corresponding to register parameters is not to count in
1159 the size of the block we need. So make the adjustment. */
1161 = size_binop (MINUS_EXPR, args_size->var,
1162 size_int (reg_parm_stack_space));
/* Constant-size path: round up to the stack boundary and enforce the
   register-parameter area minimum.  */
1168 #ifdef PREFERRED_STACK_BOUNDARY
1169 args_size->constant = (((args_size->constant + (STACK_BYTES - 1))
1170 / STACK_BYTES) * STACK_BYTES);
1173 args_size->constant = MAX (args_size->constant,
1174 reg_parm_stack_space);
1176 #ifdef MAYBE_REG_PARM_STACK_SPACE
1177 if (reg_parm_stack_space == 0)
1178 args_size->constant = 0;
1181 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1182 args_size->constant -= reg_parm_stack_space;
1185 return unadjusted_args_size;
1188 /* Precompute parameters as needed for a function call.
1190 IS_CONST indicates the target function is a pure function.
1192 MUST_PREALLOCATE indicates that we must preallocate stack space for
1193 any stack arguments.
1195 NUM_ACTUALS is the number of arguments.
1197 ARGS is an array containing information for each argument; this routine
1198 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1200 ARGS_SIZE contains information about the size of the arg list. */
/* NOTE(review): braces and the loop-condition opening line are elided in
   this excerpt; comments describe the visible code only.  */
1203 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1205 int must_preallocate;
1207 struct arg_data *args;
1208 struct args_size *args_size;
1212 /* If this function call is cse'able, precompute all the parameters.
1213 Note that if the parameter is constructed into a temporary, this will
1214 cause an additional copy because the parameter will be constructed
1215 into a temporary location and then copied into the outgoing arguments.
1216 If a parameter contains a call to alloca and this function uses the
1217 stack, precompute the parameter. */
1219 /* If we preallocated the stack space, and some arguments must be passed
1220 on the stack, then we must precompute any parameter which contains a
1221 function call which will store arguments on the stack.
1222 Otherwise, evaluating the parameter may clobber previous parameters
1223 which have already been stored into the stack. */
/* Precompute an argument when (a) the call is const/cse'able, (b) the
   arg itself contains a call and there are stack args to protect, or
   (c) stack space is preallocated and the arg contains any call.  */
1225 for (i = 0; i < num_actuals; i++)
1227 || ((args_size->var != 0 || args_size->constant != 0)
1228 && calls_function (args[i].tree_value, 1))
1229 || (must_preallocate
1230 && (args_size->var != 0 || args_size->constant != 0)
1231 && calls_function (args[i].tree_value, 0)))
1233 /* If this is an addressable type, we cannot pre-evaluate it. */
1234 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
/* Evaluate the argument now and record the result in both VALUE and
   INITIAL_VALUE.  */
1239 args[i].initial_value = args[i].value
1240 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1242 preserve_temp_slots (args[i].value);
1245 /* ANSI doesn't require a sequence point here,
1246 but PCC has one, so this will avoid some problems. */
1249 args[i].initial_value = args[i].value
1250 = protect_from_queue (args[i].initial_value, 0);
/* If the argument was promoted to a wider mode, convert the computed
   value to that mode now.  */
1252 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1254 = convert_modes (args[i].mode,
1255 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1256 args[i].value, args[i].unsignedp);
1260 /* Given the current state of MUST_PREALLOCATE and information about
1261 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1262 compute and return the final value for MUST_PREALLOCATE. */
/* NOTE(review): braces and some assignment lines are elided in this
   excerpt; comments describe the visible code only.  */
1265 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1266 int must_preallocate;
1268 struct arg_data *args;
1269 struct args_size *args_size;
1271 /* See if we have or want to preallocate stack space.
1273 If we would have to push a partially-in-regs parm
1274 before other stack parms, preallocate stack space instead.
1276 If the size of some parm is not a multiple of the required stack
1277 alignment, we must preallocate.
1279 If the total size of arguments that would otherwise create a copy in
1280 a temporary (such as a CALL) is more than half the total argument list
1281 size, preallocation is faster.
1283 Another reason to preallocate is if we have a machine (like the m88k)
1284 where stack alignment is required to be maintained between every
1285 pair of insns, not just when the call is made. However, we assume here
1286 that such machines either do not have push insns (and hence preallocation
1287 would occur anyway) or the problem is taken care of with
1290 if (! must_preallocate)
/* partial_seen: a partially-in-registers parm has been scanned.
   copy_to_evaluate_size: total bytes of BLKmode args that would need an
   extra temporary copy if not preallocated.  */
1292 int partial_seen = 0;
1293 int copy_to_evaluate_size = 0;
/* Stop scanning as soon as preallocation is decided.  */
1296 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1298 if (args[i].partial > 0 && ! args[i].pass_on_stack)
/* A pure stack parm after a partial-register parm forces
   preallocation.  */
1300 else if (partial_seen && args[i].reg == 0)
1301 must_preallocate = 1;
/* BLKmode args whose evaluation creates a temporary (calls,
   target/cond expressions, addressable types) are counted toward the
   copy-cost heuristic below.  */
1303 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1304 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1305 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1306 || TREE_CODE (args[i].tree_value) == COND_EXPR
1307 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1308 copy_to_evaluate_size
1309 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
/* Preallocate when the would-be temporary copies amount to at least
   half of the argument block.  */
1312 if (copy_to_evaluate_size * 2 >= args_size->constant
1313 && args_size->constant > 0)
1314 must_preallocate = 1;
1316 return must_preallocate;
1319 /* If we preallocated stack space, compute the address of each argument
1320 and store it into the ARGS array.
1322 We need not ensure it is a valid memory address here; it will be
1323 validized when it is used.
1325 ARGBLOCK is an rtx for the address of the outgoing arguments. */
/* NOTE(review): braces, the guard on ARGBLOCK, and some else-lines are
   elided in this excerpt; comments describe the visible code only.  */
1328 compute_argument_addresses (args, argblock, num_actuals)
1329 struct arg_data *args;
1335 rtx arg_reg = argblock;
1336 int i, arg_offset = 0;
/* If ARGBLOCK is base + constant, split it so the constant part can be
   folded into each argument address.  */
1338 if (GET_CODE (argblock) == PLUS)
1339 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1341 for (i = 0; i < num_actuals; i++)
/* offset: where the arg's data starts; slot_offset: where its stack
   slot starts (these differ when padding is involved).  */
1343 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1344 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1347 /* Skip this parm if it will not be passed on the stack. */
1348 if (! args[i].pass_on_stack && args[i].reg != 0)
/* Build the address of the arg's data and record it as a MEM in
   args[i].stack.  */
1351 if (GET_CODE (offset) == CONST_INT)
1352 addr = plus_constant (arg_reg, INTVAL (offset));
1354 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1356 addr = plus_constant (addr, arg_offset);
1357 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1360 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
/* Likewise build the address of the whole slot in args[i].stack_slot. */
1362 if (GET_CODE (slot_offset) == CONST_INT)
1363 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1365 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1367 addr = plus_constant (addr, arg_offset);
1368 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1373 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1374 in a call instruction.
1376 FNDECL is the tree node for the target function. For an indirect call
1377 FNDECL will be NULL_TREE.
1379 EXP is the CALL_EXPR for this call. */
/* NOTE(review): the function tail (including its return statement) is
   elided in this excerpt; comments describe the visible code only.  */
1382 rtx_for_function_call (fndecl, exp)
1388 /* Get the function to call, in the form of RTL. */
/* Direct call: the callee is known by name.  */
1391 /* If this is the first use of the function, see if we need to
1392 make an external definition for it. */
1393 if (! TREE_USED (fndecl))
1395 assemble_external (fndecl);
1396 TREE_USED (fndecl) = 1;
1399 /* Get a SYMBOL_REF rtx for the function address. */
1400 funexp = XEXP (DECL_RTL (fndecl), 0);
/* Indirect call: evaluate the called expression to get the address.  */
1403 /* Generate an rtx (probably a pseudo-register) for the address. */
1406 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1407 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1409 /* Check the function is executable. */
/* Under -fcheck-memory-usage, emit a runtime check that the target
   address is executable.  */
1410 if (current_function_check_memory_usage)
1411 emit_library_call (chkr_check_exec_libfunc, 1,
1419 /* Generate all the code for a function call
1420 and return an rtx for its value.
1421 Store the value in TARGET (specified as an rtx) if convenient.
1422 If the value is stored in TARGET then TARGET is returned.
1423 If IGNORE is nonzero, then we ignore the value of the function call. */
1426 expand_call (exp, target, ignore)
1431 /* List of actual parameters. */
1432 tree actparms = TREE_OPERAND (exp, 1);
1433 /* RTX for the function to be called. */
1435 /* Data type of the function. */
1437 /* Declaration of the function being called,
1438 or 0 if the function is computed (not known by name). */
1442 /* Register in which non-BLKmode value will be returned,
1443 or 0 if no value or if value is BLKmode. */
1445 /* Address where we should return a BLKmode value;
1446 0 if value not BLKmode. */
1447 rtx structure_value_addr = 0;
1448 /* Nonzero if that address is being passed by treating it as
1449 an extra, implicit first parameter. Otherwise,
1450 it is passed by being copied directly into struct_value_rtx. */
1451 int structure_value_addr_parm = 0;
1452 /* Size of aggregate value wanted, or zero if none wanted
1453 or if we are using the non-reentrant PCC calling convention
1454 or expecting the value in registers. */
1455 HOST_WIDE_INT struct_value_size = 0;
1456 /* Nonzero if called function returns an aggregate in memory PCC style,
1457 by returning the address of where to find it. */
1458 int pcc_struct_value = 0;
1460 /* Number of actual parameters in this call, including struct value addr. */
1462 /* Number of named args. Args after this are anonymous ones
1463 and they must all go on the stack. */
1466 /* Vector of information about each argument.
1467 Arguments are numbered in the order they will be pushed,
1468 not the order they are written. */
1469 struct arg_data *args;
1471 /* Total size in bytes of all the stack-parms scanned so far. */
1472 struct args_size args_size;
1473 /* Size of arguments before any adjustments (such as rounding). */
1474 int unadjusted_args_size;
1475 /* Data on reg parms scanned so far. */
1476 CUMULATIVE_ARGS args_so_far;
1477 /* Nonzero if a reg parm has been scanned. */
1479 /* Nonzero if this is an indirect function call. */
1481 /* Nonzero if we must avoid push-insns in the args for this call.
1482 If stack space is allocated for register parameters, but not by the
1483 caller, then it is preallocated in the fixed part of the stack frame.
1484 So the entire argument block must then be preallocated (i.e., we
1485 ignore PUSH_ROUNDING in that case). */
1487 #ifdef PUSH_ROUNDING
1488 int must_preallocate = 0;
1490 int must_preallocate = 1;
1493 /* Size of the stack reserved for parameter registers. */
1494 int reg_parm_stack_space = 0;
1496 /* Address of space preallocated for stack parms
1497 (on machines that lack push insns), or 0 if space not preallocated. */
1500 /* Nonzero if it is plausible that this is a call to alloca. */
1502 /* Nonzero if this is a call to malloc or a related function. */
1504 /* Nonzero if this is a call to setjmp or a related function. */
1506 /* Nonzero if this is a call to `longjmp'. */
1508 /* Nonzero if this is a call to an inline function. */
1509 int is_integrable = 0;
1510 /* Nonzero if this is a call to a `const' function.
1511 Note that only explicitly named functions are handled as `const' here. */
1513 /* Nonzero if this is a call to a `volatile' function. */
1514 int is_volatile = 0;
1515 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1516 /* Define the boundary of the register parm stack space that needs to be
1518 int low_to_save = -1, high_to_save;
1519 rtx save_area = 0; /* Place that it is saved */
1522 #ifdef ACCUMULATE_OUTGOING_ARGS
1523 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1524 char *initial_stack_usage_map = stack_usage_map;
1525 int old_stack_arg_under_construction;
1528 rtx old_stack_level = 0;
1529 int old_pending_adj = 0;
1530 int old_inhibit_defer_pop = inhibit_defer_pop;
1531 rtx call_fusage = 0;
1535 /* The value of the function call can be put in a hard register. But
1536 if -fcheck-memory-usage, code which invokes functions (and thus
1537 damages some hard registers) can be inserted before using the value.
1538 So, target is always a pseudo-register in that case. */
1539 if (current_function_check_memory_usage)
1542 /* See if we can find a DECL-node for the actual function.
1543 As a result, decide whether this is a call to an integrable function. */
1545 p = TREE_OPERAND (exp, 0);
1546 if (TREE_CODE (p) == ADDR_EXPR)
1548 fndecl = TREE_OPERAND (p, 0);
1549 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1554 && fndecl != current_function_decl
1555 && DECL_INLINE (fndecl)
1556 && DECL_SAVED_INSNS (fndecl)
1557 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
1559 else if (! TREE_ADDRESSABLE (fndecl))
1561 /* In case this function later becomes inlinable,
1562 record that there was already a non-inline call to it.
1564 Use abstraction instead of setting TREE_ADDRESSABLE
1566 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1569 warning_with_decl (fndecl, "can't inline call to `%s'");
1570 warning ("called from here");
1572 mark_addressable (fndecl);
1575 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1576 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1579 if (TREE_THIS_VOLATILE (fndecl))
1584 /* If we don't have specific function to call, see if we have a
1585 constant or `noreturn' function from the type. */
1588 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1589 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1592 #ifdef REG_PARM_STACK_SPACE
1593 #ifdef MAYBE_REG_PARM_STACK_SPACE
1594 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1596 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1600 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1601 if (reg_parm_stack_space > 0)
1602 must_preallocate = 1;
1605 /* Warn if this value is an aggregate type,
1606 regardless of which calling convention we are using for it. */
1607 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1608 warning ("function call has aggregate value");
1610 /* Set up a place to return a structure. */
1612 /* Cater to broken compilers. */
1613 if (aggregate_value_p (exp))
1615 /* This call returns a big structure. */
1618 #ifdef PCC_STATIC_STRUCT_RETURN
1620 pcc_struct_value = 1;
1621 /* Easier than making that case work right. */
1624 /* In case this is a static function, note that it has been
1626 if (! TREE_ADDRESSABLE (fndecl))
1627 mark_addressable (fndecl);
1631 #else /* not PCC_STATIC_STRUCT_RETURN */
1633 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1635 if (target && GET_CODE (target) == MEM)
1636 structure_value_addr = XEXP (target, 0);
1639 /* Assign a temporary to hold the value. */
1642 /* For variable-sized objects, we must be called with a target
1643 specified. If we were to allocate space on the stack here,
1644 we would have no way of knowing when to free it. */
1646 if (struct_value_size < 0)
1649 /* This DECL is just something to feed to mark_addressable;
1650 it doesn't get pushed. */
1651 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1652 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1653 mark_addressable (d);
1654 structure_value_addr = XEXP (DECL_RTL (d), 0);
1659 #endif /* not PCC_STATIC_STRUCT_RETURN */
1662 /* If called function is inline, try to integrate it. */
1667 #ifdef ACCUMULATE_OUTGOING_ARGS
1668 rtx before_call = get_last_insn ();
1671 temp = expand_inline_function (fndecl, actparms, target,
1672 ignore, TREE_TYPE (exp),
1673 structure_value_addr);
1675 /* If inlining succeeded, return. */
1676 if (temp != (rtx) (HOST_WIDE_INT) -1)
1678 #ifdef ACCUMULATE_OUTGOING_ARGS
1679 /* If the outgoing argument list must be preserved, push
1680 the stack before executing the inlined function if it
1683 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1684 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1687 if (stack_arg_under_construction || i >= 0)
1690 = before_call ? NEXT_INSN (before_call) : get_insns ();
1693 /* Look for a call in the inline function code.
1694 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1695 nonzero then there is a call and it is not necessary
1696 to scan the insns. */
1698 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1699 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1700 if (GET_CODE (insn) == CALL_INSN)
1705 /* Reserve enough stack space so that the largest
1706 argument list of any function call in the inline
1707 function does not overlap the argument list being
1708 evaluated. This is usually an overestimate because
1709 allocate_dynamic_stack_space reserves space for an
1710 outgoing argument list in addition to the requested
1711 space, but there is no way to ask for stack space such
1712 that an argument list of a certain length can be
1715 Add the stack space reserved for register arguments, if
1716 any, in the inline function. What is really needed is the
1717 largest value of reg_parm_stack_space in the inline
1718 function, but that is not available. Using the current
1719 value of reg_parm_stack_space is wrong, but gives
1720 correct results on all supported machines. */
1722 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1723 + reg_parm_stack_space);
1726 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1727 allocate_dynamic_stack_space (GEN_INT (adjust),
1728 NULL_RTX, BITS_PER_UNIT);
1731 emit_insns_before (seq, first_insn);
1732 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1737 /* If the result is equivalent to TARGET, return TARGET to simplify
1738 checks in store_expr. They can be equivalent but not equal in the
1739 case of a function that returns BLKmode. */
1740 if (temp != target && rtx_equal_p (temp, target))
1745 /* If inlining failed, mark FNDECL as needing to be compiled
1746 separately after all. If function was declared inline,
1748 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1749 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1751 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1752 warning ("called from here");
1754 mark_addressable (fndecl);
1757 function_call_count++;
1759 if (fndecl && DECL_NAME (fndecl))
1760 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1762 /* See if this is a call to a function that can return more than once
1763 or a call to longjmp or malloc. */
1764 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1765 &is_malloc, &may_be_alloca);
1768 current_function_calls_alloca = 1;
1770 /* Operand 0 is a pointer-to-function; get the type of the function. */
1771 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1772 if (! POINTER_TYPE_P (funtype))
1774 funtype = TREE_TYPE (funtype);
1776 /* When calling a const function, we must pop the stack args right away,
1777 so that the pop is deleted or moved with the call. */
1781 /* Don't let pending stack adjusts add up to too much.
1782 Also, do all pending adjustments now
1783 if there is any chance this might be a call to alloca. */
1785 if (pending_stack_adjust >= 32
1786 || (pending_stack_adjust > 0 && may_be_alloca))
1787 do_pending_stack_adjust ();
1789 /* Push the temporary stack slot level so that we can free any temporaries
1793 /* Start updating where the next arg would go.
1795 On some machines (such as the PA) indirect calls have a different
1796 calling convention than normal calls. The last argument in
1797 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1799 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1801 /* If struct_value_rtx is 0, it means pass the address
1802 as if it were an extra parameter. */
1803 if (structure_value_addr && struct_value_rtx == 0)
1805 /* If structure_value_addr is a REG other than
1806 virtual_outgoing_args_rtx, we can always use it. If it
1807 is not a REG, we must always copy it into a register.
1808 If it is virtual_outgoing_args_rtx, we must copy it to another
1809 register in some cases. */
1810 rtx temp = (GET_CODE (structure_value_addr) != REG
1811 #ifdef ACCUMULATE_OUTGOING_ARGS
1812 || (stack_arg_under_construction
1813 && structure_value_addr == virtual_outgoing_args_rtx)
1815 ? copy_addr_to_reg (structure_value_addr)
1816 : structure_value_addr);
1819 = tree_cons (error_mark_node,
1820 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1823 structure_value_addr_parm = 1;
1826 /* Count the arguments and set NUM_ACTUALS. */
1827 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1830 /* Compute number of named args.
1831 Normally, don't include the last named arg if anonymous args follow.
1832 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1833 (If no anonymous args follow, the result of list_length is actually
1834 one too large. This is harmless.)
1836 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1837 zero, this machine will be able to place unnamed args that were passed in
1838 registers into the stack. So treat all args as named. This allows the
1839 insns emitting for a specific argument list to be independent of the
1840 function declaration.
1842 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1843 way to pass unnamed args in registers, so we must force them into
1846 if ((STRICT_ARGUMENT_NAMING
1847 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1848 && TYPE_ARG_TYPES (funtype) != 0)
1850 = (list_length (TYPE_ARG_TYPES (funtype))
1851 /* Don't include the last named arg. */
1852 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1853 /* Count the struct value address, if it is passed as a parm. */
1854 + structure_value_addr_parm);
1856 /* If we know nothing, treat all args as named. */
1857 n_named_args = num_actuals;
1859 /* Make a vector to hold all the information about each arg. */
1860 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1861 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1863 /* Build up entries in the ARGS array, compute the size of the arguments
1864 into ARGS_SIZE, etc. */
1865 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1866 actparms, fndecl, &args_so_far,
1867 reg_parm_stack_space, &old_stack_level,
1868 &old_pending_adj, &must_preallocate,
1871 #ifdef FINAL_REG_PARM_STACK_SPACE
1872 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1878 /* If this function requires a variable-sized argument list, don't try to
1879 make a cse'able block for this call. We may be able to do this
1880 eventually, but it is too complicated to keep track of what insns go
1881 in the cse'able block and which don't. */
1884 must_preallocate = 1;
1887 /* Compute the actual size of the argument block required. The variable
1888 and constant sizes must be combined, the size may have to be rounded,
1889 and there may be a minimum required size. */
1890 unadjusted_args_size
1891 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1893 /* Now make final decision about preallocating stack space. */
1894 must_preallocate = finalize_must_preallocate (must_preallocate,
1895 num_actuals, args, &args_size);
1897 /* If the structure value address will reference the stack pointer, we must
1898 stabilize it. We don't need to do this if we know that we are not going
1899 to adjust the stack pointer in processing this call. */
1901 if (structure_value_addr
1902 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1903 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1905 #ifndef ACCUMULATE_OUTGOING_ARGS
1906 || args_size.constant
1909 structure_value_addr = copy_to_reg (structure_value_addr);
1911 /* Precompute any arguments as needed. */
1912 precompute_arguments (is_const, must_preallocate, num_actuals,
1915 /* Now we are about to start emitting insns that can be deleted
1916 if a libcall is deleted. */
1917 if (is_const || is_malloc)
1920 /* If we have no actual push instructions, or shouldn't use them,
1921 make space for all args right now. */
1923 if (args_size.var != 0)
1925 if (old_stack_level == 0)
1927 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1928 old_pending_adj = pending_stack_adjust;
1929 pending_stack_adjust = 0;
1930 #ifdef ACCUMULATE_OUTGOING_ARGS
1931 /* stack_arg_under_construction says whether a stack arg is
1932 being constructed at the old stack level. Pushing the stack
1933 gets a clean outgoing argument block. */
1934 old_stack_arg_under_construction = stack_arg_under_construction;
1935 stack_arg_under_construction = 0;
1938 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1942 /* Note that we must go through the motions of allocating an argument
1943 block even if the size is zero because we may be storing args
1944 in the area reserved for register arguments, which may be part of
1947 int needed = args_size.constant;
1949 /* Store the maximum argument space used. It will be pushed by
1950 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1953 if (needed > current_function_outgoing_args_size)
1954 current_function_outgoing_args_size = needed;
1956 if (must_preallocate)
1958 #ifdef ACCUMULATE_OUTGOING_ARGS
1959 /* Since the stack pointer will never be pushed, it is possible for
1960 the evaluation of a parm to clobber something we have already
1961 written to the stack. Since most function calls on RISC machines
1962 do not use the stack, this is uncommon, but must work correctly.
1964 Therefore, we save any area of the stack that was already written
1965 and that we are using. Here we set up to do this by making a new
1966 stack usage map from the old one. The actual save will be done
1969 Another approach might be to try to reorder the argument
1970 evaluations to avoid this conflicting stack usage. */
1972 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1973 /* Since we will be writing into the entire argument area, the
1974 map must be allocated for its entire size, not just the part that
1975 is the responsibility of the caller. */
1976 needed += reg_parm_stack_space;
1979 #ifdef ARGS_GROW_DOWNWARD
1980 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1983 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1986 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1988 if (initial_highest_arg_in_use)
1989 bcopy (initial_stack_usage_map, stack_usage_map,
1990 initial_highest_arg_in_use);
1992 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1993 bzero (&stack_usage_map[initial_highest_arg_in_use],
1994 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1997 /* The address of the outgoing argument list must not be copied to a
1998 register here, because argblock would be left pointing to the
1999 wrong place after the call to allocate_dynamic_stack_space below.
2002 argblock = virtual_outgoing_args_rtx;
2004 #else /* not ACCUMULATE_OUTGOING_ARGS */
2005 if (inhibit_defer_pop == 0)
2007 /* Try to reuse some or all of the pending_stack_adjust
2008 to get this space. Maybe we can avoid any pushing. */
2009 if (needed > pending_stack_adjust)
2011 needed -= pending_stack_adjust;
2012 pending_stack_adjust = 0;
2016 pending_stack_adjust -= needed;
2020 /* Special case this because overhead of `push_block' in this
2021 case is non-trivial. */
2023 argblock = virtual_outgoing_args_rtx;
2025 argblock = push_block (GEN_INT (needed), 0, 0);
2027 /* We only really need to call `copy_to_reg' in the case where push
2028 insns are going to be used to pass ARGBLOCK to a function
2029 call in ARGS. In that case, the stack pointer changes value
2030 from the allocation point to the call point, and hence
2031 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2032 But might as well always do it. */
2033 argblock = copy_to_reg (argblock);
2034 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2038 #ifdef ACCUMULATE_OUTGOING_ARGS
2039 /* The save/restore code in store_one_arg handles all cases except one:
2040 a constructor call (including a C function returning a BLKmode struct)
2041 to initialize an argument. */
2042 if (stack_arg_under_construction)
2044 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2045 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2047 rtx push_size = GEN_INT (args_size.constant);
2049 if (old_stack_level == 0)
2051 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2052 old_pending_adj = pending_stack_adjust;
2053 pending_stack_adjust = 0;
2054 /* stack_arg_under_construction says whether a stack arg is
2055 being constructed at the old stack level. Pushing the stack
2056 gets a clean outgoing argument block. */
2057 old_stack_arg_under_construction = stack_arg_under_construction;
2058 stack_arg_under_construction = 0;
2059 /* Make a new map for the new argument list. */
2060 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2061 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2062 highest_outgoing_arg_in_use = 0;
2064 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2066 /* If argument evaluation might modify the stack pointer, copy the
2067 address of the argument list to a register. */
2068 for (i = 0; i < num_actuals; i++)
2069 if (args[i].pass_on_stack)
2071 argblock = copy_addr_to_reg (argblock);
2076 compute_argument_addresses (args, argblock, num_actuals);
2078 #ifdef PUSH_ARGS_REVERSED
2079 #ifdef PREFERRED_STACK_BOUNDARY
2080 /* If we push args individually in reverse order, perform stack alignment
2081 before the first push (the last arg). */
2083 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2087 /* Don't try to defer pops if preallocating, not even from the first arg,
2088 since ARGBLOCK probably refers to the SP. */
2092 funexp = rtx_for_function_call (fndecl, exp);
2094 /* Figure out the register where the value, if any, will come back. */
2096 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2097 && ! structure_value_addr)
2099 if (pcc_struct_value)
2100 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2103 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2106 /* Precompute all register parameters. It isn't safe to compute anything
2107 once we have started filling any specific hard regs. */
2108 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2110 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2112 /* Save the fixed argument area if it's part of the caller's frame and
2113 is clobbered by argument setup for this call. */
2114 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2115 &low_to_save, &high_to_save);
2119 /* Now store (and compute if necessary) all non-register parms.
2120 These come before register parms, since they can require block-moves,
2121 which could clobber the registers used for register parms.
2122 Parms which have partial registers are not stored here,
2123 but we do preallocate space here if they want that. */
2125 for (i = 0; i < num_actuals; i++)
2126 if (args[i].reg == 0 || args[i].pass_on_stack)
2127 store_one_arg (&args[i], argblock, may_be_alloca,
2128 args_size.var != 0, reg_parm_stack_space);
2130 /* If we have a parm that is passed in registers but not in memory
2131 and whose alignment does not permit a direct copy into registers,
2132 make a group of pseudos that correspond to each register that we
2134 if (STRICT_ALIGNMENT)
2135 store_unaligned_arguments_into_pseudos (args, num_actuals);
2137 /* Now store any partially-in-registers parm.
2138 This is the last place a block-move can happen. */
2140 for (i = 0; i < num_actuals; i++)
2141 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2142 store_one_arg (&args[i], argblock, may_be_alloca,
2143 args_size.var != 0, reg_parm_stack_space);
2145 #ifndef PUSH_ARGS_REVERSED
2146 #ifdef PREFERRED_STACK_BOUNDARY
2147 /* If we pushed args in forward order, perform stack alignment
2148 after pushing the last arg. */
2150 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2154 /* If register arguments require space on the stack and stack space
2155 was not preallocated, allocate stack space here for arguments
2156 passed in registers. */
2157 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2158 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2159 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2162 /* Pass the function the address in which to return a structure value. */
2163 if (structure_value_addr && ! structure_value_addr_parm)
2165 emit_move_insn (struct_value_rtx,
2167 force_operand (structure_value_addr,
2170 /* Mark the memory for the aggregate as write-only. */
2171 if (current_function_check_memory_usage)
2172 emit_library_call (chkr_set_right_libfunc, 1,
2174 structure_value_addr, ptr_mode,
2175 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2176 GEN_INT (MEMORY_USE_WO),
2177 TYPE_MODE (integer_type_node));
2179 if (GET_CODE (struct_value_rtx) == REG)
2180 use_reg (&call_fusage, struct_value_rtx);
2183 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2185 /* Now do the register loads required for any wholly-register parms or any
2186 parms which are passed both on the stack and in a register. Their
2187 expressions were already evaluated.
2189 Mark all register-parms as living through the call, putting these USE
2190 insns in the CALL_INSN_FUNCTION_USAGE field. */
2192 #ifdef LOAD_ARGS_REVERSED
2193 for (i = num_actuals - 1; i >= 0; i--)
2195 for (i = 0; i < num_actuals; i++)
2198 rtx reg = args[i].reg;
2199 int partial = args[i].partial;
2204 /* Set to non-negative if must move a word at a time, even if just
2205 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
2206 we just use a normal move insn. This value can be zero if the
2207 argument is a zero size structure with no fields. */
2208 nregs = (partial ? partial
2209 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2210 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
2211 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2214 /* Handle calls that pass values in multiple non-contiguous
2215 locations. The Irix 6 ABI has examples of this. */
2217 if (GET_CODE (reg) == PARALLEL)
2219 emit_group_load (reg, args[i].value,
2220 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
2221 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
2225 /* If simple case, just do move. If normal partial, store_one_arg
2226 has already loaded the register for us. In all other cases,
2227 load the register(s) from memory. */
2229 else if (nregs == -1)
2230 emit_move_insn (reg, args[i].value);
2232 /* If we have pre-computed the values to put in the registers in
2233 the case of non-aligned structures, copy them in now. */
2235 else if (args[i].n_aligned_regs != 0)
2236 for (j = 0; j < args[i].n_aligned_regs; j++)
2237 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2238 args[i].aligned_regs[j]);
2240 else if (partial == 0 || args[i].pass_on_stack)
2241 move_block_to_reg (REGNO (reg),
2242 validize_mem (args[i].value), nregs,
2245 /* Handle calls that pass values in multiple non-contiguous
2246 locations. The Irix 6 ABI has examples of this. */
2247 if (GET_CODE (reg) == PARALLEL)
2248 use_group_regs (&call_fusage, reg);
2249 else if (nregs == -1)
2250 use_reg (&call_fusage, reg);
2252 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
2256 /* Perform postincrements before actually calling the function. */
2259 /* All arguments and registers used for the call must be set up by now! */
2261 /* Generate the actual call instruction. */
2262 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
2263 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2264 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2266 /* If call is cse'able, make appropriate pair of reg-notes around it.
2267 Test valreg so we don't crash; may safely ignore `const'
2268 if return type is void. Disable for PARALLEL return values, because
2269 we have no way to move such values into a pseudo register. */
2270 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2273 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2276 /* Mark the return value as a pointer if needed. */
2277 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2279 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2280 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2283 /* Construct an "equal form" for the value which mentions all the
2284 arguments in order as well as the function name. */
2285 #ifdef PUSH_ARGS_REVERSED
2286 for (i = 0; i < num_actuals; i++)
2287 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2289 for (i = num_actuals - 1; i >= 0; i--)
2290 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2292 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2294 insns = get_insns ();
2297 emit_libcall_block (insns, temp, valreg, note);
2303 /* Otherwise, just write out the sequence without a note. */
2304 rtx insns = get_insns ();
2311 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2314 /* The return value from a malloc-like function is a pointer. */
2315 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2316 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2318 emit_move_insn (temp, valreg);
2320 /* The return value from a malloc-like function can not alias
2322 last = get_last_insn ();
2324 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2326 /* Write out the sequence. */
2327 insns = get_insns ();
2333 /* For calls to `setjmp', etc., inform flow.c it should complain
2334 if nonvolatile values are live. */
2338 emit_note (name, NOTE_INSN_SETJMP);
2339 current_function_calls_setjmp = 1;
2343 current_function_calls_longjmp = 1;
2345 /* Notice functions that cannot return.
2346 If optimizing, insns emitted below will be dead.
2347 If not optimizing, they will exist, which is useful
2348 if the user uses the `return' command in the debugger. */
2350 if (is_volatile || is_longjmp)
2353 /* If value type not void, return an rtx for the value. */
2355 /* If there are cleanups to be called, don't use a hard reg as target.
2356 We need to double check this and see if it matters anymore. */
2357 if (any_pending_cleanups (1)
2358 && target && REG_P (target)
2359 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2362 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2365 target = const0_rtx;
2367 else if (structure_value_addr)
2369 if (target == 0 || GET_CODE (target) != MEM)
2371 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2372 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2373 structure_value_addr));
2374 MEM_SET_IN_STRUCT_P (target,
2375 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2378 else if (pcc_struct_value)
2380 /* This is the special C++ case where we need to
2381 know what the true target was. We take care to
2382 never use this value more than once in one expression. */
2383 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2384 copy_to_reg (valreg));
2385 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2387 /* Handle calls that return values in multiple non-contiguous locations.
2388 The Irix 6 ABI has examples of this. */
2389 else if (GET_CODE (valreg) == PARALLEL)
2391 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2395 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2396 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2397 preserve_temp_slots (target);
2400 emit_group_store (target, valreg, bytes,
2401 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2403 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2404 && GET_MODE (target) == GET_MODE (valreg))
2405 /* TARGET and VALREG cannot be equal at this point because the latter
2406 would not have REG_FUNCTION_VALUE_P true, while the former would if
2407 it were referring to the same register.
2409 If they refer to the same register, this move will be a no-op, except
2410 when function inlining is being done. */
2411 emit_move_insn (target, valreg);
2412 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2413 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2415 target = copy_to_reg (valreg);
2417 #ifdef PROMOTE_FUNCTION_RETURN
2418 /* If we promoted this return value, make the proper SUBREG. TARGET
2419 might be const0_rtx here, so be careful. */
2420 if (GET_CODE (target) == REG
2421 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2422 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2424 tree type = TREE_TYPE (exp);
2425 int unsignedp = TREE_UNSIGNED (type);
2427 /* If we don't promote as expected, something is wrong. */
2428 if (GET_MODE (target)
2429 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2432 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2433 SUBREG_PROMOTED_VAR_P (target) = 1;
2434 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2438 /* If size of args is variable or this was a constructor call for a stack
2439 argument, restore saved stack-pointer value. */
2441 if (old_stack_level)
2443 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2444 pending_stack_adjust = old_pending_adj;
2445 #ifdef ACCUMULATE_OUTGOING_ARGS
2446 stack_arg_under_construction = old_stack_arg_under_construction;
2447 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2448 stack_usage_map = initial_stack_usage_map;
2451 #ifdef ACCUMULATE_OUTGOING_ARGS
2454 #ifdef REG_PARM_STACK_SPACE
2456 restore_fixed_argument_area (save_area, argblock,
2457 high_to_save, low_to_save);
2460 /* If we saved any argument areas, restore them. */
2461 for (i = 0; i < num_actuals; i++)
2462 if (args[i].save_area)
2464 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2466 = gen_rtx_MEM (save_mode,
2467 memory_address (save_mode,
2468 XEXP (args[i].stack_slot, 0)));
2470 if (save_mode != BLKmode)
2471 emit_move_insn (stack_area, args[i].save_area);
2473 emit_block_move (stack_area, validize_mem (args[i].save_area),
2474 GEN_INT (args[i].size.constant),
2475 PARM_BOUNDARY / BITS_PER_UNIT);
2478 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2479 stack_usage_map = initial_stack_usage_map;
2483 /* If this was alloca, record the new stack level for nonlocal gotos.
2484 Check for the handler slots since we might not have a save area
2485 for non-local gotos. */
2487 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2488 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2492 /* Free up storage we no longer need. */
2493 for (i = 0; i < num_actuals; ++i)
2494 if (args[i].aligned_regs)
2495 free (args[i].aligned_regs);
2500 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2501 (emitting the queue unless NO_QUEUE is nonzero),
2502 for a value of mode OUTMODE,
2503 with NARGS different arguments, passed as alternating rtx values
2504 and machine_modes to convert them to.
2505 The rtx values should have been passed through protect_from_queue already.
2507 NO_QUEUE will be true if and only if the library call is a `const' call
2508 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2509 to the variable is_const in expand_call.
2511 NO_QUEUE must be true for const calls, because if it isn't, then
2512 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2513 and will be lost if the libcall sequence is optimized away.
2515 NO_QUEUE must be false for non-const calls, because if it isn't, the
2516 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2517 optimized. For instance, the instruction scheduler may incorrectly
2518 move memory references across the non-const call. */
2521 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2524 #ifndef ANSI_PROTOTYPES
2527 enum machine_mode outmode;
2531 /* Total size in bytes of all the stack-parms scanned so far. */
2532 struct args_size args_size;
2533 /* Size of arguments before any adjustments (such as rounding). */
2534 struct args_size original_args_size;
2535 register int argnum;
2540 CUMULATIVE_ARGS args_so_far;
2541 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2542 struct args_size offset; struct args_size size; rtx save_area; };
2544 int old_inhibit_defer_pop = inhibit_defer_pop;
2545 rtx call_fusage = 0;
2546 int reg_parm_stack_space = 0;
2547 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2548 /* Define the boundary of the register parm stack space that needs to be
2550 int low_to_save = -1, high_to_save;
2551 rtx save_area = 0; /* Place that it is saved */
2554 #ifdef ACCUMULATE_OUTGOING_ARGS
2555 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2556 char *initial_stack_usage_map = stack_usage_map;
2560 #ifdef REG_PARM_STACK_SPACE
2561 /* Size of the stack reserved for parameter registers. */
2562 #ifdef MAYBE_REG_PARM_STACK_SPACE
2563 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2565 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2569 VA_START (p, nargs);
2571 #ifndef ANSI_PROTOTYPES
2572 orgfun = va_arg (p, rtx);
2573 no_queue = va_arg (p, int);
2574 outmode = va_arg (p, enum machine_mode);
2575 nargs = va_arg (p, int);
2580 /* Copy all the libcall-arguments out of the varargs data
2581 and into a vector ARGVEC.
2583 Compute how to pass each argument. We only support a very small subset
2584 of the full argument passing conventions to limit complexity here since
2585 library functions shouldn't have many args. */
2587 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2588 bzero ((char *) argvec, nargs * sizeof (struct arg));
2591 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2593 args_size.constant = 0;
2598 for (count = 0; count < nargs; count++)
2600 rtx val = va_arg (p, rtx);
2601 enum machine_mode mode = va_arg (p, enum machine_mode);
2603 /* We cannot convert the arg value to the mode the library wants here;
2604 must do it earlier where we know the signedness of the arg. */
2606 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2609 /* On some machines, there's no way to pass a float to a library fcn.
2610 Pass it as a double instead. */
2611 #ifdef LIBGCC_NEEDS_DOUBLE
2612 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2613 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2616 /* There's no need to call protect_from_queue, because
2617 either emit_move_insn or emit_push_insn will do that. */
2619 /* Make sure it is a reasonable operand for a move or push insn. */
2620 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2621 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2622 val = force_operand (val, NULL_RTX);
2624 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2625 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2627 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2628 be viewed as just an efficiency improvement. */
2629 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2630 emit_move_insn (slot, val);
2631 val = force_operand (XEXP (slot, 0), NULL_RTX);
2636 argvec[count].value = val;
2637 argvec[count].mode = mode;
2639 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2640 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2642 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2643 argvec[count].partial
2644 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2646 argvec[count].partial = 0;
2649 locate_and_pad_parm (mode, NULL_TREE,
2650 argvec[count].reg && argvec[count].partial == 0,
2651 NULL_TREE, &args_size, &argvec[count].offset,
2652 &argvec[count].size);
2654 if (argvec[count].size.var)
2657 if (reg_parm_stack_space == 0 && argvec[count].partial)
2658 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2660 if (argvec[count].reg == 0 || argvec[count].partial != 0
2661 || reg_parm_stack_space > 0)
2662 args_size.constant += argvec[count].size.constant;
2664 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2668 #ifdef FINAL_REG_PARM_STACK_SPACE
2669 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2673 /* If this machine requires an external definition for library
2674 functions, write one out. */
2675 assemble_external_libcall (fun);
2677 original_args_size = args_size;
2678 #ifdef PREFERRED_STACK_BOUNDARY
2679 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2680 / STACK_BYTES) * STACK_BYTES);
2683 args_size.constant = MAX (args_size.constant,
2684 reg_parm_stack_space);
2686 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2687 args_size.constant -= reg_parm_stack_space;
2690 if (args_size.constant > current_function_outgoing_args_size)
2691 current_function_outgoing_args_size = args_size.constant;
2693 #ifdef ACCUMULATE_OUTGOING_ARGS
2694 /* Since the stack pointer will never be pushed, it is possible for
2695 the evaluation of a parm to clobber something we have already
2696 written to the stack. Since most function calls on RISC machines
2697 do not use the stack, this is uncommon, but must work correctly.
2699 Therefore, we save any area of the stack that was already written
2700 and that we are using. Here we set up to do this by making a new
2701 stack usage map from the old one.
2703 Another approach might be to try to reorder the argument
2704 evaluations to avoid this conflicting stack usage. */
2706 needed = args_size.constant;
2708 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2709 /* Since we will be writing into the entire argument area, the
2710 map must be allocated for its entire size, not just the part that
2711 is the responsibility of the caller. */
2712 needed += reg_parm_stack_space;
2715 #ifdef ARGS_GROW_DOWNWARD
2716 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2719 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2722 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2724 if (initial_highest_arg_in_use)
2725 bcopy (initial_stack_usage_map, stack_usage_map,
2726 initial_highest_arg_in_use);
2728 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2729 bzero (&stack_usage_map[initial_highest_arg_in_use],
2730 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2733 /* The address of the outgoing argument list must not be copied to a
2734 register here, because argblock would be left pointing to the
2735 wrong place after the call to allocate_dynamic_stack_space below.
2738 argblock = virtual_outgoing_args_rtx;
2739 #else /* not ACCUMULATE_OUTGOING_ARGS */
2740 #ifndef PUSH_ROUNDING
2741 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2745 #ifdef PUSH_ARGS_REVERSED
2746 #ifdef PREFERRED_STACK_BOUNDARY
2747 /* If we push args individually in reverse order, perform stack alignment
2748 before the first push (the last arg). */
2750 anti_adjust_stack (GEN_INT (args_size.constant
2751 - original_args_size.constant));
2755 #ifdef PUSH_ARGS_REVERSED
2763 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2764 /* The argument list is the property of the called routine and it
2765 may clobber it. If the fixed area has been used for previous
2766 parameters, we must save and restore it.
2768 Here we compute the boundary of the area that needs to be saved, if any. */
2770 #ifdef ARGS_GROW_DOWNWARD
2771 for (count = 0; count < reg_parm_stack_space + 1; count++)
2773 for (count = 0; count < reg_parm_stack_space; count++)
2776 if (count >= highest_outgoing_arg_in_use
2777 || stack_usage_map[count] == 0)
2780 if (low_to_save == -1)
2781 low_to_save = count;
2783 high_to_save = count;
2786 if (low_to_save >= 0)
2788 int num_to_save = high_to_save - low_to_save + 1;
2789 enum machine_mode save_mode
2790 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2793 /* If we don't have the required alignment, must do this in BLKmode. */
2794 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2795 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2796 save_mode = BLKmode;
2798 #ifdef ARGS_GROW_DOWNWARD
2799 stack_area = gen_rtx_MEM (save_mode,
2800 memory_address (save_mode,
2801 plus_constant (argblock,
2804 stack_area = gen_rtx_MEM (save_mode,
2805 memory_address (save_mode,
2806 plus_constant (argblock,
2809 if (save_mode == BLKmode)
2811 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2812 emit_block_move (validize_mem (save_area), stack_area,
2813 GEN_INT (num_to_save),
2814 PARM_BOUNDARY / BITS_PER_UNIT);
2818 save_area = gen_reg_rtx (save_mode);
2819 emit_move_insn (save_area, stack_area);
2824 /* Push the args that need to be pushed. */
2826 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2827 are to be pushed. */
2828 for (count = 0; count < nargs; count++, argnum += inc)
2830 register enum machine_mode mode = argvec[argnum].mode;
2831 register rtx val = argvec[argnum].value;
2832 rtx reg = argvec[argnum].reg;
2833 int partial = argvec[argnum].partial;
2834 #ifdef ACCUMULATE_OUTGOING_ARGS
2835 int lower_bound, upper_bound, i;
2838 if (! (reg != 0 && partial == 0))
2840 #ifdef ACCUMULATE_OUTGOING_ARGS
2841 /* If this is being stored into a pre-allocated, fixed-size, stack
2842 area, save any previous data at that location. */
2844 #ifdef ARGS_GROW_DOWNWARD
2845 /* stack_slot is negative, but we want to index stack_usage_map
2846 with positive values. */
2847 upper_bound = -argvec[argnum].offset.constant + 1;
2848 lower_bound = upper_bound - argvec[argnum].size.constant;
2850 lower_bound = argvec[argnum].offset.constant;
2851 upper_bound = lower_bound + argvec[argnum].size.constant;
2854 for (i = lower_bound; i < upper_bound; i++)
2855 if (stack_usage_map[i]
2856 /* Don't store things in the fixed argument area at this point;
2857 it has already been saved. */
2858 && i > reg_parm_stack_space)
2861 if (i != upper_bound)
2863 /* We need to make a save area. See what mode we can make it. */
2864 enum machine_mode save_mode
2865 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2868 = gen_rtx_MEM (save_mode,
2869 memory_address (save_mode,
2870 plus_constant (argblock, argvec[argnum].offset.constant)));
2871 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2872 emit_move_insn (argvec[argnum].save_area, stack_area);
2875 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2876 argblock, GEN_INT (argvec[argnum].offset.constant),
2877 reg_parm_stack_space);
2879 #ifdef ACCUMULATE_OUTGOING_ARGS
2880 /* Now mark the segment we just used. */
2881 for (i = lower_bound; i < upper_bound; i++)
2882 stack_usage_map[i] = 1;
2889 #ifndef PUSH_ARGS_REVERSED
2890 #ifdef PREFERRED_STACK_BOUNDARY
2891 /* If we pushed args in forward order, perform stack alignment
2892 after pushing the last arg. */
2894 anti_adjust_stack (GEN_INT (args_size.constant
2895 - original_args_size.constant));
2899 #ifdef PUSH_ARGS_REVERSED
2905 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2907 /* Now load any reg parms into their regs. */
2909 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2910 are to be pushed. */
2911 for (count = 0; count < nargs; count++, argnum += inc)
2913 register rtx val = argvec[argnum].value;
2914 rtx reg = argvec[argnum].reg;
2915 int partial = argvec[argnum].partial;
2917 if (reg != 0 && partial == 0)
2918 emit_move_insn (reg, val);
2922 /* For version 1.37, try deleting this entirely. */
2926 /* Any regs containing parms remain in use through the call. */
2927 for (count = 0; count < nargs; count++)
2928 if (argvec[count].reg != 0)
2929 use_reg (&call_fusage, argvec[count].reg);
2931 /* Don't allow popping to be deferred, since then
2932 cse'ing of library calls could delete a call and leave the pop. */
2935 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2936 will set inhibit_defer_pop to that value. */
2938 /* The return type is needed to decide how many bytes the function pops.
2939 Signedness plays no role in that, so for simplicity, we pretend it's
2940 always signed. We also assume that the list of arguments passed has
2941 no impact, so we pretend it is unknown. */
2944 get_identifier (XSTR (orgfun, 0)),
2945 build_function_type (outmode == VOIDmode ? void_type_node
2946 : type_for_mode (outmode, 0), NULL_TREE),
2947 args_size.constant, 0,
2948 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2949 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2950 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2954 /* Now restore inhibit_defer_pop to its actual original value. */
2957 #ifdef ACCUMULATE_OUTGOING_ARGS
2958 #ifdef REG_PARM_STACK_SPACE
2961 enum machine_mode save_mode = GET_MODE (save_area);
2962 #ifdef ARGS_GROW_DOWNWARD
2964 = gen_rtx_MEM (save_mode,
2965 memory_address (save_mode,
2966 plus_constant (argblock,
2970 = gen_rtx_MEM (save_mode,
2971 memory_address (save_mode,
2972 plus_constant (argblock, low_to_save)));
2975 if (save_mode != BLKmode)
2976 emit_move_insn (stack_area, save_area);
2978 emit_block_move (stack_area, validize_mem (save_area),
2979 GEN_INT (high_to_save - low_to_save + 1),
2980 PARM_BOUNDARY / BITS_PER_UNIT);
2984 /* If we saved any argument areas, restore them. */
2985 for (count = 0; count < nargs; count++)
2986 if (argvec[count].save_area)
2988 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2990 = gen_rtx_MEM (save_mode,
2991 memory_address (save_mode,
2992 plus_constant (argblock, argvec[count].offset.constant)));
2994 emit_move_insn (stack_area, argvec[count].save_area);
2997 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2998 stack_usage_map = initial_stack_usage_map;
3002 /* Like emit_library_call except that an extra argument, VALUE,
3003 comes second and says where to store the result.
3004 (If VALUE is zero, this function chooses a convenient way
3005 to return the value.
3007 This function returns an rtx for where the value is to be found.
3008 If VALUE is nonzero, VALUE is returned. */
3011 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3012 enum machine_mode outmode, int nargs, ...))
3014 #ifndef ANSI_PROTOTYPES
3018 enum machine_mode outmode;
3022 /* Total size in bytes of all the stack-parms scanned so far. */
3023 struct args_size args_size;
3024 /* Size of arguments before any adjustments (such as rounding). */
3025 struct args_size original_args_size;
3026 register int argnum;
3031 CUMULATIVE_ARGS args_so_far;
3032 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3033 struct args_size offset; struct args_size size; rtx save_area; };
3035 int old_inhibit_defer_pop = inhibit_defer_pop;
3036 rtx call_fusage = 0;
3038 int pcc_struct_value = 0;
3039 int struct_value_size = 0;
3041 int reg_parm_stack_space = 0;
3042 #ifdef ACCUMULATE_OUTGOING_ARGS
3046 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3047 /* Define the boundary of the register parm stack space that needs to be
3049 int low_to_save = -1, high_to_save;
3050 rtx save_area = 0; /* Place that it is saved */
3053 #ifdef ACCUMULATE_OUTGOING_ARGS
3054 /* Size of the stack reserved for parameter registers. */
3055 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3056 char *initial_stack_usage_map = stack_usage_map;
3059 #ifdef REG_PARM_STACK_SPACE
3060 #ifdef MAYBE_REG_PARM_STACK_SPACE
3061 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3063 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3067 VA_START (p, nargs);
3069 #ifndef ANSI_PROTOTYPES
3070 orgfun = va_arg (p, rtx);
3071 value = va_arg (p, rtx);
3072 no_queue = va_arg (p, int);
3073 outmode = va_arg (p, enum machine_mode);
3074 nargs = va_arg (p, int);
3077 is_const = no_queue;
3080 /* If this kind of value comes back in memory,
3081 decide where in memory it should come back. */
3082 if (aggregate_value_p (type_for_mode (outmode, 0)))
3084 #ifdef PCC_STATIC_STRUCT_RETURN
3086 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3088 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3089 pcc_struct_value = 1;
3091 value = gen_reg_rtx (outmode);
3092 #else /* not PCC_STATIC_STRUCT_RETURN */
3093 struct_value_size = GET_MODE_SIZE (outmode);
3094 if (value != 0 && GET_CODE (value) == MEM)
3097 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3100 /* This call returns a big structure. */
3104 /* ??? Unfinished: must pass the memory address as an argument. */
3106 /* Copy all the libcall-arguments out of the varargs data
3107 and into a vector ARGVEC.
3109 Compute how to pass each argument. We only support a very small subset
3110 of the full argument passing conventions to limit complexity here since
3111 library functions shouldn't have many args. */
3113 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3114 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3116 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3118 args_size.constant = 0;
3125 /* If there's a structure value address to be passed,
3126 either pass it in the special place, or pass it as an extra argument. */
3127 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3129 rtx addr = XEXP (mem_value, 0);
3132 /* Make sure it is a reasonable operand for a move or push insn. */
3133 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3134 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3135 addr = force_operand (addr, NULL_RTX);
3137 argvec[count].value = addr;
3138 argvec[count].mode = Pmode;
3139 argvec[count].partial = 0;
3141 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3142 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3143 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3147 locate_and_pad_parm (Pmode, NULL_TREE,
3148 argvec[count].reg && argvec[count].partial == 0,
3149 NULL_TREE, &args_size, &argvec[count].offset,
3150 &argvec[count].size);
3153 if (argvec[count].reg == 0 || argvec[count].partial != 0
3154 || reg_parm_stack_space > 0)
3155 args_size.constant += argvec[count].size.constant;
3157 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3162 for (; count < nargs; count++)
3164 rtx val = va_arg (p, rtx);
3165 enum machine_mode mode = va_arg (p, enum machine_mode);
3167 /* We cannot convert the arg value to the mode the library wants here;
3168 must do it earlier where we know the signedness of the arg. */
3170 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3173 /* On some machines, there's no way to pass a float to a library fcn.
3174 Pass it as a double instead. */
3175 #ifdef LIBGCC_NEEDS_DOUBLE
3176 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3177 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3180 /* There's no need to call protect_from_queue, because
3181 either emit_move_insn or emit_push_insn will do that. */
3183 /* Make sure it is a reasonable operand for a move or push insn. */
3184 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3185 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3186 val = force_operand (val, NULL_RTX);
3188 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3189 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3191 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3192 be viewed as just an efficiency improvement. */
3193 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3194 emit_move_insn (slot, val);
3195 val = XEXP (slot, 0);
3200 argvec[count].value = val;
3201 argvec[count].mode = mode;
3203 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3204 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3206 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3207 argvec[count].partial
3208 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3210 argvec[count].partial = 0;
3213 locate_and_pad_parm (mode, NULL_TREE,
3214 argvec[count].reg && argvec[count].partial == 0,
3215 NULL_TREE, &args_size, &argvec[count].offset,
3216 &argvec[count].size);
3218 if (argvec[count].size.var)
3221 if (reg_parm_stack_space == 0 && argvec[count].partial)
3222 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3224 if (argvec[count].reg == 0 || argvec[count].partial != 0
3225 || reg_parm_stack_space > 0)
3226 args_size.constant += argvec[count].size.constant;
3228 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3232 #ifdef FINAL_REG_PARM_STACK_SPACE
3233 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3236 /* If this machine requires an external definition for library
3237 functions, write one out. */
3238 assemble_external_libcall (fun);
3240 original_args_size = args_size;
3241 #ifdef PREFERRED_STACK_BOUNDARY
3242 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3243 / STACK_BYTES) * STACK_BYTES);
3246 args_size.constant = MAX (args_size.constant,
3247 reg_parm_stack_space);
3249 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3250 args_size.constant -= reg_parm_stack_space;
3253 if (args_size.constant > current_function_outgoing_args_size)
3254 current_function_outgoing_args_size = args_size.constant;
3256 #ifdef ACCUMULATE_OUTGOING_ARGS
3257 /* Since the stack pointer will never be pushed, it is possible for
3258 the evaluation of a parm to clobber something we have already
3259 written to the stack. Since most function calls on RISC machines
3260 do not use the stack, this is uncommon, but must work correctly.
3262 Therefore, we save any area of the stack that was already written
3263 and that we are using. Here we set up to do this by making a new
3264 stack usage map from the old one.
3266 Another approach might be to try to reorder the argument
3267 evaluations to avoid this conflicting stack usage. */
3269 needed = args_size.constant;
3271 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3272 /* Since we will be writing into the entire argument area, the
3273 map must be allocated for its entire size, not just the part that
3274 is the responsibility of the caller. */
3275 needed += reg_parm_stack_space;
3278 #ifdef ARGS_GROW_DOWNWARD
3279 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3282 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3285 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3287 if (initial_highest_arg_in_use)
3288 bcopy (initial_stack_usage_map, stack_usage_map,
3289 initial_highest_arg_in_use);
3291 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3292 bzero (&stack_usage_map[initial_highest_arg_in_use],
3293 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3296 /* The address of the outgoing argument list must not be copied to a
3297 register here, because argblock would be left pointing to the
3298 wrong place after the call to allocate_dynamic_stack_space below.
3301 argblock = virtual_outgoing_args_rtx;
3302 #else /* not ACCUMULATE_OUTGOING_ARGS */
3303 #ifndef PUSH_ROUNDING
3304 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3308 #ifdef PUSH_ARGS_REVERSED
3309 #ifdef PREFERRED_STACK_BOUNDARY
3310 /* If we push args individually in reverse order, perform stack alignment
3311 before the first push (the last arg). */
3313 anti_adjust_stack (GEN_INT (args_size.constant
3314 - original_args_size.constant));
3318 #ifdef PUSH_ARGS_REVERSED
3326 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3327 /* The argument list is the property of the called routine and it
3328 may clobber it. If the fixed area has been used for previous
3329 parameters, we must save and restore it.
3331 Here we compute the boundary of the that needs to be saved, if any. */
3333 #ifdef ARGS_GROW_DOWNWARD
3334 for (count = 0; count < reg_parm_stack_space + 1; count++)
3336 for (count = 0; count < reg_parm_stack_space; count++)
3339 if (count >= highest_outgoing_arg_in_use
3340 || stack_usage_map[count] == 0)
3343 if (low_to_save == -1)
3344 low_to_save = count;
3346 high_to_save = count;
3349 if (low_to_save >= 0)
3351 int num_to_save = high_to_save - low_to_save + 1;
3352 enum machine_mode save_mode
3353 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3356 /* If we don't have the required alignment, must do this in BLKmode. */
3357 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3358 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3359 save_mode = BLKmode;
3361 #ifdef ARGS_GROW_DOWNWARD
3362 stack_area = gen_rtx_MEM (save_mode,
3363 memory_address (save_mode,
3364 plus_constant (argblock,
3367 stack_area = gen_rtx_MEM (save_mode,
3368 memory_address (save_mode,
3369 plus_constant (argblock,
3372 if (save_mode == BLKmode)
3374 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3375 emit_block_move (validize_mem (save_area), stack_area,
3376 GEN_INT (num_to_save),
3377 PARM_BOUNDARY / BITS_PER_UNIT);
3381 save_area = gen_reg_rtx (save_mode);
3382 emit_move_insn (save_area, stack_area);
3387 /* Push the args that need to be pushed. */
3389 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3390 are to be pushed. */
3391 for (count = 0; count < nargs; count++, argnum += inc)
3393 register enum machine_mode mode = argvec[argnum].mode;
3394 register rtx val = argvec[argnum].value;
3395 rtx reg = argvec[argnum].reg;
3396 int partial = argvec[argnum].partial;
3397 #ifdef ACCUMULATE_OUTGOING_ARGS
3398 int lower_bound, upper_bound, i;
3401 if (! (reg != 0 && partial == 0))
3403 #ifdef ACCUMULATE_OUTGOING_ARGS
3404 /* If this is being stored into a pre-allocated, fixed-size, stack
3405 area, save any previous data at that location. */
3407 #ifdef ARGS_GROW_DOWNWARD
3408 /* stack_slot is negative, but we want to index stack_usage_map
3409 with positive values. */
3410 upper_bound = -argvec[argnum].offset.constant + 1;
3411 lower_bound = upper_bound - argvec[argnum].size.constant;
3413 lower_bound = argvec[argnum].offset.constant;
3414 upper_bound = lower_bound + argvec[argnum].size.constant;
3417 for (i = lower_bound; i < upper_bound; i++)
3418 if (stack_usage_map[i]
3419 /* Don't store things in the fixed argument area at this point;
3420 it has already been saved. */
3421 && i > reg_parm_stack_space)
3424 if (i != upper_bound)
3426 /* We need to make a save area. See what mode we can make it. */
3427 enum machine_mode save_mode
3428 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3431 = gen_rtx_MEM (save_mode,
3432 memory_address (save_mode,
3433 plus_constant (argblock,
3434 argvec[argnum].offset.constant)));
3435 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3436 emit_move_insn (argvec[argnum].save_area, stack_area);
3439 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3440 argblock, GEN_INT (argvec[argnum].offset.constant),
3441 reg_parm_stack_space);
3443 #ifdef ACCUMULATE_OUTGOING_ARGS
3444 /* Now mark the segment we just used. */
3445 for (i = lower_bound; i < upper_bound; i++)
3446 stack_usage_map[i] = 1;
3453 #ifndef PUSH_ARGS_REVERSED
3454 #ifdef PREFERRED_STACK_BOUNDARY
3455 /* If we pushed args in forward order, perform stack alignment
3456 after pushing the last arg. */
3458 anti_adjust_stack (GEN_INT (args_size.constant
3459 - original_args_size.constant));
3463 #ifdef PUSH_ARGS_REVERSED
3469 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3471 /* Now load any reg parms into their regs. */
3473 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3474 are to be pushed. */
3475 for (count = 0; count < nargs; count++, argnum += inc)
3477 register rtx val = argvec[argnum].value;
3478 rtx reg = argvec[argnum].reg;
3479 int partial = argvec[argnum].partial;
3481 if (reg != 0 && partial == 0)
3482 emit_move_insn (reg, val);
3487 /* For version 1.37, try deleting this entirely. */
3492 /* Any regs containing parms remain in use through the call. */
3493 for (count = 0; count < nargs; count++)
3494 if (argvec[count].reg != 0)
3495 use_reg (&call_fusage, argvec[count].reg);
3497 /* Pass the function the address in which to return a structure value. */
3498 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3500 emit_move_insn (struct_value_rtx,
3502 force_operand (XEXP (mem_value, 0),
3504 if (GET_CODE (struct_value_rtx) == REG)
3505 use_reg (&call_fusage, struct_value_rtx);
3508 /* Don't allow popping to be deferred, since then
3509 cse'ing of library calls could delete a call and leave the pop. */
3512 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3513 will set inhibit_defer_pop to that value. */
3514 /* See the comment in emit_library_call about the function type we build
3518 get_identifier (XSTR (orgfun, 0)),
3519 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3520 args_size.constant, struct_value_size,
3521 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3522 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3523 old_inhibit_defer_pop + 1, call_fusage, is_const);
3525 /* Now restore inhibit_defer_pop to its actual original value. */
3530 /* Copy the value to the right place. */
3531 if (outmode != VOIDmode)
3537 if (value != mem_value)
3538 emit_move_insn (value, mem_value);
3540 else if (value != 0)
3541 emit_move_insn (value, hard_libcall_value (outmode));
3543 value = hard_libcall_value (outmode);
3546 #ifdef ACCUMULATE_OUTGOING_ARGS
3547 #ifdef REG_PARM_STACK_SPACE
3550 enum machine_mode save_mode = GET_MODE (save_area);
3551 #ifdef ARGS_GROW_DOWNWARD
3553 = gen_rtx_MEM (save_mode,
3554 memory_address (save_mode,
3555 plus_constant (argblock,
3559 = gen_rtx_MEM (save_mode,
3560 memory_address (save_mode,
3561 plus_constant (argblock, low_to_save)));
3563 if (save_mode != BLKmode)
3564 emit_move_insn (stack_area, save_area);
3566 emit_block_move (stack_area, validize_mem (save_area),
3567 GEN_INT (high_to_save - low_to_save + 1),
3568 PARM_BOUNDARY / BITS_PER_UNIT);
3572 /* If we saved any argument areas, restore them. */
3573 for (count = 0; count < nargs; count++)
3574 if (argvec[count].save_area)
3576 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3578 = gen_rtx_MEM (save_mode,
3579 memory_address (save_mode, plus_constant (argblock,
3580 argvec[count].offset.constant)));
3582 emit_move_insn (stack_area, argvec[count].save_area);
3585 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3586 stack_usage_map = initial_stack_usage_map;
3593 /* Return an rtx which represents a suitable home on the stack
3594 given TYPE, the type of the argument looking for a home.
3595 This is called only for BLKmode arguments.
3597 SIZE is the size needed for this target.
3598 ARGS_ADDR is the address of the bottom of the argument block for this call.
3599 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3600 if this machine uses push insns. */
/* NOTE(review): K&R-style definition; the declarations for TYPE, SIZE and
   ARGS_ADDR (and the return type, presumably static rtx) are not visible
   in this excerpt -- confirm against the full file. */
3603 target_for_arg (type, size, args_addr, offset)
3607 struct args_size offset;
/* Materialize OFFSET (an args_size, possibly with a variable part) as rtl. */
3610 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3612 /* We do not call memory_address if possible,
3613 because we want to address as close to the stack
3614 as possible. For non-variable sized arguments,
3615 this will be stack-pointer relative addressing. */
/* Constant offset: build the address directly with plus_constant and skip
   legitimization, keeping the address stack-pointer relative. */
3616 if (GET_CODE (offset_rtx) == CONST_INT)
3617 target = plus_constant (args_addr, INTVAL (offset_rtx));
3620 /* I have no idea how to guarantee that this
3621 will work in the presence of register parameters. */
/* Variable offset: form an explicit PLUS and legitimize it; QImode is used
   only to validate the address, since the actual access mode is BLKmode. */
3622 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3623 target = memory_address (QImode, target);
/* Hand back a BLKmode MEM at the computed address for the caller to fill. */
3626 return gen_rtx_MEM (BLKmode, target);
3630 /* Store a single argument for a function call
3631 into the register or memory area where it must be passed.
3632 *ARG describes the argument value and where to pass it.
3634 ARGBLOCK is the address of the stack-block for all the arguments,
3635 or 0 on a machine where arguments are pushed individually.
3637 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3638 so must be careful about how the stack is used.
3640 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3641 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3642 that we need not worry about saving and restoring the stack.
3644 FNDECL is the declaration of the function we are calling. */
3647 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3648 reg_parm_stack_space)
3649 struct arg_data *arg;
3652 int variable_size ATTRIBUTE_UNUSED;
3653 int reg_parm_stack_space;
3655 register tree pval = arg->tree_value;
3659 #ifdef ACCUMULATE_OUTGOING_ARGS
3660 int i, lower_bound, upper_bound;
3663 if (TREE_CODE (pval) == ERROR_MARK)
3666 /* Push a new temporary level for any temporaries we make for
3670 #ifdef ACCUMULATE_OUTGOING_ARGS
3671 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3672 save any previous data at that location. */
3673 if (argblock && ! variable_size && arg->stack)
3675 #ifdef ARGS_GROW_DOWNWARD
3676 /* stack_slot is negative, but we want to index stack_usage_map
3677 with positive values. */
3678 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3679 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3683 lower_bound = upper_bound - arg->size.constant;
3685 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3686 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3690 upper_bound = lower_bound + arg->size.constant;
3693 for (i = lower_bound; i < upper_bound; i++)
3694 if (stack_usage_map[i]
3695 /* Don't store things in the fixed argument area at this point;
3696 it has already been saved. */
3697 && i > reg_parm_stack_space)
3700 if (i != upper_bound)
3702 /* We need to make a save area. See what mode we can make it. */
3703 enum machine_mode save_mode
3704 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3706 = gen_rtx_MEM (save_mode,
3707 memory_address (save_mode,
3708 XEXP (arg->stack_slot, 0)));
3710 if (save_mode == BLKmode)
3712 arg->save_area = assign_stack_temp (BLKmode,
3713 arg->size.constant, 0);
3714 MEM_SET_IN_STRUCT_P (arg->save_area,
3715 AGGREGATE_TYPE_P (TREE_TYPE
3716 (arg->tree_value)));
3717 preserve_temp_slots (arg->save_area);
3718 emit_block_move (validize_mem (arg->save_area), stack_area,
3719 GEN_INT (arg->size.constant),
3720 PARM_BOUNDARY / BITS_PER_UNIT);
3724 arg->save_area = gen_reg_rtx (save_mode);
3725 emit_move_insn (arg->save_area, stack_area);
3730 /* Now that we have saved any slots that will be overwritten by this
3731 store, mark all slots this store will use. We must do this before
3732 we actually expand the argument since the expansion itself may
3733 trigger library calls which might need to use the same stack slot. */
3734 if (argblock && ! variable_size && arg->stack)
3735 for (i = lower_bound; i < upper_bound; i++)
3736 stack_usage_map[i] = 1;
3739 /* If this isn't going to be placed on both the stack and in registers,
3740 set up the register and number of words. */
3741 if (! arg->pass_on_stack)
3742 reg = arg->reg, partial = arg->partial;
3744 if (reg != 0 && partial == 0)
3745 /* Being passed entirely in a register. We shouldn't be called in
3749 /* If this arg needs special alignment, don't load the registers
3751 if (arg->n_aligned_regs != 0)
3754 /* If this is being passed partially in a register, we can't evaluate
3755 it directly into its stack slot. Otherwise, we can. */
3756 if (arg->value == 0)
3758 #ifdef ACCUMULATE_OUTGOING_ARGS
3759 /* stack_arg_under_construction is nonzero if a function argument is
3760 being evaluated directly into the outgoing argument list and
3761 expand_call must take special action to preserve the argument list
3762 if it is called recursively.
3764 For scalar function arguments stack_usage_map is sufficient to
3765 determine which stack slots must be saved and restored. Scalar
3766 arguments in general have pass_on_stack == 0.
3768 If this argument is initialized by a function which takes the
3769 address of the argument (a C++ constructor or a C function
3770 returning a BLKmode structure), then stack_usage_map is
3771 insufficient and expand_call must push the stack around the
3772 function call. Such arguments have pass_on_stack == 1.
3774 Note that it is always safe to set stack_arg_under_construction,
3775 but this generates suboptimal code if set when not needed. */
3777 if (arg->pass_on_stack)
3778 stack_arg_under_construction++;
3780 arg->value = expand_expr (pval,
3782 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3783 ? NULL_RTX : arg->stack,
3786 /* If we are promoting object (or for any other reason) the mode
3787 doesn't agree, convert the mode. */
3789 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3790 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3791 arg->value, arg->unsignedp);
3793 #ifdef ACCUMULATE_OUTGOING_ARGS
3794 if (arg->pass_on_stack)
3795 stack_arg_under_construction--;
3799 /* Don't allow anything left on stack from computation
3800 of argument to alloca. */
3802 do_pending_stack_adjust ();
3804 if (arg->value == arg->stack)
3806 /* If the value is already in the stack slot, we are done moving
3808 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3810 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3811 XEXP (arg->stack, 0), ptr_mode,
3812 ARGS_SIZE_RTX (arg->size),
3813 TYPE_MODE (sizetype),
3814 GEN_INT (MEMORY_USE_RW),
3815 TYPE_MODE (integer_type_node));
3818 else if (arg->mode != BLKmode)
3822 /* Argument is a scalar, not entirely passed in registers.
3823 (If part is passed in registers, arg->partial says how much
3824 and emit_push_insn will take care of putting it there.)
3826 Push it, and if its size is less than the
3827 amount of space allocated to it,
3828 also bump stack pointer by the additional space.
3829 Note that in C the default argument promotions
3830 will prevent such mismatches. */
3832 size = GET_MODE_SIZE (arg->mode);
3833 /* Compute how much space the push instruction will push.
3834 On many machines, pushing a byte will advance the stack
3835 pointer by a halfword. */
3836 #ifdef PUSH_ROUNDING
3837 size = PUSH_ROUNDING (size);
3841 /* Compute how much space the argument should get:
3842 round up to a multiple of the alignment for arguments. */
3843 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3844 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3845 / (PARM_BOUNDARY / BITS_PER_UNIT))
3846 * (PARM_BOUNDARY / BITS_PER_UNIT));
3848 /* This isn't already where we want it on the stack, so put it there.
3849 This can either be done with push or copy insns. */
3850 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3851 partial, reg, used - size, argblock,
3852 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3856 /* BLKmode, at least partly to be pushed. */
3858 register int excess;
3861 /* Pushing a nonscalar.
3862 If part is passed in registers, PARTIAL says how much
3863 and emit_push_insn will take care of putting it there. */
3865 /* Round its size up to a multiple
3866 of the allocation unit for arguments. */
3868 if (arg->size.var != 0)
3871 size_rtx = ARGS_SIZE_RTX (arg->size);
3875 /* PUSH_ROUNDING has no effect on us, because
3876 emit_push_insn for BLKmode is careful to avoid it. */
3877 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3878 + partial * UNITS_PER_WORD);
3879 size_rtx = expr_size (pval);
3882 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3883 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3884 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3885 reg_parm_stack_space);
3889 /* Unless this is a partially-in-register argument, the argument is now
3892 ??? Note that this can change arg->value from arg->stack to
3893 arg->stack_slot and it matters when they are not the same.
3894 It isn't totally clear that this is correct in all cases. */
3896 arg->value = arg->stack_slot;
3898 /* Once we have pushed something, pops can't safely
3899 be deferred during the rest of the arguments. */
3902 /* ANSI doesn't require a sequence point here,
3903 but PCC has one, so this will avoid some problems. */
3906 /* Free any temporary slots made in processing this argument. Show
3907 that we might have taken the address of something and pushed that
3909 preserve_temp_slots (NULL_RTX);