1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
/* NOTE(review): this listing is a lossy extract -- the embedded original
   line numbers jump, so intervening lines (other #include's, #else/#endif
   lines, etc.) are missing from this view.  Comments below annotate only
   what is visible; do not assume the gaps are empty.  */
30 #include "insn-flags.h"
/* Default the preferred stack boundary to the target's plain STACK_BOUNDARY
   when the target does not define one itself.  */
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
38 /* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
/* PUSH_ARGS_REVERSED is defined when exactly one of STACK_GROWS_DOWNWARD /
   ARGS_GROW_DOWNWARD is defined, i.e. stack and args grow oppositely.
   NOTE(review): an enclosing PUSHABLE-args condition and the #endif are
   presumably in the elided lines -- confirm against the full file.  */
46 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47 #define PUSH_ARGS_REVERSED /* If it's last to first */
52 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
55 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): the `struct arg_data {' opening line and several member
   declarations (e.g. the tree_value/value/initial_value/reg/unsignedp/
   partial/pass_on_stack/stack/stack_slot/save_area/aligned_regs/
   n_aligned_regs fields described by the comments below) are absent from
   this extract; only their comments and a few members remain visible.  */
59 /* Tree node for this argument. */
61 /* Mode for value; TYPE_MODE unless promoted. */
62 enum machine_mode mode;
63 /* Current RTL value for argument, or 0 if it isn't precomputed. */
65 /* Initially-compute RTL value for argument; only for const functions. */
67 /* Register to pass this argument in, 0 if passed on stack, or an
68 PARALLEL if the arg is to be copied into multiple non-contiguous
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Non-zero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Offset of this argument from beginning of stack-args. */
83 struct args_size offset;
84 /* Similar, but offset to the start of the stack slot. Different from
85 OFFSET if this arg pads downward. */
86 struct args_size slot_offset;
87 /* Size of this argument on the stack, rounded up for any padding it gets,
88 parts of the argument passed in registers do not count.
89 If REG_PARM_STACK_SPACE is defined, then register parms
90 are counted here as well. */
91 struct args_size size;
92 /* Location on the stack at which parameter should be stored. The store
93 has already been done if STACK == VALUE. */
95 /* Location on the stack of the start of this argument slot. This can
96 differ from STACK if this arg pads downward. This location is known
97 to be aligned to FUNCTION_ARG_BOUNDARY. */
99 #ifdef ACCUMULATE_OUTGOING_ARGS
100 /* Place that this stack area has been saved, if needed. */
103 /* If an argument's alignment does not permit direct copying into registers,
104 copy in smaller-sized pieces into pseudos. These are stored in a
105 block pointed to by this field. The next field says how many
106 word-sized pseudos we made. */
/* File-scope bookkeeping for outgoing-argument stack space, used when the
   target accumulates outgoing args rather than pushing them.  */
111 #ifdef ACCUMULATE_OUTGOING_ARGS
112 /* A vector of one char per byte of stack space. A byte is non-zero if
113 the corresponding stack location has been used.
114 This vector is used to prevent a function call within an argument from
115 clobbering any stack already set up. */
116 static char *stack_usage_map;
118 /* Size of STACK_USAGE_MAP. */
119 static int highest_outgoing_arg_in_use;
/* NOTE(review): the matching #endif for the #ifdef above is in the elided
   lines of this extract.  */
121 /* stack_arg_under_construction is nonzero when an argument may be
122 initialized with a constructor call (including a C function that
123 returns a BLKmode struct) and expand_call must take special action
124 to make sure the object being constructed does not overlap the
125 argument list for the constructor call. */
126 int stack_arg_under_construction;
/* Forward declarations for the static helpers of expand_call.  PROTO is
   the pre-ANSI prototype-compatibility macro used throughout old GCC.
   NOTE(review): several parameter lists below are visibly truncated by
   the extract (e.g. special_function_p, initialize_argument_information);
   the closing `));' lines for them are among the elided lines.  */
129 static int calls_function PROTO ((tree, int));
130 static int calls_function_1 PROTO ((tree, int));
131 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
132 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
133 rtx, int, rtx, int));
134 static void special_function_p PROTO ((char *, tree, int *, int *,
136 static void precompute_register_parameters PROTO ((int, struct arg_data *,
138 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
140 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
142 static int finalize_must_preallocate PROTO ((int, int,
144 struct args_size *));
145 static void precompute_arguments PROTO ((int, int, int,
147 struct args_size *));
148 static int compute_argument_block_size PROTO ((int,
149 struct args_size *));
150 static void initialize_argument_information PROTO ((int,
157 static void compute_argument_addresses PROTO ((struct arg_data *,
159 static rtx rtx_for_function_call PROTO ((tree, tree));
160 static void load_register_parameters PROTO ((struct arg_data *,
/* Fixed-argument-area save/restore exists only when outgoing args are
   accumulated AND the target reserves stack space for register parms.  */
163 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
164 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
165 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
168 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
171 If WHICH is 0, return 1 if EXP contains a call to any function.
172 Actually, we only need return 1 if evaluating EXP would require pushing
173 arguments on the stack, but that is too difficult to compute, so we just
174 assume any function call might require the stack. */
/* Scratch list of SAVE_EXPRs already visited by calls_function_1, so a
   shared SAVE_EXPR is not walked twice; reset around each top-level query
   so no tree nodes are retained between queries.  */
176 static tree calls_function_save_exprs;
/* Top-level entry: clear the visited list, delegate the tree walk to
   calls_function_1, clear the list again, and (per the elided `return')
   hand back its answer.  NOTE(review): return type, parameter declarations,
   braces and the declaration of `val' are among the lines elided from this
   extract.  */
179 calls_function (exp, which)
184 calls_function_save_exprs = 0;
185 val = calls_function_1 (exp, which);
186 calls_function_save_exprs = 0;
/* Recursive worker for calls_function: walk EXP looking for calls (any
   call when WHICH == 0; per the comment above, alloca-like calls when
   WHICH == 1).  NOTE(review): this extract omits the return type, the
   parameter declarations, the switch statement the `case' labels below
   belong to, and most `return' statements -- the visible lines are the
   skeleton of the walk only.  */
191 calls_function_1 (exp, which)
196 enum tree_code code = TREE_CODE (exp);
197 int type = TREE_CODE_CLASS (code);
198 int length = tree_code_length[(int) code];
200 /* If this code is language-specific, we don't know what it will do. */
201 if ((int) code >= NUM_TREE_CODES)
204 /* Only expressions and references can contain calls. */
205 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* Call through the address of a known FUNCTION_DECL: check for alloca,
   either as the builtin or as a function already expanded whose saved
   insns record that it calls alloca.  */
214 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
215 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
218 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
220 if ((DECL_BUILT_IN (fndecl)
221 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
222 || (DECL_SAVED_INSNS (fndecl)
223 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
227 /* Third operand is RTL. */
/* SAVE_EXPR handling: an already-expanded SAVE_EXPR cannot call anything
   further; otherwise mark it visited and recurse into its operand once.  */
232 if (SAVE_EXPR_RTL (exp) != 0)
234 if (value_member (exp, calls_function_save_exprs))
236 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
237 calls_function_save_exprs)
238 return (TREE_OPERAND (exp, 0) != 0
239 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK handling: scan the initializers of its variables, then recurse
   into each subblock.  */
245 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
246 if (DECL_INITIAL (local) != 0
247 && calls_function_1 (DECL_INITIAL (local), which))
251 register tree subblock;
253 for (subblock = BLOCK_SUBBLOCKS (exp);
255 subblock = TREE_CHAIN (subblock))
256 if (calls_function_1 (subblock, which))
261 case METHOD_CALL_EXPR:
265 case WITH_CLEANUP_EXPR:
/* Default: recurse into every operand of the expression.  */
276 for (i = 0; i < length; i++)
277 if (TREE_OPERAND (exp, i) != 0
278 && calls_function_1 (TREE_OPERAND (exp, i), which))
284 /* Force FUNEXP into a form suitable for the address of a CALL,
285 and return that as an rtx. Also load the static chain register
286 if FNDECL is a nested function.
288 CALL_FUSAGE points to a variable holding the prospective
289 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): return type, parameter declarations, braces, the guard
   around lookup_static_chain, the #endif's for the CSE conditionals and
   the final `return funexp;' are among the lines elided from this
   extract.  */
292 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
298 rtx static_chain_value = 0;
300 funexp = protect_from_queue (funexp, 0);
303 /* Get possible static chain value for nested function in C. */
304 static_chain_value = lookup_static_chain (fndecl);
306 /* Make a valid memory address and copy constants thru pseudo-regs,
307 but not for a constant address if -fno-function-cse. */
308 if (GET_CODE (funexp) != SYMBOL_REF)
309 /* If we are using registers for parameters, force the
310 function address into a register now. */
311 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
312 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
313 : memory_address (FUNCTION_MODE, funexp));
/* With function CSE enabled, force the address into a register so equal
   call targets can be commoned -- unless the target forbids CSE of
   recursive calls and this is a self-call.  */
316 #ifndef NO_FUNCTION_CSE
317 if (optimize && ! flag_no_function_cse)
318 #ifdef NO_RECURSIVE_FUNCTION_CSE
319 if (fndecl != current_function_decl)
321 funexp = force_reg (Pmode, funexp);
/* Load the static chain register and record it in CALL_FUSAGE so the
   call insn is known to use it.  */
325 if (static_chain_value != 0)
327 emit_move_insn (static_chain_rtx, static_chain_value);
329 if (GET_CODE (static_chain_rtx) == REG)
330 use_reg (call_fusage, static_chain_rtx);
336 /* Generate instructions to call function FUNEXP,
337 and optionally pop the results.
338 The CALL_INSN is the first insn generated.
340 FNDECL is the declaration node of the function. This is given to the
341 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
343 FUNTYPE is the data type of the function. This is given to the macro
344 RETURN_POPS_ARGS to determine whether this function pops its own args.
345 We used to allow an identifier for library functions, but that doesn't
346 work when the return type is an aggregate type and the calling convention
347 says that the pointer to this aggregate is to be popped by the callee.
349 STACK_SIZE is the number of bytes of arguments on the stack,
350 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
351 This is both to put into the call insn and
352 to generate explicit popping code if necessary.
354 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
355 It is zero if this call doesn't want a structure value.
357 NEXT_ARG_REG is the rtx that results from executing
358 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
359 just after all the args have had their registers assigned.
360 This could be whatever you like, but normally it is the first
361 arg-register beyond those used for args in this call,
362 or 0 if all the arg-registers are used in this call.
363 It is passed on to `gen_call' so you can put this info in the call insn.
365 VALREG is a hard register in which a value is returned,
366 or 0 if the call does not return a value.
368 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
369 the args to this call were processed.
370 We restore `inhibit_defer_pop' to that value.
372 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
373 denote registers used by the called function.
375 IS_CONST is true if this is a `const' call. */
/* NOTE(review): this extract elides the `static void' line, several
   parameter declarations (funexp, next_arg_reg, valreg, call_fusage,
   is_const), local declarations (call_insn, pat, link), braces, #else/
   #endif lines, and the branches that set already_popped -- read the
   visible lines as a skeleton only.  */
378 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
379 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
380 call_fusage, is_const)
382 tree fndecl ATTRIBUTE_UNUSED;
383 tree funtype ATTRIBUTE_UNUSED;
384 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
385 HOST_WIDE_INT rounded_stack_size;
386 HOST_WIDE_INT struct_value_size;
389 int old_inhibit_defer_pop;
393 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
394 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
/* n_popped is how many argument bytes the CALLEE pops on return, per the
   target's RETURN_POPS_ARGS; only meaningful when args are pushed rather
   than accumulated.  */
396 #ifndef ACCUMULATE_OUTGOING_ARGS
397 int already_popped = 0;
398 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
401 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
402 and we don't want to load it into a register as an optimization,
403 because prepare_call_address already did it if it should be done. */
404 if (GET_CODE (funexp) != SYMBOL_REF)
405 funexp = memory_address (FUNCTION_MODE, funexp)
/* Emit the call, preferring a popping pattern (call_pop/call_value_pop)
   when the callee pops its args, otherwise the plain patterns; the
   value-returning variant is chosen when VALREG is set.  */
407 #ifndef ACCUMULATE_OUTGOING_ARGS
408 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
409 /* If the target has "call" or "call_value" insns, then prefer them
410 if no arguments are actually popped. If the target does not have
411 "call" or "call_value" insns, then we must use the popping versions
412 even if the call has no arguments to pop. */
413 #if defined (HAVE_call) && defined (HAVE_call_value)
414 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
417 if (HAVE_call_pop && HAVE_call_value_pop)
420 rtx n_pop = GEN_INT (n_popped);
423 /* If this subroutine pops its own args, record that in the call insn
424 if possible, for the sake of frame pointer elimination. */
427 pat = gen_call_value_pop (valreg,
428 gen_rtx_MEM (FUNCTION_MODE, funexp),
429 rounded_stack_size_rtx, next_arg_reg, n_pop);
431 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
432 rounded_stack_size_rtx, next_arg_reg, n_pop);
434 emit_call_insn (pat);
441 #if defined (HAVE_call) && defined (HAVE_call_value)
442 if (HAVE_call && HAVE_call_value)
445 emit_call_insn (gen_call_value (valreg,
446 gen_rtx_MEM (FUNCTION_MODE, funexp),
447 rounded_stack_size_rtx, next_arg_reg,
450 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
451 rounded_stack_size_rtx, next_arg_reg,
452 struct_value_size_rtx));
458 /* Find the CALL insn we just emitted. */
459 for (call_insn = get_last_insn ();
460 call_insn && GET_CODE (call_insn) != CALL_INSN;
461 call_insn = PREV_INSN (call_insn))
467 /* Put the register usage information on the CALL. If there is already
468 some usage information, put ours at the end. */
469 if (CALL_INSN_FUNCTION_USAGE (call_insn))
473 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
474 link = XEXP (link, 1))
477 XEXP (link, 1) = call_fusage;
480 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
482 /* If this is a const call, then set the insn's unchanging bit. */
484 CONST_CALL_P (call_insn) = 1;
486 /* Restore this now, so that we do defer pops for this call's args
487 if the context of the call as a whole permits. */
488 inhibit_defer_pop = old_inhibit_defer_pop;
490 #ifndef ACCUMULATE_OUTGOING_ARGS
491 /* If returning from the subroutine does not automatically pop the args,
492 we need an instruction to pop them sooner or later.
493 Perhaps do it now; perhaps just record how much space to pop later.
495 If returning from the subroutine does pop the args, indicate that the
496 stack pointer will be changed. */
/* Callee-pop case: record a stack-pointer CLOBBER in the call's usage
   list and reduce the amount we still have to pop ourselves.  */
501 CALL_INSN_FUNCTION_USAGE (call_insn)
502 = gen_rtx_EXPR_LIST (VOIDmode,
503 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
504 CALL_INSN_FUNCTION_USAGE (call_insn));
505 rounded_stack_size -= n_popped;
506 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
/* Remaining caller-pop bytes: either defer the adjustment by folding it
   into pending_stack_adjust, or emit the adjust_stack now.  */
509 if (rounded_stack_size != 0)
511 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
512 pending_stack_adjust += rounded_stack_size;
514 adjust_stack (rounded_stack_size_rtx);
519 /* Determine if the function identified by NAME and FNDECL is one with
520 special properties we wish to know about.
522 For example, if the function might return more than one time (setjmp), then
523 set RETURNS_TWICE to a nonzero value.
525 Similarly set IS_LONGJMP for if the function is in the longjmp family.
527 Set IS_MALLOC for any of the standard memory allocation functions which
528 allocate from the heap.
530 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
531 space from the stack such as alloca. */
/* NOTE(review): the extract elides the `static void' line, the parameter
   declarations, braces, the `tname' declaration and the assignments the
   strcmp chains below feed (the *returns_twice / *is_longjmp / *is_malloc /
   *may_be_alloca stores), plus parts of several conditions.  */
534 special_function_p (name, fndecl, returns_twice, is_longjmp,
535 is_malloc, may_be_alloca)
/* Only file-scope `extern' functions with short names can be the magic
   library entry points recognized below.  */
548 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
549 /* Exclude functions not at the file scope, or not `extern',
550 since they are not the magic functions we would otherwise
552 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
556 /* We assume that alloca will always be called by name. It
557 makes no sense to pass it as a pointer-to-function to
558 anything that does not understand its behavior. */
560 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
562 && ! strcmp (name, "alloca"))
563 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
565 && ! strcmp (name, "__builtin_alloca"))));
567 /* Disregard prefix _, __ or __x. */
570 if (name[1] == '_' && name[2] == 'x')
572 else if (name[1] == '_')
/* setjmp family => returns twice; longjmp family => is_longjmp.  */
582 && (! strcmp (tname, "setjmp")
583 || ! strcmp (tname, "setjmp_syscall")))
585 && ! strcmp (tname, "sigsetjmp"))
587 && ! strcmp (tname, "savectx")));
589 && ! strcmp (tname, "siglongjmp"))
592 else if ((tname[0] == 'q' && tname[1] == 's'
593 && ! strcmp (tname, "qsetjmp"))
594 || (tname[0] == 'v' && tname[1] == 'f'
595 && ! strcmp (tname, "vfork")))
598 else if (tname[0] == 'l' && tname[1] == 'o'
599 && ! strcmp (tname, "longjmp"))
601 /* XXX should have "malloc" attribute on functions instead
602 of recognizing them by name. */
603 else if (! strcmp (tname, "malloc")
604 || ! strcmp (tname, "calloc")
605 || ! strcmp (tname, "realloc")
606 /* Note use of NAME rather than TNAME here. These functions
607 are only reserved when preceded with __. */
608 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
609 || ! strcmp (name, "__nw") /* mangled __builtin_new */
610 || ! strcmp (name, "__builtin_new")
611 || ! strcmp (name, "__builtin_vec_new"))
616 /* Precompute all register parameters as described by ARGS, storing values
617 into fields within the ARGS array.
619 NUM_ACTUALS indicates the total number elements in the ARGS array.
621 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): the `static void' line, the int parameter declarations,
   braces, the `int i' declaration, the `*reg_parm_seen = 1' store and the
   emit_queue/assignment targets implied by the fragments below are elided
   from this extract.  */
624 precompute_register_parameters (num_actuals, args, reg_parm_seen)
626 struct arg_data *args;
/* For each argument going (at least partly) in registers: expand it if
   not already done, promote it to the target-mandated wider mode, and
   copy expensive values into a pseudo to avoid reload trouble.  */
633 for (i = 0; i < num_actuals; i++)
634 if (args[i].reg != 0 && ! args[i].pass_on_stack)
638 if (args[i].value == 0)
641 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
643 preserve_temp_slots (args[i].value);
646 /* ANSI doesn't require a sequence point here,
647 but PCC has one, so this will avoid some problems. */
651 /* If we are to promote the function arg to a wider mode,
654 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
656 = convert_modes (args[i].mode,
657 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
658 args[i].value, args[i].unsignedp);
660 /* If the value is expensive, and we are inside an appropriately
661 short loop, put the value into a pseudo and then put the pseudo
664 For small register classes, also do this if this call uses
665 register parameters. This is to avoid reload conflicts while
666 loading the parameters registers. */
668 if ((! (GET_CODE (args[i].value) == REG
669 || (GET_CODE (args[i].value) == SUBREG
670 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
671 && args[i].mode != BLKmode
672 && rtx_cost (args[i].value, SET) > 2
673 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
674 || preserve_subexpressions_p ()))
675 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
679 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
681 /* The argument list is the property of the called routine and it
682 may clobber it. If the fixed area has been used for previous
683 parameters, we must save and restore it. */
/* Returns the rtx holding the saved copy (NULL_RTX when nothing was in
   use), and reports the byte range saved through *LOW_TO_SAVE /
   *HIGH_TO_SAVE.  NOTE(review): the `static rtx' line, three parameter
   declarations, local declarations (i, stack_area), braces, the *low/
   *high assignments inside the scan, the plus_constant offsets, and the
   final `return save_area;' are elided from this extract.  */
685 save_fixed_argument_area (reg_parm_stack_space, argblock,
686 low_to_save, high_to_save)
687 int reg_parm_stack_space;
693 rtx save_area = NULL_RTX;
695 /* Compute the boundary of the area that needs to be saved, if any. */
696 #ifdef ARGS_GROW_DOWNWARD
697 for (i = 0; i < reg_parm_stack_space + 1; i++)
699 for (i = 0; i < reg_parm_stack_space; i++)
/* Scan stack_usage_map for in-use bytes within the register-parm area.  */
702 if (i >= highest_outgoing_arg_in_use
703 || stack_usage_map[i] == 0)
706 if (*low_to_save == -1)
/* Something was in use: pick a machine mode covering the whole range and
   copy it out, falling back to a BLKmode stack temporary when the range
   is not suitably aligned for a single move.  */
712 if (*low_to_save >= 0)
714 int num_to_save = *high_to_save - *low_to_save + 1;
715 enum machine_mode save_mode
716 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
719 /* If we don't have the required alignment, must do this in BLKmode. */
720 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
721 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
724 #ifdef ARGS_GROW_DOWNWARD
725 stack_area = gen_rtx_MEM (save_mode,
726 memory_address (save_mode,
727 plus_constant (argblock,
730 stack_area = gen_rtx_MEM (save_mode,
731 memory_address (save_mode,
732 plus_constant (argblock,
735 if (save_mode == BLKmode)
737 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
738 emit_block_move (validize_mem (save_area), stack_area,
739 GEN_INT (num_to_save),
740 PARM_BOUNDARY / BITS_PER_UNIT);
744 save_area = gen_reg_rtx (save_mode);
745 emit_move_insn (save_area, stack_area);
/* Inverse of save_fixed_argument_area: copy SAVE_AREA back into the byte
   range [LOW_TO_SAVE, HIGH_TO_SAVE] of the register-parm area at ARGBLOCK,
   as a single move when SAVE_AREA has a real machine mode, or as a block
   move for BLKmode.  NOTE(review): the `static void' line, parameter
   declarations, braces, the `stack_area' declaration and the plus_constant
   offsets are elided from this extract.  */
752 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
758 enum machine_mode save_mode = GET_MODE (save_area);
759 #ifdef ARGS_GROW_DOWNWARD
761 = gen_rtx_MEM (save_mode,
762 memory_address (save_mode,
763 plus_constant (argblock,
767 = gen_rtx_MEM (save_mode,
768 memory_address (save_mode,
769 plus_constant (argblock,
773 if (save_mode != BLKmode)
774 emit_move_insn (stack_area, save_area);
776 emit_block_move (stack_area, validize_mem (save_area),
777 GEN_INT (high_to_save - low_to_save + 1),
778 PARM_BOUNDARY / BITS_PER_UNIT);
782 /* If any elements in ARGS refer to parameters that are to be passed in
783 registers, but not in memory, and whose alignment does not permit a
784 direct copy into registers. Copy the values into a group of pseudos
785 which we will later copy into the appropriate hard registers.
787 Pseudos for each unaligned argument will be stored into the array
788 args[argnum].aligned_regs. The caller is responsible for deallocating
789 the aligned_regs array if it is nonzero. */
/* NOTE(review): the `static void' line, the `int num_actuals' declaration,
   braces, the loop-variable declarations (i, j) and the closing arguments
   of the store_bit_field/extract_bit_field calls are elided from this
   extract.  */
792 store_unaligned_arguments_into_pseudos (args, num_actuals)
793 struct arg_data *args;
/* Only BLKmode register args whose type alignment is below the word
   (or BIGGEST_ALIGNMENT) threshold need this treatment.  */
798 for (i = 0; i < num_actuals; i++)
799 if (args[i].reg != 0 && ! args[i].pass_on_stack
800 && args[i].mode == BLKmode
801 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
802 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
804 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
805 int big_endian_correction = 0;
/* One word-sized pseudo per word of the argument (or per partial reg).  */
807 args[i].n_aligned_regs
808 = args[i].partial ? args[i].partial
809 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
811 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
812 * args[i].n_aligned_regs);
814 /* Structures smaller than a word are aligned to the least
815 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
816 this means we must skip the empty high order bytes when
817 calculating the bit offset. */
818 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
819 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
821 for (j = 0; j < args[i].n_aligned_regs; j++)
823 rtx reg = gen_reg_rtx (word_mode);
824 rtx word = operand_subword_force (args[i].value, j, BLKmode);
825 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
826 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
828 args[i].aligned_regs[j] = reg;
830 /* There is no need to restrict this code to loading items
831 in TYPE_ALIGN sized hunks. The bitfield instructions can
832 load up entire word sized registers efficiently.
834 ??? This may not be needed anymore.
835 We used to emit a clobber here but that doesn't let later
836 passes optimize the instructions we emit. By storing 0 into
837 the register later passes know the first AND to zero out the
838 bitfield being set in the register is unnecessary. The store
839 of 0 will be deleted as will at least the first AND. */
841 emit_move_insn (reg, const0_rtx);
843 bytes -= bitsize / BITS_PER_UNIT;
844 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
845 extract_bit_field (word, bitsize, 0, 1,
848 bitalign / BITS_PER_UNIT,
850 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
855 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
858 NUM_ACTUALS is the total number of parameters.
860 N_NAMED_ARGS is the total number of named arguments.
862 FNDECL is the tree code for the target of this call (if known)
864 ARGS_SO_FAR holds state needed by the target to know where to place
867 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
868 for arguments which are passed in registers.
870 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
871 and may be modified by this routine.
873 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
874 flags which may be modified by this routine. */
/* NOTE(review): this extract elides the `static void' line, several
   parameter declarations (actparms, fndecl, is_const), local declarations
   (i, inc, argpos, p, unsignedp, copy), braces, the non-reversed branch
   initializing i/inc, various #else/#endif lines, the `*is_const = 0'
   store, and parts of several calls -- the visible lines are a skeleton
   of the per-argument loop only.  */
877 initialize_argument_information (num_actuals, args, args_size, n_named_args,
878 actparms, fndecl, args_so_far,
879 reg_parm_stack_space, old_stack_level,
880 old_pending_adj, must_preallocate, is_const)
881 int num_actuals ATTRIBUTE_UNUSED;
882 struct arg_data *args;
883 struct args_size *args_size;
884 int n_named_args ATTRIBUTE_UNUSED;
887 CUMULATIVE_ARGS *args_so_far;
888 int reg_parm_stack_space;
889 rtx *old_stack_level;
890 int *old_pending_adj;
891 int *must_preallocate;
894 /* 1 if scanning parms front to back, -1 if scanning back to front. */
897 /* Count arg position in order args appear. */
903 args_size->constant = 0;
906 /* In this loop, we consider args in the order they are written.
907 We fill up ARGS from the front or from the back if necessary
908 so that in any case the first arg to be pushed ends up at the front. */
910 #ifdef PUSH_ARGS_REVERSED
911 i = num_actuals - 1, inc = -1;
912 /* In this case, must reverse order of args
913 so that we compute and push the last arg first. */
918 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
919 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
921 tree type = TREE_TYPE (TREE_VALUE (p));
923 enum machine_mode mode;
925 args[i].tree_value = TREE_VALUE (p);
927 /* Replace erroneous argument with constant zero. */
928 if (type == error_mark_node || TYPE_SIZE (type) == 0)
929 args[i].tree_value = integer_zero_node, type = integer_type_node;
931 /* If TYPE is a transparent union, pass things the way we would
932 pass the first field of the union. We have already verified that
933 the modes are the same. */
934 if (TYPE_TRANSPARENT_UNION (type))
935 type = TREE_TYPE (TYPE_FIELDS (type));
937 /* Decide where to pass this arg.
939 args[i].reg is nonzero if all or part is passed in registers.
941 args[i].partial is nonzero if part but not all is passed in registers,
942 and the exact value says how many words are passed in registers.
944 args[i].pass_on_stack is nonzero if the argument must at least be
945 computed on the stack. It may then be loaded back into registers
946 if args[i].reg is nonzero.
948 These decisions are driven by the FUNCTION_... macros and must agree
949 with those made by function.c. */
951 /* See if this argument should be passed by invisible reference. */
952 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
953 && contains_placeholder_p (TYPE_SIZE (type)))
954 || TREE_ADDRESSABLE (type)
955 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
956 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
957 type, argpos < n_named_args)
961 /* If we're compiling a thunk, pass through invisible
962 references instead of making a copy. */
963 if (current_function_is_thunk
964 #ifdef FUNCTION_ARG_CALLEE_COPIES
965 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
966 type, argpos < n_named_args)
967 /* If it's in a register, we must make a copy of it too. */
968 /* ??? Is this a sufficient test? Is there a better one? */
969 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
970 && REG_P (DECL_RTL (args[i].tree_value)))
971 && ! TREE_ADDRESSABLE (type))
975 /* C++ uses a TARGET_EXPR to indicate that we want to make a
976 new object from the argument. If we are passing by
977 invisible reference, the callee will do that for us, so we
978 can strip off the TARGET_EXPR. This is not always safe,
979 but it is safe in the only case where this is a useful
980 optimization; namely, when the argument is a plain object.
981 In that case, the frontend is just asking the backend to
982 make a bitwise copy of the argument. */
984 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
985 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
986 (args[i].tree_value, 1)))
988 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
989 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
/* Pass-through case: rewrite the argument as its own address.  */
991 args[i].tree_value = build1 (ADDR_EXPR,
992 build_pointer_type (type),
994 type = build_pointer_type (type);
998 /* We make a copy of the object and pass the address to the
999 function being called. */
/* Variable-sized (or stack-check-exceeding) objects get dynamic stack
   space; the first such arg saves the stack level for later restore.  */
1002 if (TYPE_SIZE (type) == 0
1003 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1004 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1005 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1006 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1007 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1009 /* This is a variable-sized object. Make space on the stack
1011 rtx size_rtx = expr_size (TREE_VALUE (p));
1013 if (*old_stack_level == 0)
1015 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1016 *old_pending_adj = pending_stack_adjust;
1017 pending_stack_adjust = 0;
1020 copy = gen_rtx_MEM (BLKmode,
1021 allocate_dynamic_stack_space (size_rtx,
1023 TYPE_ALIGN (type)));
/* Fixed-size case: use an ordinary stack temporary for the copy.  */
1027 int size = int_size_in_bytes (type);
1028 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1031 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1033 store_expr (args[i].tree_value, copy, 0);
1036 args[i].tree_value = build1 (ADDR_EXPR,
1037 build_pointer_type (type),
1038 make_tree (type, copy));
1039 type = build_pointer_type (type);
/* Determine mode/signedness (possibly promoted) and let the target
   macros decide register vs. stack placement.  */
1043 mode = TYPE_MODE (type);
1044 unsignedp = TREE_UNSIGNED (type);
1046 #ifdef PROMOTE_FUNCTION_ARGS
1047 mode = promote_mode (type, mode, &unsignedp, 1);
1050 args[i].unsignedp = unsignedp;
1051 args[i].mode = mode;
1052 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1053 argpos < n_named_args);
1054 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1057 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1058 argpos < n_named_args);
1061 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1063 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1064 it means that we are to pass this arg in the register(s) designated
1065 by the PARALLEL, but also to pass it in the stack. */
1066 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1067 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1068 args[i].pass_on_stack = 1;
1070 /* If this is an addressable type, we must preallocate the stack
1071 since we must evaluate the object into its final location.
1073 If this is to be passed in both registers and the stack, it is simpler
1075 if (TREE_ADDRESSABLE (type)
1076 || (args[i].pass_on_stack && args[i].reg != 0))
1077 *must_preallocate = 1;
1079 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1080 we cannot consider this function call constant. */
1081 if (TREE_ADDRESSABLE (type))
1084 /* Compute the stack-size of this argument. */
1085 if (args[i].reg == 0 || args[i].partial != 0
1086 || reg_parm_stack_space > 0
1087 || args[i].pass_on_stack)
1088 locate_and_pad_parm (mode, type,
1089 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1094 fndecl, args_size, &args[i].offset,
1097 #ifndef ARGS_GROW_DOWNWARD
1098 args[i].slot_offset = *args_size;
1101 /* If a part of the arg was put into registers,
1102 don't include that part in the amount pushed. */
1103 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1104 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1105 / (PARM_BOUNDARY / BITS_PER_UNIT)
1106 * (PARM_BOUNDARY / BITS_PER_UNIT));
1108 /* Update ARGS_SIZE, the total stack space for args so far. */
1110 args_size->constant += args[i].size.constant;
1111 if (args[i].size.var)
1113 ADD_PARM_SIZE (*args_size, args[i].size.var);
1116 /* Since the slot offset points to the bottom of the slot,
1117 we must record it after incrementing if the args grow down. */
1118 #ifdef ARGS_GROW_DOWNWARD
1119 args[i].slot_offset = *args_size;
1121 args[i].slot_offset.constant = -args_size->constant;
1124 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1128 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1129 have been used, etc. */
1131 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1132 argpos < n_named_args);
1136 /* Update ARGS_SIZE to contain the total size for the argument block.
1137 Return the original constant component of the argument block's size.
1139 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1140 for arguments passed in registers. */
1143 compute_argument_block_size (reg_parm_stack_space, args_size)
1144 int reg_parm_stack_space;
1145 struct args_size *args_size;
/* Remember the size before any rounding or minimum adjustments; this
   pre-adjustment value is what we return to the caller.  */
1147 int unadjusted_args_size = args_size->constant;
1149 /* Compute the actual size of the argument block required. The variable
1150 and constant sizes must be combined, the size may have to be rounded,
1151 and there may be a minimum required size. */
/* Variable-sized block: fold the constant part into the size tree and
   do all further adjustments on the variable component.  */
1155 args_size->var = ARGS_SIZE_TREE (*args_size);
1156 args_size->constant = 0;
1158 #ifdef PREFERRED_STACK_BOUNDARY
/* Round the variable size up to the preferred stack boundary.  */
1159 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1160 args_size->var = round_up (args_size->var, STACK_BYTES);
/* Honor the minimum reserved register-parameter area, if any.  */
1163 if (reg_parm_stack_space > 0)
1166 = size_binop (MAX_EXPR, args_size->var,
1167 size_int (reg_parm_stack_space));
1169 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1170 /* The area corresponding to register parameters is not to count in
1171 the size of the block we need. So make the adjustment. */
1173 = size_binop (MINUS_EXPR, args_size->var,
1174 size_int (reg_parm_stack_space));
1180 #ifdef PREFERRED_STACK_BOUNDARY
/* Constant-sized block: round the constant size (plus any pending
   stack adjustment) up to a multiple of STACK_BYTES.  */
1181 args_size->constant = (((args_size->constant
1182 + pending_stack_adjust
1184 / STACK_BYTES * STACK_BYTES)
1185 - pending_stack_adjust);
/* The block must be at least as large as the reserved
   register-parameter area.  */
1188 args_size->constant = MAX (args_size->constant,
1189 reg_parm_stack_space);
1191 #ifdef MAYBE_REG_PARM_STACK_SPACE
1192 if (reg_parm_stack_space == 0)
1193 args_size->constant = 0;
1196 #ifndef OUTGOING_REG_PARM_STACK_SPACE
/* As with the variable-size case above, the register-parameter area
   does not count toward the block size we must allocate.  */
1197 args_size->constant -= reg_parm_stack_space;
1200 return unadjusted_args_size;
1203 /* Precompute parameters as needed for a function call.
1205 IS_CONST indicates the target function is a `const' function.
1207 MUST_PREALLOCATE indicates that we must preallocate stack space for
1208 any stack arguments.
1210 NUM_ACTUALS is the number of arguments.
1212 ARGS is an array containing information for each argument; this routine
1213 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1215 ARGS_SIZE contains information about the size of the arg list. */
1218 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1220 int must_preallocate;
1222 struct arg_data *args;
1223 struct args_size *args_size;
1227 /* If this function call is cse'able, precompute all the parameters.
1228 Note that if the parameter is constructed into a temporary, this will
1229 cause an additional copy because the parameter will be constructed
1230 into a temporary location and then copied into the outgoing arguments.
1231 If a parameter contains a call to alloca and this function uses the
1232 stack, precompute the parameter. */
1234 /* If we preallocated the stack space, and some arguments must be passed
1235 on the stack, then we must precompute any parameter which contains a
1236 function call which will store arguments on the stack.
1237 Otherwise, evaluating the parameter may clobber previous parameters
1238 which have already been stored into the stack. */
1240 for (i = 0; i < num_actuals; i++)
1242 || ((args_size->var != 0 || args_size->constant != 0)
1243 && calls_function (args[i].tree_value, 1))
1244 || (must_preallocate
1245 && (args_size->var != 0 || args_size->constant != 0)
1246 && calls_function (args[i].tree_value, 0)))
1248 /* If this is an addressable type, we cannot pre-evaluate it. */
1249 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
/* Evaluate the argument now, recording the result in both VALUE
   and INITIAL_VALUE (the latter is kept for const-call CSE).  */
1254 args[i].initial_value = args[i].value
1255 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
/* Keep any temporary created for the value from being recycled
   before the call is emitted.  */
1257 preserve_temp_slots (args[i].value);
1260 /* ANSI doesn't require a sequence point here,
1261 but PCC has one, so this will avoid some problems. */
1264 args[i].initial_value = args[i].value
1265 = protect_from_queue (args[i].initial_value, 0);
/* If the argument is to be passed in a promoted mode, convert the
   precomputed value to that mode now.  */
1267 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1269 = convert_modes (args[i].mode,
1270 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1271 args[i].value, args[i].unsignedp);
1275 /* Given the current state of MUST_PREALLOCATE and information about
1276 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1277 compute and return the final value for MUST_PREALLOCATE. */
1280 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1281 int must_preallocate;
1283 struct arg_data *args;
1284 struct args_size *args_size;
1286 /* See if we have or want to preallocate stack space.
1288 If we would have to push a partially-in-regs parm
1289 before other stack parms, preallocate stack space instead.
1291 If the size of some parm is not a multiple of the required stack
1292 alignment, we must preallocate.
1294 If the total size of arguments that would otherwise create a copy in
1295 a temporary (such as a CALL) is more than half the total argument list
1296 size, preallocation is faster.
1298 Another reason to preallocate is if we have a machine (like the m88k)
1299 where stack alignment is required to be maintained between every
1300 pair of insns, not just when the call is made. However, we assume here
1301 that such machines either do not have push insns (and hence preallocation
1302 would occur anyway) or the problem is taken care of with
1305 if (! must_preallocate)
1307 int partial_seen = 0;
1308 int copy_to_evaluate_size = 0;
/* Scan the arguments, stopping early once preallocation is forced.  */
1311 for (i = 0; i < num_actuals && ! must_preallocate; i++)
/* A partially-in-registers parm followed later by a fully-on-stack
   parm forces preallocation (first case in the comment above).  */
1313 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1315 else if (partial_seen && args[i].reg == 0)
1316 must_preallocate = 1;
/* Accumulate the sizes of BLKmode arguments whose evaluation would
   create a copy in a temporary.  */
1318 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1319 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1320 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1321 || TREE_CODE (args[i].tree_value) == COND_EXPR
1322 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1323 copy_to_evaluate_size
1324 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
/* Preallocate when such copies are at least half the total argument
   list size (third case in the comment above).  */
1327 if (copy_to_evaluate_size * 2 >= args_size->constant
1328 && args_size->constant > 0)
1329 must_preallocate = 1;
1331 return must_preallocate;
1334 /* If we preallocated stack space, compute the address of each argument
1335 and store it into the ARGS array.
1337 We need not ensure it is a valid memory address here; it will be
1338 validized when it is used.
1340 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1343 compute_argument_addresses (args, argblock, num_actuals)
1344 struct arg_data *args;
1350 rtx arg_reg = argblock;
1351 int i, arg_offset = 0;
/* If ARGBLOCK is (plus REG CONST_INT), split it into the base register
   and a constant byte offset.  */
1353 if (GET_CODE (argblock) == PLUS)
1354 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1356 for (i = 0; i < num_actuals; i++)
1358 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1359 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1362 /* Skip this parm if it will not be passed on the stack. */
1363 if (! args[i].pass_on_stack && args[i].reg != 0)
/* Form the address BASE + OFFSET (+ ARG_OFFSET) and record a MEM for
   the argument's stack location in ARGS[I].stack.  */
1366 if (GET_CODE (offset) == CONST_INT)
1367 addr = plus_constant (arg_reg, INTVAL (offset));
1369 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1371 addr = plus_constant (addr, arg_offset);
1372 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1375 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
/* Likewise for the slot offset, recording ARGS[I].stack_slot.  */
1377 if (GET_CODE (slot_offset) == CONST_INT)
1378 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1380 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1382 addr = plus_constant (addr, arg_offset);
1383 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1388 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1389 in a call instruction.
1391 FNDECL is the tree node for the target function. For an indirect call
1392 FNDECL will be NULL_TREE.
1394 EXP is the CALL_EXPR for this call. */
1397 rtx_for_function_call (fndecl, exp)
1403 /* Get the function to call, in the form of RTL. */
1406 /* If this is the first use of the function, see if we need to
1407 make an external definition for it. */
1408 if (! TREE_USED (fndecl))
1410 assemble_external (fndecl);
1411 TREE_USED (fndecl) = 1;
1414 /* Get a SYMBOL_REF rtx for the function address. */
1415 funexp = XEXP (DECL_RTL (fndecl), 0);
1418 /* Generate an rtx (probably a pseudo-register) for the address. */
/* Indirect call: evaluate the function expression itself to get the
   address.  */
1423 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1424 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1426 /* Check that the function is executable (under -fcheck-memory-usage). */
1427 if (current_function_check_memory_usage)
1429 #ifdef POINTERS_EXTEND_UNSIGNED
1430 /* It might be OK to convert funexp in place, but there's
1431 a lot going on between here and when it happens naturally
1432 that this seems safer. */
1433 funaddr = convert_memory_address (Pmode, funexp);
1435 emit_library_call (chkr_check_exec_libfunc, 1,
1444 /* Do the register loads required for any wholly-register parms or any
1445 parms which are passed both on the stack and in a register. Their
1446 expressions were already evaluated.
1448 Mark all register-parms as living through the call, putting these USE
1449 insns in the CALL_INSN_FUNCTION_USAGE field. */
1452 load_register_parameters (args, num_actuals, call_fusage)
1453 struct arg_data *args;
1459 #ifdef LOAD_ARGS_REVERSED
1460 for (i = num_actuals - 1; i >= 0; i--)
1462 for (i = 0; i < num_actuals; i++)
1465 rtx reg = args[i].reg;
1466 int partial = args[i].partial;
1471 /* Set to non-negative if must move a word at a time, even if just
1472 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1473 we just use a normal move insn. This value can be zero if the
1474 argument is a zero size structure with no fields. */
1475 nregs = (partial ? partial
1476 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1477 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1478 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1481 /* Handle calls that pass values in multiple non-contiguous
1482 locations. The Irix 6 ABI has examples of this. */
1484 if (GET_CODE (reg) == PARALLEL)
1486 emit_group_load (reg, args[i].value,
1487 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1488 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1492 /* If simple case, just do move. If normal partial, store_one_arg
1493 has already loaded the register for us. In all other cases,
1494 load the register(s) from memory. */
1496 else if (nregs == -1)
1497 emit_move_insn (reg, args[i].value);
1499 /* If we have pre-computed the values to put in the registers in
1500 the case of non-aligned structures, copy them in now. */
1502 else if (args[i].n_aligned_regs != 0)
1503 for (j = 0; j < args[i].n_aligned_regs; j++)
1504 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1505 args[i].aligned_regs[j]);
/* Otherwise copy the value from memory into consecutive registers,
   one word at a time.  */
1507 else if (partial == 0 || args[i].pass_on_stack)
1508 move_block_to_reg (REGNO (reg),
1509 validize_mem (args[i].value), nregs,
1512 /* Handle calls that pass values in multiple non-contiguous
1513 locations. The Irix 6 ABI has examples of this. */
/* Record the register(s) as live across the call, for
   CALL_INSN_FUNCTION_USAGE.  */
1514 if (GET_CODE (reg) == PARALLEL)
1515 use_group_regs (call_fusage, reg);
1516 else if (nregs == -1)
1517 use_reg (call_fusage, reg);
1519 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1524 /* Generate all the code for a function call
1525 and return an rtx for its value.
1526 Store the value in TARGET (specified as an rtx) if convenient.
1527 If the value is stored in TARGET then TARGET is returned.
1528 If IGNORE is nonzero, then we ignore the value of the function call. */
1531 expand_call (exp, target, ignore)
1536 /* List of actual parameters. */
1537 tree actparms = TREE_OPERAND (exp, 1);
1538 /* RTX for the function to be called. */
1540 /* Data type of the function. */
1542 /* Declaration of the function being called,
1543 or 0 if the function is computed (not known by name). */
1547 /* Register in which non-BLKmode value will be returned,
1548 or 0 if no value or if value is BLKmode. */
1550 /* Address where we should return a BLKmode value;
1551 0 if value not BLKmode. */
1552 rtx structure_value_addr = 0;
1553 /* Nonzero if that address is being passed by treating it as
1554 an extra, implicit first parameter. Otherwise,
1555 it is passed by being copied directly into struct_value_rtx. */
1556 int structure_value_addr_parm = 0;
1557 /* Size of aggregate value wanted, or zero if none wanted
1558 or if we are using the non-reentrant PCC calling convention
1559 or expecting the value in registers. */
1560 HOST_WIDE_INT struct_value_size = 0;
1561 /* Nonzero if called function returns an aggregate in memory PCC style,
1562 by returning the address of where to find it. */
1563 int pcc_struct_value = 0;
1565 /* Number of actual parameters in this call, including struct value addr. */
1567 /* Number of named args. Args after this are anonymous ones
1568 and they must all go on the stack. */
1571 /* Vector of information about each argument.
1572 Arguments are numbered in the order they will be pushed,
1573 not the order they are written. */
1574 struct arg_data *args;
1576 /* Total size in bytes of all the stack-parms scanned so far. */
1577 struct args_size args_size;
1578 /* Size of arguments before any adjustments (such as rounding). */
1579 int unadjusted_args_size;
1580 /* Data on reg parms scanned so far. */
1581 CUMULATIVE_ARGS args_so_far;
1582 /* Nonzero if a reg parm has been scanned. */
1584 /* Nonzero if this is an indirect function call. */
1586 /* Nonzero if we must avoid push-insns in the args for this call.
1587 If stack space is allocated for register parameters, but not by the
1588 caller, then it is preallocated in the fixed part of the stack frame.
1589 So the entire argument block must then be preallocated (i.e., we
1590 ignore PUSH_ROUNDING in that case). */
1592 #ifdef PUSH_ROUNDING
1593 int must_preallocate = 0;
1595 int must_preallocate = 1;
1598 /* Size of the stack reserved for parameter registers. */
1599 int reg_parm_stack_space = 0;
1601 /* Address of space preallocated for stack parms
1602 (on machines that lack push insns), or 0 if space not preallocated. */
1605 /* Nonzero if it is plausible that this is a call to alloca. */
1607 /* Nonzero if this is a call to malloc or a related function. */
1609 /* Nonzero if this is a call to setjmp or a related function. */
1611 /* Nonzero if this is a call to `longjmp'. */
1613 /* Nonzero if this is a call to an inline function. */
1614 int is_integrable = 0;
1615 /* Nonzero if this is a call to a `const' function.
1616 Note that only explicitly named functions are handled as `const' here. */
1618 /* Nonzero if this is a call to a `volatile' function. */
1619 int is_volatile = 0;
1620 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1621 /* Define the boundary of the register parm stack space that needs to be
1623 int low_to_save = -1, high_to_save;
1624 rtx save_area = 0; /* Place that it is saved */
1627 #ifdef ACCUMULATE_OUTGOING_ARGS
1628 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1629 char *initial_stack_usage_map = stack_usage_map;
1630 int old_stack_arg_under_construction;
1633 rtx old_stack_level = 0;
1634 int old_pending_adj = 0;
1635 int old_inhibit_defer_pop = inhibit_defer_pop;
1636 rtx call_fusage = 0;
1640 /* The value of the function call can be put in a hard register. But
1641 if -fcheck-memory-usage, code which invokes functions (and thus
1642 damages some hard registers) can be inserted before using the value.
1643 So, target is always a pseudo-register in that case. */
1644 if (current_function_check_memory_usage)
1647 /* See if we can find a DECL-node for the actual function.
1648 As a result, decide whether this is a call to an integrable function. */
1650 p = TREE_OPERAND (exp, 0);
1651 if (TREE_CODE (p) == ADDR_EXPR)
1653 fndecl = TREE_OPERAND (p, 0);
1654 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1659 && fndecl != current_function_decl
1660 && DECL_INLINE (fndecl)
1661 && DECL_SAVED_INSNS (fndecl)
1662 && DECL_SAVED_INSNS (fndecl)->inlinable)
1664 else if (! TREE_ADDRESSABLE (fndecl))
1666 /* In case this function later becomes inlinable,
1667 record that there was already a non-inline call to it.
1669 Use abstraction instead of setting TREE_ADDRESSABLE
1671 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1674 warning_with_decl (fndecl, "can't inline call to `%s'");
1675 warning ("called from here");
1677 mark_addressable (fndecl);
1680 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1681 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1684 if (TREE_THIS_VOLATILE (fndecl))
1689 /* If we don't have specific function to call, see if we have a
1690 constant or `noreturn' function from the type. */
1693 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1694 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1697 #ifdef REG_PARM_STACK_SPACE
1698 #ifdef MAYBE_REG_PARM_STACK_SPACE
1699 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1701 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1705 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1706 if (reg_parm_stack_space > 0)
1707 must_preallocate = 1;
1710 /* Warn if this value is an aggregate type,
1711 regardless of which calling convention we are using for it. */
1712 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1713 warning ("function call has aggregate value");
1715 /* Set up a place to return a structure. */
1717 /* Cater to broken compilers. */
1718 if (aggregate_value_p (exp))
1720 /* This call returns a big structure. */
1723 #ifdef PCC_STATIC_STRUCT_RETURN
1725 pcc_struct_value = 1;
1726 /* Easier than making that case work right. */
1729 /* In case this is a static function, note that it has been
1731 if (! TREE_ADDRESSABLE (fndecl))
1732 mark_addressable (fndecl);
1736 #else /* not PCC_STATIC_STRUCT_RETURN */
1738 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1740 if (target && GET_CODE (target) == MEM)
1741 structure_value_addr = XEXP (target, 0);
1744 /* Assign a temporary to hold the value. */
1747 /* For variable-sized objects, we must be called with a target
1748 specified. If we were to allocate space on the stack here,
1749 we would have no way of knowing when to free it. */
1751 if (struct_value_size < 0)
1754 /* This DECL is just something to feed to mark_addressable;
1755 it doesn't get pushed. */
1756 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1757 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1758 mark_addressable (d);
1759 structure_value_addr = XEXP (DECL_RTL (d), 0);
1764 #endif /* not PCC_STATIC_STRUCT_RETURN */
1767 /* If called function is inline, try to integrate it. */
1772 #ifdef ACCUMULATE_OUTGOING_ARGS
1773 rtx before_call = get_last_insn ();
1776 temp = expand_inline_function (fndecl, actparms, target,
1777 ignore, TREE_TYPE (exp),
1778 structure_value_addr);
1780 /* If inlining succeeded, return. */
1781 if (temp != (rtx) (HOST_WIDE_INT) -1)
1783 #ifdef ACCUMULATE_OUTGOING_ARGS
1784 /* If the outgoing argument list must be preserved, push
1785 the stack before executing the inlined function if it
1788 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1789 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1792 if (stack_arg_under_construction || i >= 0)
1795 = before_call ? NEXT_INSN (before_call) : get_insns ();
1796 rtx insn = NULL_RTX, seq;
1798 /* Look for a call in the inline function code.
1799 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1800 nonzero then there is a call and it is not necessary
1801 to scan the insns. */
1803 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1804 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1805 if (GET_CODE (insn) == CALL_INSN)
1810 /* Reserve enough stack space so that the largest
1811 argument list of any function call in the inline
1812 function does not overlap the argument list being
1813 evaluated. This is usually an overestimate because
1814 allocate_dynamic_stack_space reserves space for an
1815 outgoing argument list in addition to the requested
1816 space, but there is no way to ask for stack space such
1817 that an argument list of a certain length can be
1820 Add the stack space reserved for register arguments, if
1821 any, in the inline function. What is really needed is the
1822 largest value of reg_parm_stack_space in the inline
1823 function, but that is not available. Using the current
1824 value of reg_parm_stack_space is wrong, but gives
1825 correct results on all supported machines. */
1827 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1828 + reg_parm_stack_space);
1831 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1832 allocate_dynamic_stack_space (GEN_INT (adjust),
1833 NULL_RTX, BITS_PER_UNIT);
1836 emit_insns_before (seq, first_insn);
1837 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1842 /* If the result is equivalent to TARGET, return TARGET to simplify
1843 checks in store_expr. They can be equivalent but not equal in the
1844 case of a function that returns BLKmode. */
1845 if (temp != target && rtx_equal_p (temp, target))
1850 /* If inlining failed, mark FNDECL as needing to be compiled
1851 separately after all. If function was declared inline,
1853 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1854 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1856 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1857 warning ("called from here");
1859 mark_addressable (fndecl);
1862 function_call_count++;
1864 if (fndecl && DECL_NAME (fndecl))
1865 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1867 /* See if this is a call to a function that can return more than once
1868 or a call to longjmp or malloc. */
1869 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1870 &is_malloc, &may_be_alloca);
1873 current_function_calls_alloca = 1;
1875 /* Operand 0 is a pointer-to-function; get the type of the function. */
1876 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1877 if (! POINTER_TYPE_P (funtype))
1879 funtype = TREE_TYPE (funtype);
1881 /* When calling a const function, we must pop the stack args right away,
1882 so that the pop is deleted or moved with the call. */
1886 /* Don't let pending stack adjusts add up to too much.
1887 Also, do all pending adjustments now
1888 if there is any chance this might be a call to alloca. */
1890 if (pending_stack_adjust >= 32
1891 || (pending_stack_adjust > 0 && may_be_alloca))
1892 do_pending_stack_adjust ();
1894 /* Push the temporary stack slot level so that we can free any temporaries
1898 /* Start updating where the next arg would go.
1900 On some machines (such as the PA) indirect calls have a different
1901 calling convention than normal calls. The last argument in
1902 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1904 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1906 /* If struct_value_rtx is 0, it means pass the address
1907 as if it were an extra parameter. */
1908 if (structure_value_addr && struct_value_rtx == 0)
1910 /* If structure_value_addr is a REG other than
1911 virtual_outgoing_args_rtx, we can always use it. If it
1912 is not a REG, we must always copy it into a register.
1913 If it is virtual_outgoing_args_rtx, we must copy it to another
1914 register in some cases. */
1915 rtx temp = (GET_CODE (structure_value_addr) != REG
1916 #ifdef ACCUMULATE_OUTGOING_ARGS
1917 || (stack_arg_under_construction
1918 && structure_value_addr == virtual_outgoing_args_rtx)
1920 ? copy_addr_to_reg (structure_value_addr)
1921 : structure_value_addr);
1924 = tree_cons (error_mark_node,
1925 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1928 structure_value_addr_parm = 1;
1931 /* Count the arguments and set NUM_ACTUALS. */
1932 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1935 /* Compute number of named args.
1936 Normally, don't include the last named arg if anonymous args follow.
1937 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1938 (If no anonymous args follow, the result of list_length is actually
1939 one too large. This is harmless.)
1941 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1942 zero, this machine will be able to place unnamed args that were passed in
1943 registers into the stack. So treat all args as named. This allows the
1944 insns emitting for a specific argument list to be independent of the
1945 function declaration.
1947 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1948 way to pass unnamed args in registers, so we must force them into
1951 if ((STRICT_ARGUMENT_NAMING
1952 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1953 && TYPE_ARG_TYPES (funtype) != 0)
1955 = (list_length (TYPE_ARG_TYPES (funtype))
1956 /* Don't include the last named arg. */
1957 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1958 /* Count the struct value address, if it is passed as a parm. */
1959 + structure_value_addr_parm);
1961 /* If we know nothing, treat all args as named. */
1962 n_named_args = num_actuals;
1964 /* Make a vector to hold all the information about each arg. */
1965 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1966 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1968 /* Build up entries in the ARGS array, compute the size of the arguments
1969 into ARGS_SIZE, etc. */
1970 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1971 actparms, fndecl, &args_so_far,
1972 reg_parm_stack_space, &old_stack_level,
1973 &old_pending_adj, &must_preallocate,
1976 #ifdef FINAL_REG_PARM_STACK_SPACE
1977 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1983 /* If this function requires a variable-sized argument list, don't try to
1984 make a cse'able block for this call. We may be able to do this
1985 eventually, but it is too complicated to keep track of what insns go
1986 in the cse'able block and which don't. */
1989 must_preallocate = 1;
1992 /* Compute the actual size of the argument block required. The variable
1993 and constant sizes must be combined, the size may have to be rounded,
1994 and there may be a minimum required size. */
1995 unadjusted_args_size
1996 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1998 /* Now make final decision about preallocating stack space. */
1999 must_preallocate = finalize_must_preallocate (must_preallocate,
2000 num_actuals, args, &args_size);
2002 /* If the structure value address will reference the stack pointer, we must
2003 stabilize it. We don't need to do this if we know that we are not going
2004 to adjust the stack pointer in processing this call. */
2006 if (structure_value_addr
2007 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2008 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2010 #ifndef ACCUMULATE_OUTGOING_ARGS
2011 || args_size.constant
2014 structure_value_addr = copy_to_reg (structure_value_addr);
2016 /* Precompute any arguments as needed. */
2017 precompute_arguments (is_const, must_preallocate, num_actuals,
2020 /* Now we are about to start emitting insns that can be deleted
2021 if a libcall is deleted. */
2022 if (is_const || is_malloc)
2025 /* If we have no actual push instructions, or shouldn't use them,
2026 make space for all args right now. */
2028 if (args_size.var != 0)
2030 if (old_stack_level == 0)
2032 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2033 old_pending_adj = pending_stack_adjust;
2034 pending_stack_adjust = 0;
2035 #ifdef ACCUMULATE_OUTGOING_ARGS
2036 /* stack_arg_under_construction says whether a stack arg is
2037 being constructed at the old stack level. Pushing the stack
2038 gets a clean outgoing argument block. */
2039 old_stack_arg_under_construction = stack_arg_under_construction;
2040 stack_arg_under_construction = 0;
2043 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2047 /* Note that we must go through the motions of allocating an argument
2048 block even if the size is zero because we may be storing args
2049 in the area reserved for register arguments, which may be part of
2052 int needed = args_size.constant;
2054 /* Store the maximum argument space used. It will be pushed by
2055 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2058 if (needed > current_function_outgoing_args_size)
2059 current_function_outgoing_args_size = needed;
2061 if (must_preallocate)
2063 #ifdef ACCUMULATE_OUTGOING_ARGS
2064 /* Since the stack pointer will never be pushed, it is possible for
2065 the evaluation of a parm to clobber something we have already
2066 written to the stack. Since most function calls on RISC machines
2067 do not use the stack, this is uncommon, but must work correctly.
2069 Therefore, we save any area of the stack that was already written
2070 and that we are using. Here we set up to do this by making a new
2071 stack usage map from the old one. The actual save will be done
2074 Another approach might be to try to reorder the argument
2075 evaluations to avoid this conflicting stack usage. */
2077 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2078 /* Since we will be writing into the entire argument area, the
2079 map must be allocated for its entire size, not just the part that
2080 is the responsibility of the caller. */
2081 needed += reg_parm_stack_space;
2084 #ifdef ARGS_GROW_DOWNWARD
2085 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2088 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2091 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2093 if (initial_highest_arg_in_use)
2094 bcopy (initial_stack_usage_map, stack_usage_map,
2095 initial_highest_arg_in_use);
2097 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2098 bzero (&stack_usage_map[initial_highest_arg_in_use],
2099 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2102 /* The address of the outgoing argument list must not be copied to a
2103 register here, because argblock would be left pointing to the
2104 wrong place after the call to allocate_dynamic_stack_space below.
2107 argblock = virtual_outgoing_args_rtx;
2109 #else /* not ACCUMULATE_OUTGOING_ARGS */
2110 if (inhibit_defer_pop == 0)
2112 /* Try to reuse some or all of the pending_stack_adjust
2113 to get this space. Maybe we can avoid any pushing. */
2114 if (needed > pending_stack_adjust)
2116 needed -= pending_stack_adjust;
2117 pending_stack_adjust = 0;
2121 pending_stack_adjust -= needed;
2125 /* Special case this because overhead of `push_block' in this
2126 case is non-trivial. */
2128 argblock = virtual_outgoing_args_rtx;
2130 argblock = push_block (GEN_INT (needed), 0, 0);
2132 /* We only really need to call `copy_to_reg' in the case where push
2133 insns are going to be used to pass ARGBLOCK to a function
2134 call in ARGS. In that case, the stack pointer changes value
2135 from the allocation point to the call point, and hence
2136 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2137 But might as well always do it. */
2138 argblock = copy_to_reg (argblock);
2139 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2143 #ifdef ACCUMULATE_OUTGOING_ARGS
2144 /* The save/restore code in store_one_arg handles all cases except one:
2145 a constructor call (including a C function returning a BLKmode struct)
2146 to initialize an argument. */
2147 if (stack_arg_under_construction)
2149 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2150 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2152 rtx push_size = GEN_INT (args_size.constant);
2154 if (old_stack_level == 0)
2156 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2157 old_pending_adj = pending_stack_adjust;
2158 pending_stack_adjust = 0;
2159 /* stack_arg_under_construction says whether a stack arg is
2160 being constructed at the old stack level. Pushing the stack
2161 gets a clean outgoing argument block. */
2162 old_stack_arg_under_construction = stack_arg_under_construction;
2163 stack_arg_under_construction = 0;
2164 /* Make a new map for the new argument list. */
2165 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2166 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2167 highest_outgoing_arg_in_use = 0;
2169 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2171 /* If argument evaluation might modify the stack pointer, copy the
2172 address of the argument list to a register. */
2173 for (i = 0; i < num_actuals; i++)
2174 if (args[i].pass_on_stack)
2176 argblock = copy_addr_to_reg (argblock);
2181 compute_argument_addresses (args, argblock, num_actuals);
2183 #ifdef PUSH_ARGS_REVERSED
2184 #ifdef PREFERRED_STACK_BOUNDARY
2185 /* If we push args individually in reverse order, perform stack alignment
2186 before the first push (the last arg). */
2188 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2192 /* Don't try to defer pops if preallocating, not even from the first arg,
2193 since ARGBLOCK probably refers to the SP. */
2197 funexp = rtx_for_function_call (fndecl, exp);
2199 /* Figure out the register where the value, if any, will come back. */
2201 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2202 && ! structure_value_addr)
2204 if (pcc_struct_value)
2205 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2208 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2211 /* Precompute all register parameters. It isn't safe to compute anything
2212 once we have started filling any specific hard regs. */
2213 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2215 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2217 /* Save the fixed argument area if it's part of the caller's frame and
2218 is clobbered by argument setup for this call. */
2219 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2220 &low_to_save, &high_to_save);
2224 /* Now store (and compute if necessary) all non-register parms.
2225 These come before register parms, since they can require block-moves,
2226 which could clobber the registers used for register parms.
2227 Parms which have partial registers are not stored here,
2228 but we do preallocate space here if they want that. */
2230 for (i = 0; i < num_actuals; i++)
2231 if (args[i].reg == 0 || args[i].pass_on_stack)
2232 store_one_arg (&args[i], argblock, may_be_alloca,
2233 args_size.var != 0, reg_parm_stack_space);
2235 /* If we have a parm that is passed in registers but not in memory
2236 and whose alignment does not permit a direct copy into registers,
2237 make a group of pseudos that correspond to each register that we
2239 if (STRICT_ALIGNMENT)
2240 store_unaligned_arguments_into_pseudos (args, num_actuals);
2242 /* Now store any partially-in-registers parm.
2243 This is the last place a block-move can happen. */
2245 for (i = 0; i < num_actuals; i++)
2246 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2247 store_one_arg (&args[i], argblock, may_be_alloca,
2248 args_size.var != 0, reg_parm_stack_space);
2250 #ifndef PUSH_ARGS_REVERSED
2251 #ifdef PREFERRED_STACK_BOUNDARY
2252 /* If we pushed args in forward order, perform stack alignment
2253 after pushing the last arg. */
2255 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2259 /* If register arguments require space on the stack and stack space
2260 was not preallocated, allocate stack space here for arguments
2261 passed in registers. */
2262 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2263 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2264 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2267 /* Pass the function the address in which to return a structure value. */
2268 if (structure_value_addr && ! structure_value_addr_parm)
2270 emit_move_insn (struct_value_rtx,
2272 force_operand (structure_value_addr,
2275 /* Mark the memory for the aggregate as write-only. */
2276 if (current_function_check_memory_usage)
2277 emit_library_call (chkr_set_right_libfunc, 1,
2279 structure_value_addr, Pmode,
2280 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2281 GEN_INT (MEMORY_USE_WO),
2282 TYPE_MODE (integer_type_node));
2284 if (GET_CODE (struct_value_rtx) == REG)
2285 use_reg (&call_fusage, struct_value_rtx);
2288 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2290 load_register_parameters (args, num_actuals, &call_fusage);
2292 /* Perform postincrements before actually calling the function. */
2295 /* All arguments and registers used for the call must be set up by now! */
2297 /* Generate the actual call instruction. */
2298 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2299 args_size.constant, struct_value_size,
2300 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2301 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2303 /* If call is cse'able, make appropriate pair of reg-notes around it.
2304 Test valreg so we don't crash; may safely ignore `const'
2305 if return type is void. Disable for PARALLEL return values, because
2306 we have no way to move such values into a pseudo register. */
2307 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2310 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2313 /* Mark the return value as a pointer if needed. */
2314 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2316 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2317 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2320 /* Construct an "equal form" for the value which mentions all the
2321 arguments in order as well as the function name. */
2322 #ifdef PUSH_ARGS_REVERSED
2323 for (i = 0; i < num_actuals; i++)
2324 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2326 for (i = num_actuals - 1; i >= 0; i--)
2327 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2329 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2331 insns = get_insns ();
2334 emit_libcall_block (insns, temp, valreg, note);
2340 /* Otherwise, just write out the sequence without a note. */
2341 rtx insns = get_insns ();
2348 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2351 /* The return value from a malloc-like function is a pointer. */
2352 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2353 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2355 emit_move_insn (temp, valreg);
2357 /* The return value from a malloc-like function can not alias
2359 last = get_last_insn ();
2361 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2363 /* Write out the sequence. */
2364 insns = get_insns ();
2370 /* For calls to `setjmp', etc., inform flow.c it should complain
2371 if nonvolatile values are live. */
2375 emit_note (name, NOTE_INSN_SETJMP);
2376 current_function_calls_setjmp = 1;
2380 current_function_calls_longjmp = 1;
2382 /* Notice functions that cannot return.
2383 If optimizing, insns emitted below will be dead.
2384 If not optimizing, they will exist, which is useful
2385 if the user uses the `return' command in the debugger. */
2387 if (is_volatile || is_longjmp)
2390 /* If value type not void, return an rtx for the value. */
2392 /* If there are cleanups to be called, don't use a hard reg as target.
2393 We need to double check this and see if it matters anymore. */
2394 if (any_pending_cleanups (1)
2395 && target && REG_P (target)
2396 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2399 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2402 target = const0_rtx;
2404 else if (structure_value_addr)
2406 if (target == 0 || GET_CODE (target) != MEM)
2408 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2409 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2410 structure_value_addr));
2411 MEM_SET_IN_STRUCT_P (target,
2412 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2415 else if (pcc_struct_value)
2417 /* This is the special C++ case where we need to
2418 know what the true target was. We take care to
2419 never use this value more than once in one expression. */
2420 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2421 copy_to_reg (valreg));
2422 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2424 /* Handle calls that return values in multiple non-contiguous locations.
2425 The Irix 6 ABI has examples of this. */
2426 else if (GET_CODE (valreg) == PARALLEL)
2428 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2432 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2433 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2434 preserve_temp_slots (target);
2437 if (! rtx_equal_p (target, valreg))
2438 emit_group_store (target, valreg, bytes,
2439 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2441 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2442 && GET_MODE (target) == GET_MODE (valreg))
2443 /* TARGET and VALREG cannot be equal at this point because the latter
2444 would not have REG_FUNCTION_VALUE_P true, while the former would if
2445 it were referring to the same register.
2447 If they refer to the same register, this move will be a no-op, except
2448 when function inlining is being done. */
2449 emit_move_insn (target, valreg);
2450 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2451 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2453 target = copy_to_reg (valreg);
2455 #ifdef PROMOTE_FUNCTION_RETURN
2456 /* If we promoted this return value, make the proper SUBREG. TARGET
2457 might be const0_rtx here, so be careful. */
2458 if (GET_CODE (target) == REG
2459 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2460 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2462 tree type = TREE_TYPE (exp);
2463 int unsignedp = TREE_UNSIGNED (type);
2465 /* If we don't promote as expected, something is wrong. */
2466 if (GET_MODE (target)
2467 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2470 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2471 SUBREG_PROMOTED_VAR_P (target) = 1;
2472 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2476 /* If size of args is variable or this was a constructor call for a stack
2477 argument, restore saved stack-pointer value. */
2479 if (old_stack_level)
2481 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2482 pending_stack_adjust = old_pending_adj;
2483 #ifdef ACCUMULATE_OUTGOING_ARGS
2484 stack_arg_under_construction = old_stack_arg_under_construction;
2485 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2486 stack_usage_map = initial_stack_usage_map;
2489 #ifdef ACCUMULATE_OUTGOING_ARGS
2492 #ifdef REG_PARM_STACK_SPACE
2494 restore_fixed_argument_area (save_area, argblock,
2495 high_to_save, low_to_save);
2498 /* If we saved any argument areas, restore them. */
2499 for (i = 0; i < num_actuals; i++)
2500 if (args[i].save_area)
2502 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2504 = gen_rtx_MEM (save_mode,
2505 memory_address (save_mode,
2506 XEXP (args[i].stack_slot, 0)));
2508 if (save_mode != BLKmode)
2509 emit_move_insn (stack_area, args[i].save_area);
2511 emit_block_move (stack_area, validize_mem (args[i].save_area),
2512 GEN_INT (args[i].size.constant),
2513 PARM_BOUNDARY / BITS_PER_UNIT);
2516 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2517 stack_usage_map = initial_stack_usage_map;
2521 /* If this was alloca, record the new stack level for nonlocal gotos.
2522 Check for the handler slots since we might not have a save area
2523 for non-local gotos. */
2525 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2526 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2530 /* Free up storage we no longer need. */
2531 for (i = 0; i < num_actuals; ++i)
2532 if (args[i].aligned_regs)
2533 free (args[i].aligned_regs);
/* NOTE(review): this excerpt is elided — the leading number on each line is the
   original file's line number, and the gaps in that numbering mean declarations,
   braces, and some #else/#endif arms are missing from view.  Comments below only
   describe what the visible code establishes; elisions are flagged.  */
2538 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2539 (emitting the queue unless NO_QUEUE is nonzero),
2540 for a value of mode OUTMODE,
2541 with NARGS different arguments, passed as alternating rtx values
2542 and machine_modes to convert them to.
2543 The rtx values should have been passed through protect_from_queue already.
2545 NO_QUEUE will be true if and only if the library call is a `const' call
2546 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2547 to the variable is_const in expand_call.
2549 NO_QUEUE must be true for const calls, because if it isn't, then
2550 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2551 and will be lost if the libcall sequence is optimized away.
2553 NO_QUEUE must be false for non-const calls, because if it isn't, the
2554 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2555 optimized. For instance, the instruction scheduler may incorrectly
2556 move memory references across the non-const call. */
2559 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
/* On pre-ANSI compilers VPROTO degrades to varargs; the K&R-style parameter
   declarations that follow are partially elided in this excerpt.  */
2562 #ifndef ANSI_PROTOTYPES
2565 enum machine_mode outmode;
2569 /* Total size in bytes of all the stack-parms scanned so far. */
2570 struct args_size args_size;
2571 /* Size of arguments before any adjustments (such as rounding). */
2572 struct args_size original_args_size;
2573 register int argnum;
2578 CUMULATIVE_ARGS args_so_far;
2579 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2580 struct args_size offset; struct args_size size; rtx save_area; };
2582 int old_inhibit_defer_pop = inhibit_defer_pop;
2583 rtx call_fusage = 0;
2584 int reg_parm_stack_space = 0;
2585 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2586 /* Define the boundary of the register parm stack space that needs to be
2588 int low_to_save = -1, high_to_save = 0;
2589 rtx save_area = 0; /* Place that it is saved */
/* Snapshot the outgoing-arg usage bookkeeping so it can be restored at the
   bottom of this function.  */
2592 #ifdef ACCUMULATE_OUTGOING_ARGS
2593 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2594 char *initial_stack_usage_map = stack_usage_map;
2598 #ifdef REG_PARM_STACK_SPACE
2599 /* Size of the stack reserved for parameter registers. */
2600 #ifdef MAYBE_REG_PARM_STACK_SPACE
2601 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2603 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
/* Begin walking the variadic argument list.  With K&R prototypes the fixed
   parameters themselves also arrive via varargs and are unpacked just below.  */
2607 VA_START (p, nargs);
2609 #ifndef ANSI_PROTOTYPES
2610 orgfun = va_arg (p, rtx);
2611 no_queue = va_arg (p, int);
2612 outmode = va_arg (p, enum machine_mode);
2613 nargs = va_arg (p, int);
2618 /* Copy all the libcall-arguments out of the varargs data
2619 and into a vector ARGVEC.
2621 Compute how to pass each argument. We only support a very small subset
2622 of the full argument passing conventions to limit complexity here since
2623 library functions shouldn't have many args. */
2625 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2626 bzero ((char *) argvec, nargs * sizeof (struct arg));
2629 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2631 args_size.constant = 0;
/* First pass: for each (value, mode) pair decide register vs. stack placement
   and accumulate the total outgoing stack size in args_size.  */
2636 for (count = 0; count < nargs; count++)
2638 rtx val = va_arg (p, rtx);
2639 enum machine_mode mode = va_arg (p, enum machine_mode);
2641 /* We cannot convert the arg value to the mode the library wants here;
2642 must do it earlier where we know the signedness of the arg. */
2644 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2647 /* On some machines, there's no way to pass a float to a library fcn.
2648 Pass it as a double instead. */
2649 #ifdef LIBGCC_NEEDS_DOUBLE
2650 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2651 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2654 /* There's no need to call protect_from_queue, because
2655 either emit_move_insn or emit_push_insn will do that. */
2657 /* Make sure it is a reasonable operand for a move or push insn. */
2658 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2659 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2660 val = force_operand (val, NULL_RTX);
2662 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2663 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2665 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2666 be viewed as just an efficiency improvement. */
2667 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2668 emit_move_insn (slot, val);
2669 val = force_operand (XEXP (slot, 0), NULL_RTX);
2674 argvec[count].value = val;
2675 argvec[count].mode = mode;
2677 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2678 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2680 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2681 argvec[count].partial
2682 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2684 argvec[count].partial = 0;
/* Compute this arg's stack offset and padded size even if it is passed in a
   register, so args_size stays consistent.  */
2687 locate_and_pad_parm (mode, NULL_TREE,
2688 argvec[count].reg && argvec[count].partial == 0,
2689 NULL_TREE, &args_size, &argvec[count].offset,
2690 &argvec[count].size);
/* NOTE(review): the consequent of this test is elided in this excerpt —
   variable-sized libcall arguments appear to be rejected; confirm against the
   full source.  */
2692 if (argvec[count].size.var)
2695 if (reg_parm_stack_space == 0 && argvec[count].partial)
2696 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2698 if (argvec[count].reg == 0 || argvec[count].partial != 0
2699 || reg_parm_stack_space > 0)
2700 args_size.constant += argvec[count].size.constant;
2702 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2706 #ifdef FINAL_REG_PARM_STACK_SPACE
2707 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2711 /* If this machine requires an external definition for library
2712 functions, write one out. */
2713 assemble_external_libcall (fun);
2715 original_args_size = args_size;
/* Round the total arg-block size up to the preferred stack boundary.  */
2716 #ifdef PREFERRED_STACK_BOUNDARY
2717 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2718 / STACK_BYTES) * STACK_BYTES);
2721 args_size.constant = MAX (args_size.constant,
2722 reg_parm_stack_space);
2724 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2725 args_size.constant -= reg_parm_stack_space;
2728 if (args_size.constant > current_function_outgoing_args_size)
2729 current_function_outgoing_args_size = args_size.constant;
2731 #ifdef ACCUMULATE_OUTGOING_ARGS
2732 /* Since the stack pointer will never be pushed, it is possible for
2733 the evaluation of a parm to clobber something we have already
2734 written to the stack. Since most function calls on RISC machines
2735 do not use the stack, this is uncommon, but must work correctly.
2737 Therefore, we save any area of the stack that was already written
2738 and that we are using. Here we set up to do this by making a new
2739 stack usage map from the old one.
2741 Another approach might be to try to reorder the argument
2742 evaluations to avoid this conflicting stack usage. */
2744 needed = args_size.constant;
2746 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2747 /* Since we will be writing into the entire argument area, the
2748 map must be allocated for its entire size, not just the part that
2749 is the responsibility of the caller. */
2750 needed += reg_parm_stack_space;
2753 #ifdef ARGS_GROW_DOWNWARD
2754 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2757 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2760 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2762 if (initial_highest_arg_in_use)
2763 bcopy (initial_stack_usage_map, stack_usage_map,
2764 initial_highest_arg_in_use);
2766 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2767 bzero (&stack_usage_map[initial_highest_arg_in_use],
2768 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2771 /* The address of the outgoing argument list must not be copied to a
2772 register here, because argblock would be left pointing to the
2773 wrong place after the call to allocate_dynamic_stack_space below.
2776 argblock = virtual_outgoing_args_rtx;
2777 #else /* not ACCUMULATE_OUTGOING_ARGS */
2778 #ifndef PUSH_ROUNDING
2779 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2783 #ifdef PUSH_ARGS_REVERSED
2784 #ifdef PREFERRED_STACK_BOUNDARY
2785 /* If we push args individually in reverse order, perform stack alignment
2786 before the first push (the last arg). */
2788 anti_adjust_stack (GEN_INT (args_size.constant
2789 - original_args_size.constant))
/* NOTE(review): the initialization of ARGNUM and INC (last-to-first when
   PUSH_ARGS_REVERSED, first-to-last otherwise) is elided between here and the
   push loop below.  */;
2793 #ifdef PUSH_ARGS_REVERSED
2801 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2802 /* The argument list is the property of the called routine and it
2803 may clobber it. If the fixed area has been used for previous
2804 parameters, we must save and restore it.
2806 Here we compute the boundary of the that needs to be saved, if any. */
2808 #ifdef ARGS_GROW_DOWNWARD
2809 for (count = 0; count < reg_parm_stack_space + 1; count++)
2811 for (count = 0; count < reg_parm_stack_space; count++)
2814 if (count >= highest_outgoing_arg_in_use
2815 || stack_usage_map[count] == 0)
2818 if (low_to_save == -1)
2819 low_to_save = count;
2821 high_to_save = count;
/* If any of the fixed register-parm area is live, save it — in a single
   integer mode when alignment permits, otherwise via a BLKmode block move.  */
2824 if (low_to_save >= 0)
2826 int num_to_save = high_to_save - low_to_save + 1;
2827 enum machine_mode save_mode
2828 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2831 /* If we don't have the required alignment, must do this in BLKmode. */
2832 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2833 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2834 save_mode = BLKmode;
2836 #ifdef ARGS_GROW_DOWNWARD
2837 stack_area = gen_rtx_MEM (save_mode,
2838 memory_address (save_mode,
2839 plus_constant (argblock,
2842 stack_area = gen_rtx_MEM (save_mode,
2843 memory_address (save_mode,
2844 plus_constant (argblock,
2847 if (save_mode == BLKmode)
2849 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2850 emit_block_move (validize_mem (save_area), stack_area,
2851 GEN_INT (num_to_save),
2852 PARM_BOUNDARY / BITS_PER_UNIT);
2856 save_area = gen_reg_rtx (save_mode);
2857 emit_move_insn (save_area, stack_area);
2862 /* Push the args that need to be pushed. */
2864 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2865 are to be pushed. */
2866 for (count = 0; count < nargs; count++, argnum += inc)
2868 register enum machine_mode mode = argvec[argnum].mode;
2869 register rtx val = argvec[argnum].value;
2870 rtx reg = argvec[argnum].reg;
2871 int partial = argvec[argnum].partial;
2872 #ifdef ACCUMULATE_OUTGOING_ARGS
2873 int lower_bound, upper_bound, i;
/* Only args not passed entirely in registers are pushed here; fully-in-register
   args are loaded in the second loop below.  */
2876 if (! (reg != 0 && partial == 0))
2878 #ifdef ACCUMULATE_OUTGOING_ARGS
2879 /* If this is being stored into a pre-allocated, fixed-size, stack
2880 area, save any previous data at that location. */
2882 #ifdef ARGS_GROW_DOWNWARD
2883 /* stack_slot is negative, but we want to index stack_usage_map
2884 with positive values. */
2885 upper_bound = -argvec[argnum].offset.constant + 1;
2886 lower_bound = upper_bound - argvec[argnum].size.constant;
2888 lower_bound = argvec[argnum].offset.constant;
2889 upper_bound = lower_bound + argvec[argnum].size.constant;
2892 for (i = lower_bound; i < upper_bound; i++)
2893 if (stack_usage_map[i]
2894 /* Don't store things in the fixed argument area at this point;
2895 it has already been saved. */
2896 && i > reg_parm_stack_space)
2899 if (i != upper_bound)
2901 /* We need to make a save area. See what mode we can make it. */
2902 enum machine_mode save_mode
2903 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2910 plus_constant (argblock,
2911 argvec[argnum].offset.constant)));
2913 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2914 emit_move_insn (argvec[argnum].save_area, stack_area);
2917 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2918 argblock, GEN_INT (argvec[argnum].offset.constant),
2919 reg_parm_stack_space);
2921 #ifdef ACCUMULATE_OUTGOING_ARGS
2922 /* Now mark the segment we just used. */
2923 for (i = lower_bound; i < upper_bound; i++)
2924 stack_usage_map[i] = 1;
2931 #ifndef PUSH_ARGS_REVERSED
2932 #ifdef PREFERRED_STACK_BOUNDARY
2933 /* If we pushed args in forward order, perform stack alignment
2934 after pushing the last arg. */
2936 anti_adjust_stack (GEN_INT (args_size.constant
2937 - original_args_size.constant));
2941 #ifdef PUSH_ARGS_REVERSED
/* Legitimize the call target (may force the address into a register).  */
2947 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2949 /* Now load any reg parms into their regs. */
2951 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2952 are to be pushed. */
2953 for (count = 0; count < nargs; count++, argnum += inc)
2955 register rtx val = argvec[argnum].value;
2956 rtx reg = argvec[argnum].reg;
2957 int partial = argvec[argnum].partial;
2959 if (reg != 0 && partial == 0)
2960 emit_move_insn (reg, val);
2964 /* For version 1.37, try deleting this entirely. */
2968 /* Any regs containing parms remain in use through the call. */
2969 for (count = 0; count < nargs; count++)
2970 if (argvec[count].reg != 0)
2971 use_reg (&call_fusage, argvec[count].reg);
2973 /* Don't allow popping to be deferred, since then
2974 cse'ing of library calls could delete a call and leave the pop. */
2977 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2978 will set inhibit_defer_pop to that value. */
2980 /* The return type is needed to decide how many bytes the function pops.
2981 Signedness plays no role in that, so for simplicity, we pretend it's
2982 always signed. We also assume that the list of arguments passed has
2983 no impact, so we pretend it is unknown. */
/* NOTE(review): the first line of this call (presumably `emit_call_1 (fun,
   NULL_TREE,` by analogy with expand_call) is elided; the lines below are its
   remaining arguments.  */
2986 get_identifier (XSTR (orgfun, 0)),
2987 build_function_type (outmode == VOIDmode ? void_type_node
2988 : type_for_mode (outmode, 0), NULL_TREE),
2989 original_args_size.constant, args_size.constant, 0,
2990 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2991 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2992 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2996 /* Now restore inhibit_defer_pop to its actual original value. */
/* Undo the argument-area saves made above: first the fixed register-parm
   area, then any individually saved argument slots, then the usage map.  */
2999 #ifdef ACCUMULATE_OUTGOING_ARGS
3000 #ifdef REG_PARM_STACK_SPACE
3003 enum machine_mode save_mode = GET_MODE (save_area);
3004 #ifdef ARGS_GROW_DOWNWARD
3006 = gen_rtx_MEM (save_mode,
3007 memory_address (save_mode,
3008 plus_constant (argblock,
3012 = gen_rtx_MEM (save_mode,
3013 memory_address (save_mode,
3014 plus_constant (argblock, low_to_save)));
3017 if (save_mode != BLKmode)
3018 emit_move_insn (stack_area, save_area);
3020 emit_block_move (stack_area, validize_mem (save_area),
3021 GEN_INT (high_to_save - low_to_save + 1),
3022 PARM_BOUNDARY / BITS_PER_UNIT);
3026 /* If we saved any argument areas, restore them. */
3027 for (count = 0; count < nargs; count++)
3028 if (argvec[count].save_area)
3030 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3032 = gen_rtx_MEM (save_mode,
3035 plus_constant (argblock,
3036 argvec[count].offset.constant)));
3038 emit_move_insn (stack_area, argvec[count].save_area);
3041 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3042 stack_usage_map = initial_stack_usage_map;
3046 /* Like emit_library_call except that an extra argument, VALUE,
3047 comes second and says where to store the result.
3048 (If VALUE is zero, this function chooses a convenient way
3049 to return the value.
3051 This function returns an rtx for where the value is to be found.
3052 If VALUE is nonzero, VALUE is returned. */
3055 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3056 enum machine_mode outmode, int nargs, ...))
3058 #ifndef ANSI_PROTOTYPES
3062 enum machine_mode outmode;
3066 /* Total size in bytes of all the stack-parms scanned so far. */
3067 struct args_size args_size;
3068 /* Size of arguments before any adjustments (such as rounding). */
3069 struct args_size original_args_size;
3070 register int argnum;
3075 CUMULATIVE_ARGS args_so_far;
3076 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3077 struct args_size offset; struct args_size size; rtx save_area; };
3079 int old_inhibit_defer_pop = inhibit_defer_pop;
3080 rtx call_fusage = 0;
3082 int pcc_struct_value = 0;
3083 int struct_value_size = 0;
3085 int reg_parm_stack_space = 0;
3086 #ifdef ACCUMULATE_OUTGOING_ARGS
3090 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3091 /* Define the boundary of the register parm stack space that needs to be
3093 int low_to_save = -1, high_to_save = 0;
3094 rtx save_area = 0; /* Place that it is saved */
3097 #ifdef ACCUMULATE_OUTGOING_ARGS
3098 /* Size of the stack reserved for parameter registers. */
3099 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3100 char *initial_stack_usage_map = stack_usage_map;
3103 #ifdef REG_PARM_STACK_SPACE
3104 #ifdef MAYBE_REG_PARM_STACK_SPACE
3105 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3107 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3111 VA_START (p, nargs);
3113 #ifndef ANSI_PROTOTYPES
3114 orgfun = va_arg (p, rtx);
3115 value = va_arg (p, rtx);
3116 no_queue = va_arg (p, int);
3117 outmode = va_arg (p, enum machine_mode);
3118 nargs = va_arg (p, int);
3121 is_const = no_queue;
3124 /* If this kind of value comes back in memory,
3125 decide where in memory it should come back. */
3126 if (aggregate_value_p (type_for_mode (outmode, 0)))
3128 #ifdef PCC_STATIC_STRUCT_RETURN
3130 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3132 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3133 pcc_struct_value = 1;
3135 value = gen_reg_rtx (outmode);
3136 #else /* not PCC_STATIC_STRUCT_RETURN */
3137 struct_value_size = GET_MODE_SIZE (outmode);
3138 if (value != 0 && GET_CODE (value) == MEM)
3141 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3144 /* This call returns a big structure. */
3148 /* ??? Unfinished: must pass the memory address as an argument. */
3150 /* Copy all the libcall-arguments out of the varargs data
3151 and into a vector ARGVEC.
3153 Compute how to pass each argument. We only support a very small subset
3154 of the full argument passing conventions to limit complexity here since
3155 library functions shouldn't have many args. */
3157 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3158 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3160 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3162 args_size.constant = 0;
3169 /* If there's a structure value address to be passed,
3170 either pass it in the special place, or pass it as an extra argument. */
3171 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3173 rtx addr = XEXP (mem_value, 0);
3176 /* Make sure it is a reasonable operand for a move or push insn. */
3177 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3178 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3179 addr = force_operand (addr, NULL_RTX);
3181 argvec[count].value = addr;
3182 argvec[count].mode = Pmode;
3183 argvec[count].partial = 0;
3185 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3186 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3187 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3191 locate_and_pad_parm (Pmode, NULL_TREE,
3192 argvec[count].reg && argvec[count].partial == 0,
3193 NULL_TREE, &args_size, &argvec[count].offset,
3194 &argvec[count].size);
3197 if (argvec[count].reg == 0 || argvec[count].partial != 0
3198 || reg_parm_stack_space > 0)
3199 args_size.constant += argvec[count].size.constant;
3201 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3206 for (; count < nargs; count++)
3208 rtx val = va_arg (p, rtx);
3209 enum machine_mode mode = va_arg (p, enum machine_mode);
3211 /* We cannot convert the arg value to the mode the library wants here;
3212 must do it earlier where we know the signedness of the arg. */
3214 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3217 /* On some machines, there's no way to pass a float to a library fcn.
3218 Pass it as a double instead. */
3219 #ifdef LIBGCC_NEEDS_DOUBLE
3220 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3221 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3224 /* There's no need to call protect_from_queue, because
3225 either emit_move_insn or emit_push_insn will do that. */
3227 /* Make sure it is a reasonable operand for a move or push insn. */
3228 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3229 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3230 val = force_operand (val, NULL_RTX);
3232 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3233 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3235 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3236 be viewed as just an efficiency improvement. */
3237 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3238 emit_move_insn (slot, val);
3239 val = XEXP (slot, 0);
3244 argvec[count].value = val;
3245 argvec[count].mode = mode;
3247 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3248 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3250 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3251 argvec[count].partial
3252 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3254 argvec[count].partial = 0;
3257 locate_and_pad_parm (mode, NULL_TREE,
3258 argvec[count].reg && argvec[count].partial == 0,
3259 NULL_TREE, &args_size, &argvec[count].offset,
3260 &argvec[count].size);
3262 if (argvec[count].size.var)
3265 if (reg_parm_stack_space == 0 && argvec[count].partial)
3266 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3268 if (argvec[count].reg == 0 || argvec[count].partial != 0
3269 || reg_parm_stack_space > 0)
3270 args_size.constant += argvec[count].size.constant;
3272 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3276 #ifdef FINAL_REG_PARM_STACK_SPACE
3277 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3280 /* If this machine requires an external definition for library
3281 functions, write one out. */
3282 assemble_external_libcall (fun);
3284 original_args_size = args_size;
3285 #ifdef PREFERRED_STACK_BOUNDARY
3286 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3287 / STACK_BYTES) * STACK_BYTES);
3290 args_size.constant = MAX (args_size.constant,
3291 reg_parm_stack_space);
3293 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3294 args_size.constant -= reg_parm_stack_space;
3297 if (args_size.constant > current_function_outgoing_args_size)
3298 current_function_outgoing_args_size = args_size.constant;
3300 #ifdef ACCUMULATE_OUTGOING_ARGS
3301 /* Since the stack pointer will never be pushed, it is possible for
3302 the evaluation of a parm to clobber something we have already
3303 written to the stack. Since most function calls on RISC machines
3304 do not use the stack, this is uncommon, but must work correctly.
3306 Therefore, we save any area of the stack that was already written
3307 and that we are using. Here we set up to do this by making a new
3308 stack usage map from the old one.
3310 Another approach might be to try to reorder the argument
3311 evaluations to avoid this conflicting stack usage. */
3313 needed = args_size.constant;
3315 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3316 /* Since we will be writing into the entire argument area, the
3317 map must be allocated for its entire size, not just the part that
3318 is the responsibility of the caller. */
3319 needed += reg_parm_stack_space;
3322 #ifdef ARGS_GROW_DOWNWARD
3323 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3326 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3329 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3331 if (initial_highest_arg_in_use)
3332 bcopy (initial_stack_usage_map, stack_usage_map,
3333 initial_highest_arg_in_use);
3335 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3336 bzero (&stack_usage_map[initial_highest_arg_in_use],
3337 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3340 /* The address of the outgoing argument list must not be copied to a
3341 register here, because argblock would be left pointing to the
3342 wrong place after the call to allocate_dynamic_stack_space below.
3345 argblock = virtual_outgoing_args_rtx;
3346 #else /* not ACCUMULATE_OUTGOING_ARGS */
3347 #ifndef PUSH_ROUNDING
3348 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3352 #ifdef PUSH_ARGS_REVERSED
3353 #ifdef PREFERRED_STACK_BOUNDARY
3354 /* If we push args individually in reverse order, perform stack alignment
3355 before the first push (the last arg). */
3357 anti_adjust_stack (GEN_INT (args_size.constant
3358 - original_args_size.constant));
3362 #ifdef PUSH_ARGS_REVERSED
3370 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3371 /* The argument list is the property of the called routine and it
3372 may clobber it. If the fixed area has been used for previous
3373 parameters, we must save and restore it.
3375 Here we compute the boundary of the area that needs to be saved, if any. */
3377 #ifdef ARGS_GROW_DOWNWARD
3378 for (count = 0; count < reg_parm_stack_space + 1; count++)
3380 for (count = 0; count < reg_parm_stack_space; count++)
3383 if (count >= highest_outgoing_arg_in_use
3384 || stack_usage_map[count] == 0)
3387 if (low_to_save == -1)
3388 low_to_save = count;
3390 high_to_save = count;
3393 if (low_to_save >= 0)
3395 int num_to_save = high_to_save - low_to_save + 1;
3396 enum machine_mode save_mode
3397 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3400 /* If we don't have the required alignment, must do this in BLKmode. */
3401 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3402 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3403 save_mode = BLKmode;
3405 #ifdef ARGS_GROW_DOWNWARD
3406 stack_area = gen_rtx_MEM (save_mode,
3407 memory_address (save_mode,
3408 plus_constant (argblock,
3411 stack_area = gen_rtx_MEM (save_mode,
3412 memory_address (save_mode,
3413 plus_constant (argblock,
3416 if (save_mode == BLKmode)
3418 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3419 emit_block_move (validize_mem (save_area), stack_area,
3420 GEN_INT (num_to_save),
3421 PARM_BOUNDARY / BITS_PER_UNIT);
3425 save_area = gen_reg_rtx (save_mode);
3426 emit_move_insn (save_area, stack_area);
3431 /* Push the args that need to be pushed. */
3433 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3434 are to be pushed. */
3435 for (count = 0; count < nargs; count++, argnum += inc)
3437 register enum machine_mode mode = argvec[argnum].mode;
3438 register rtx val = argvec[argnum].value;
3439 rtx reg = argvec[argnum].reg;
3440 int partial = argvec[argnum].partial;
3441 #ifdef ACCUMULATE_OUTGOING_ARGS
3442 int lower_bound, upper_bound, i;
3445 if (! (reg != 0 && partial == 0))
3447 #ifdef ACCUMULATE_OUTGOING_ARGS
3448 /* If this is being stored into a pre-allocated, fixed-size, stack
3449 area, save any previous data at that location. */
3451 #ifdef ARGS_GROW_DOWNWARD
3452 /* stack_slot is negative, but we want to index stack_usage_map
3453 with positive values. */
3454 upper_bound = -argvec[argnum].offset.constant + 1;
3455 lower_bound = upper_bound - argvec[argnum].size.constant;
3457 lower_bound = argvec[argnum].offset.constant;
3458 upper_bound = lower_bound + argvec[argnum].size.constant;
3461 for (i = lower_bound; i < upper_bound; i++)
3462 if (stack_usage_map[i]
3463 /* Don't store things in the fixed argument area at this point;
3464 it has already been saved. */
3465 && i > reg_parm_stack_space)
3468 if (i != upper_bound)
3470 /* We need to make a save area. See what mode we can make it. */
3471 enum machine_mode save_mode
3472 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3479 plus_constant (argblock,
3480 argvec[argnum].offset.constant)));
3481 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3483 emit_move_insn (argvec[argnum].save_area, stack_area);
3486 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3487 argblock, GEN_INT (argvec[argnum].offset.constant),
3488 reg_parm_stack_space);
3490 #ifdef ACCUMULATE_OUTGOING_ARGS
3491 /* Now mark the segment we just used. */
3492 for (i = lower_bound; i < upper_bound; i++)
3493 stack_usage_map[i] = 1;
3500 #ifndef PUSH_ARGS_REVERSED
3501 #ifdef PREFERRED_STACK_BOUNDARY
3502 /* If we pushed args in forward order, perform stack alignment
3503 after pushing the last arg. */
3505 anti_adjust_stack (GEN_INT (args_size.constant
3506 - original_args_size.constant));
3510 #ifdef PUSH_ARGS_REVERSED
3516 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3518 /* Now load any reg parms into their regs. */
3520 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3521 are to be pushed. */
3522 for (count = 0; count < nargs; count++, argnum += inc)
3524 register rtx val = argvec[argnum].value;
3525 rtx reg = argvec[argnum].reg;
3526 int partial = argvec[argnum].partial;
3528 if (reg != 0 && partial == 0)
3529 emit_move_insn (reg, val);
3534 /* For version 1.37, try deleting this entirely. */
3539 /* Any regs containing parms remain in use through the call. */
3540 for (count = 0; count < nargs; count++)
3541 if (argvec[count].reg != 0)
3542 use_reg (&call_fusage, argvec[count].reg);
3544 /* Pass the function the address in which to return a structure value. */
3545 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3547 emit_move_insn (struct_value_rtx,
3549 force_operand (XEXP (mem_value, 0),
3551 if (GET_CODE (struct_value_rtx) == REG)
3552 use_reg (&call_fusage, struct_value_rtx);
3555 /* Don't allow popping to be deferred, since then
3556 cse'ing of library calls could delete a call and leave the pop. */
3559 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3560 will set inhibit_defer_pop to that value. */
3561 /* See the comment in emit_library_call about the function type we build
3565 get_identifier (XSTR (orgfun, 0)),
3566 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3567 original_args_size.constant, args_size.constant,
3569 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3570 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3571 old_inhibit_defer_pop + 1, call_fusage, is_const);
3573 /* Now restore inhibit_defer_pop to its actual original value. */
3578 /* Copy the value to the right place. */
3579 if (outmode != VOIDmode)
3585 if (value != mem_value)
3586 emit_move_insn (value, mem_value);
3588 else if (value != 0)
3589 emit_move_insn (value, hard_libcall_value (outmode));
3591 value = hard_libcall_value (outmode);
3594 #ifdef ACCUMULATE_OUTGOING_ARGS
3595 #ifdef REG_PARM_STACK_SPACE
3598 enum machine_mode save_mode = GET_MODE (save_area);
3599 #ifdef ARGS_GROW_DOWNWARD
3601 = gen_rtx_MEM (save_mode,
3602 memory_address (save_mode,
3603 plus_constant (argblock,
3607 = gen_rtx_MEM (save_mode,
3608 memory_address (save_mode,
3609 plus_constant (argblock, low_to_save)));
3611 if (save_mode != BLKmode)
3612 emit_move_insn (stack_area, save_area);
3614 emit_block_move (stack_area, validize_mem (save_area),
3615 GEN_INT (high_to_save - low_to_save + 1),
3616 PARM_BOUNDARY / BITS_PER_UNIT);
3620 /* If we saved any argument areas, restore them. */
3621 for (count = 0; count < nargs; count++)
3622 if (argvec[count].save_area)
3624 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3626 = gen_rtx_MEM (save_mode,
3629 plus_constant (argblock,
3630 argvec[count].offset.constant)));
3632 emit_move_insn (stack_area, argvec[count].save_area);
3635 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3636 stack_usage_map = initial_stack_usage_map;
3643 /* Return an rtx which represents a suitable home on the stack
3644 given TYPE, the type of the argument looking for a home.
3645 This is called only for BLKmode arguments.
3647 SIZE is the size needed for this target.
3648 ARGS_ADDR is the address of the bottom of the argument block for this call.
3649 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3650 if this machine uses push insns. */
3653 target_for_arg (type, size, args_addr, offset)
3657      struct args_size offset;
/* Convert the args_size OFFSET into an rtx so it can participate in
   address arithmetic; it is a CONST_INT whenever the offset is a
   compile-time constant.  */
3660 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3662 /* We do not call memory_address if possible,
3663 because we want to address as close to the stack
3664 as possible. For non-variable sized arguments,
3665 this will be stack-pointer relative addressing. */
3666 if (GET_CODE (offset_rtx) == CONST_INT)
/* Constant offset: fold it directly into ARGS_ADDR.  */
3667 target = plus_constant (args_addr, INTVAL (offset_rtx));
3670 /* I have no idea how to guarantee that this
3671 will work in the presence of register parameters. */
/* Variable offset: build ARGS_ADDR + OFFSET_RTX explicitly and then
   force it into a machine-legitimate address.  */
3672 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3673 target = memory_address (QImode, target);
/* Hand back the argument's stack home as a BLKmode memory reference
   (this routine is only called for BLKmode arguments).  */
3676 return gen_rtx_MEM (BLKmode, target);
3680 /* Store a single argument for a function call
3681 into the register or memory area where it must be passed.
3682 *ARG describes the argument value and where to pass it.
3684 ARGBLOCK is the address of the stack-block for all the arguments,
3685 or 0 on a machine where arguments are pushed individually.
3687 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3688 so must be careful about how the stack is used.
3690 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3691 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3692 that we need not worry about saving and restoring the stack.
3694 FNDECL is the declaration of the function we are calling. */
3697 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3698 reg_parm_stack_space)
3699 struct arg_data *arg;
3702 int variable_size ATTRIBUTE_UNUSED;
3703 int reg_parm_stack_space;
3705 register tree pval = arg->tree_value;
3709 #ifdef ACCUMULATE_OUTGOING_ARGS
3710 int i, lower_bound = 0, upper_bound = 0;
3713 if (TREE_CODE (pval) == ERROR_MARK)
3716 /* Push a new temporary level for any temporaries we make for
3720 #ifdef ACCUMULATE_OUTGOING_ARGS
3721 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3722 save any previous data at that location. */
3723 if (argblock && ! variable_size && arg->stack)
3725 #ifdef ARGS_GROW_DOWNWARD
3726 /* stack_slot is negative, but we want to index stack_usage_map
3727 with positive values. */
3728 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3729 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3733 lower_bound = upper_bound - arg->size.constant;
3735 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3736 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3740 upper_bound = lower_bound + arg->size.constant;
3743 for (i = lower_bound; i < upper_bound; i++)
3744 if (stack_usage_map[i]
3745 /* Don't store things in the fixed argument area at this point;
3746 it has already been saved. */
3747 && i > reg_parm_stack_space)
3750 if (i != upper_bound)
3752 /* We need to make a save area. See what mode we can make it. */
3753 enum machine_mode save_mode
3754 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3756 = gen_rtx_MEM (save_mode,
3757 memory_address (save_mode,
3758 XEXP (arg->stack_slot, 0)));
3760 if (save_mode == BLKmode)
3762 arg->save_area = assign_stack_temp (BLKmode,
3763 arg->size.constant, 0);
3764 MEM_SET_IN_STRUCT_P (arg->save_area,
3765 AGGREGATE_TYPE_P (TREE_TYPE
3766 (arg->tree_value)));
3767 preserve_temp_slots (arg->save_area);
3768 emit_block_move (validize_mem (arg->save_area), stack_area,
3769 GEN_INT (arg->size.constant),
3770 PARM_BOUNDARY / BITS_PER_UNIT);
3774 arg->save_area = gen_reg_rtx (save_mode);
3775 emit_move_insn (arg->save_area, stack_area);
3780 /* Now that we have saved any slots that will be overwritten by this
3781 store, mark all slots this store will use. We must do this before
3782 we actually expand the argument since the expansion itself may
3783 trigger library calls which might need to use the same stack slot. */
3784 if (argblock && ! variable_size && arg->stack)
3785 for (i = lower_bound; i < upper_bound; i++)
3786 stack_usage_map[i] = 1;
3789 /* If this isn't going to be placed on both the stack and in registers,
3790 set up the register and number of words. */
3791 if (! arg->pass_on_stack)
3792 reg = arg->reg, partial = arg->partial;
3794 if (reg != 0 && partial == 0)
3795 /* Being passed entirely in a register. We shouldn't be called in
3799 /* If this arg needs special alignment, don't load the registers
3801 if (arg->n_aligned_regs != 0)
3804 /* If this is being passed partially in a register, we can't evaluate
3805 it directly into its stack slot. Otherwise, we can. */
3806 if (arg->value == 0)
3808 #ifdef ACCUMULATE_OUTGOING_ARGS
3809 /* stack_arg_under_construction is nonzero if a function argument is
3810 being evaluated directly into the outgoing argument list and
3811 expand_call must take special action to preserve the argument list
3812 if it is called recursively.
3814 For scalar function arguments stack_usage_map is sufficient to
3815 determine which stack slots must be saved and restored. Scalar
3816 arguments in general have pass_on_stack == 0.
3818 If this argument is initialized by a function which takes the
3819 address of the argument (a C++ constructor or a C function
3820 returning a BLKmode structure), then stack_usage_map is
3821 insufficient and expand_call must push the stack around the
3822 function call. Such arguments have pass_on_stack == 1.
3824 Note that it is always safe to set stack_arg_under_construction,
3825 but this generates suboptimal code if set when not needed. */
3827 if (arg->pass_on_stack)
3828 stack_arg_under_construction++;
3830 arg->value = expand_expr (pval,
3832 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3833 ? NULL_RTX : arg->stack,
3836 /* If we are promoting object (or for any other reason) the mode
3837 doesn't agree, convert the mode. */
3839 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3840 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3841 arg->value, arg->unsignedp);
3843 #ifdef ACCUMULATE_OUTGOING_ARGS
3844 if (arg->pass_on_stack)
3845 stack_arg_under_construction--;
3849 /* Don't allow anything left on stack from computation
3850 of argument to alloca. */
3852 do_pending_stack_adjust ();
3854 if (arg->value == arg->stack)
3856 /* If the value is already in the stack slot, we are done. */
3857 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3859 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3860 XEXP (arg->stack, 0), Pmode,
3861 ARGS_SIZE_RTX (arg->size),
3862 TYPE_MODE (sizetype),
3863 GEN_INT (MEMORY_USE_RW),
3864 TYPE_MODE (integer_type_node));
3867 else if (arg->mode != BLKmode)
3871 /* Argument is a scalar, not entirely passed in registers.
3872 (If part is passed in registers, arg->partial says how much
3873 and emit_push_insn will take care of putting it there.)
3875 Push it, and if its size is less than the
3876 amount of space allocated to it,
3877 also bump stack pointer by the additional space.
3878 Note that in C the default argument promotions
3879 will prevent such mismatches. */
3881 size = GET_MODE_SIZE (arg->mode);
3882 /* Compute how much space the push instruction will push.
3883 On many machines, pushing a byte will advance the stack
3884 pointer by a halfword. */
3885 #ifdef PUSH_ROUNDING
3886 size = PUSH_ROUNDING (size);
3890 /* Compute how much space the argument should get:
3891 round up to a multiple of the alignment for arguments. */
3892 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3893 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3894 / (PARM_BOUNDARY / BITS_PER_UNIT))
3895 * (PARM_BOUNDARY / BITS_PER_UNIT));
3897 /* This isn't already where we want it on the stack, so put it there.
3898 This can either be done with push or copy insns. */
3899 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3900 partial, reg, used - size, argblock,
3901 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3905 /* BLKmode, at least partly to be pushed. */
3907 register int excess;
3910 /* Pushing a nonscalar.
3911 If part is passed in registers, PARTIAL says how much
3912 and emit_push_insn will take care of putting it there. */
3914 /* Round its size up to a multiple
3915 of the allocation unit for arguments. */
3917 if (arg->size.var != 0)
3920 size_rtx = ARGS_SIZE_RTX (arg->size);
3924 /* PUSH_ROUNDING has no effect on us, because
3925 emit_push_insn for BLKmode is careful to avoid it. */
3926 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3927 + partial * UNITS_PER_WORD);
3928 size_rtx = expr_size (pval);
3931 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3932 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3933 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3934 reg_parm_stack_space);
3938 /* Unless this is a partially-in-register argument, the argument is now
3941 ??? Note that this can change arg->value from arg->stack to
3942 arg->stack_slot and it matters when they are not the same.
3943 It isn't totally clear that this is correct in all cases. */
3945 arg->value = arg->stack_slot;
3947 /* Once we have pushed something, pops can't safely
3948 be deferred during the rest of the arguments. */
3951 /* ANSI doesn't require a sequence point here,
3952 but PCC has one, so this will avoid some problems. */
3955 /* Free any temporary slots made in processing this argument. Show
3956 that we might have taken the address of something and pushed that
3958 preserve_temp_slots (NULL_RTX);