1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "insn-flags.h"
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
38 /* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
46 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47 #define PUSH_ARGS_REVERSED /* If it's last to first */
52 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
55 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): this excerpt elides several original lines, including the
   `struct arg_data {` opener, some field declarations, and the closing
   `};` — only the surviving comment/field lines appear below.  */
59 /* Tree node for this argument. */
61 /* Mode for value; TYPE_MODE unless promoted. */
62 enum machine_mode mode;
63 /* Current RTL value for argument, or 0 if it isn't precomputed. */
65 /* Initially-compute RTL value for argument; only for const functions. */
67 /* Register to pass this argument in, 0 if passed on stack, or an
68 PARALLEL if the arg is to be copied into multiple non-contiguous
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Non-zero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Offset of this argument from beginning of stack-args. */
83 struct args_size offset;
84 /* Similar, but offset to the start of the stack slot. Different from
85 OFFSET if this arg pads downward. */
86 struct args_size slot_offset;
87 /* Size of this argument on the stack, rounded up for any padding it gets,
88 parts of the argument passed in registers do not count.
89 If REG_PARM_STACK_SPACE is defined, then register parms
90 are counted here as well. */
91 struct args_size size;
92 /* Location on the stack at which parameter should be stored. The store
93 has already been done if STACK == VALUE. */
95 /* Location on the stack of the start of this argument slot. This can
96 differ from STACK if this arg pads downward. This location is known
97 to be aligned to FUNCTION_ARG_BOUNDARY. */
99 #ifdef ACCUMULATE_OUTGOING_ARGS
100 /* Place that this stack area has been saved, if needed. */
103 /* If an argument's alignment does not permit direct copying into registers,
104 copy in smaller-sized pieces into pseudos. These are stored in a
105 block pointed to by this field. The next field says how many
106 word-sized pseudos we made. */
109 /* The amount that the stack pointer needs to be adjusted to
110 force alignment for the next argument. */
111 struct args_size alignment_pad;
/* File-scope state used while laying out outgoing arguments.  */
114 #ifdef ACCUMULATE_OUTGOING_ARGS
115 /* A vector of one char per byte of stack space. A byte if non-zero if
116 the corresponding stack location has been used.
117 This vector is used to prevent a function call within an argument from
118 clobbering any stack already set up. */
119 static char *stack_usage_map;
121 /* Size of STACK_USAGE_MAP. */
122 static int highest_outgoing_arg_in_use;
/* NOTE(review): the matching #endif for the ACCUMULATE_OUTGOING_ARGS
   conditional above is not visible in this excerpt.  */
124 /* stack_arg_under_construction is nonzero when an argument may be
125 initialized with a constructor call (including a C function that
126 returns a BLKmode struct) and expand_call must take special action
127 to make sure the object being constructed does not overlap the
128 argument list for the constructor call. */
129 int stack_arg_under_construction;
/* Forward declarations of subroutines local to this file.
   NOTE(review): some parameter lists are split across elided lines in
   this excerpt, so a few declarations below appear truncated.  */
132 static int calls_function PROTO ((tree, int));
133 static int calls_function_1 PROTO ((tree, int));
134 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
135 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
136 rtx, int, rtx, int));
137 static void precompute_register_parameters PROTO ((int, struct arg_data *,
139 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
141 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
143 static int finalize_must_preallocate PROTO ((int, int,
145 struct args_size *));
146 static void precompute_arguments PROTO ((int, int, int,
148 struct args_size *));
149 static int compute_argument_block_size PROTO ((int,
150 struct args_size *));
151 static void initialize_argument_information PROTO ((int,
158 static void compute_argument_addresses PROTO ((struct arg_data *,
160 static rtx rtx_for_function_call PROTO ((tree, tree));
161 static void load_register_parameters PROTO ((struct arg_data *,
/* These two helpers exist only when a fixed register-parameter area on
   the stack must be saved/restored around nested calls.  */
164 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
165 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
166 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
169 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
172 If WHICH is 0, return 1 if EXP contains a call to any function.
173 Actually, we only need return 1 if evaluating EXP would require pushing
174 arguments on the stack, but that is too difficult to compute, so we just
175 assume any function call might require the stack. */
/* List of SAVE_EXPRs already visited by the current walk; used by
   calls_function_1 to avoid re-scanning (and infinitely recursing on)
   shared SAVE_EXPR nodes.  */
177 static tree calls_function_save_exprs;
/* Entry point: wraps calls_function_1, resetting the SAVE_EXPR list
   before and after the walk so each query starts clean.
   NOTE(review): the parameter declarations, local `val`, and the
   `return val;` are elided in this excerpt.  */
180 calls_function (exp, which)
185 calls_function_save_exprs = 0;
186 val = calls_function_1 (exp, which);
187 calls_function_save_exprs = 0;
/* Recursive worker for calls_function: walk tree EXP looking for function
   calls (see WHICH semantics documented above calls_function).
   NOTE(review): many lines of this function — the switch statement,
   several case labels, early returns, and the closing brace — are elided
   in this excerpt.  */
192 calls_function_1 (exp, which)
197 enum tree_code code = TREE_CODE (exp);
198 int type = TREE_CODE_CLASS (code);
199 int length = tree_code_length[(int) code];
201 /* If this code is language-specific, we don't know what it will do. */
202 if ((int) code >= NUM_TREE_CODES)
205 /* Only expressions and references can contain calls. */
206 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* A call through the address of a known FUNCTION_DECL: check whether the
   callee is alloca (or is known to call alloca), which matters even for
   WHICH == 1.  */
215 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
216 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
219 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
221 if ((DECL_BUILT_IN (fndecl)
222 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
223 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
224 || (DECL_SAVED_INSNS (fndecl)
225 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
229 /* Third operand is RTL. */
/* SAVE_EXPR: if it already has RTL it has been expanded and cannot push
   new args; otherwise record it on calls_function_save_exprs so a shared
   node is only walked once.  */
234 if (SAVE_EXPR_RTL (exp) != 0)
236 if (value_member (exp, calls_function_save_exprs))
238 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
239 calls_function_save_exprs)
240 return (TREE_OPERAND (exp, 0) != 0
241 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK: scan initializers of local variables, then all sub-blocks.  */
247 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
248 if (DECL_INITIAL (local) != 0
249 && calls_function_1 (DECL_INITIAL (local), which))
253 register tree subblock;
255 for (subblock = BLOCK_SUBBLOCKS (exp);
257 subblock = TREE_CHAIN (subblock))
258 if (calls_function_1 (subblock, which))
263 case METHOD_CALL_EXPR:
267 case WITH_CLEANUP_EXPR:
/* Default: recurse into every operand of this tree node.  */
278 for (i = 0; i < length; i++)
279 if (TREE_OPERAND (exp, i) != 0
280 && calls_function_1 (TREE_OPERAND (exp, i), which))
286 /* Force FUNEXP into a form suitable for the address of a CALL,
287 and return that as an rtx. Also load the static chain register
288 if FNDECL is a nested function.
290 CALL_FUSAGE points to a variable holding the prospective
291 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): parameter declarations, some #endif lines, the final
   `return funexp;`, and the closing brace are elided in this excerpt.  */
294 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
300 rtx static_chain_value = 0;
302 funexp = protect_from_queue (funexp, 0);
305 /* Get possible static chain value for nested function in C. */
306 static_chain_value = lookup_static_chain (fndecl);
308 /* Make a valid memory address and copy constants thru pseudo-regs,
309 but not for a constant address if -fno-function-cse. */
310 if (GET_CODE (funexp) != SYMBOL_REF)
311 /* If we are using registers for parameters, force the
312 function address into a register now. */
313 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
314 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
315 : memory_address (FUNCTION_MODE, funexp));
/* Otherwise (SYMBOL_REF): optionally CSE the function address into a
   register, unless CSE of function addresses is disabled, and (on some
   targets) never for a recursive call to the current function.  */
318 #ifndef NO_FUNCTION_CSE
319 if (optimize && ! flag_no_function_cse)
320 #ifdef NO_RECURSIVE_FUNCTION_CSE
321 if (fndecl != current_function_decl)
323 funexp = force_reg (Pmode, funexp);
/* If this is a nested function, load its static chain and record the
   register in CALL_FUSAGE so the call insn is known to use it.  */
327 if (static_chain_value != 0)
329 emit_move_insn (static_chain_rtx, static_chain_value);
331 if (GET_CODE (static_chain_rtx) == REG)
332 use_reg (call_fusage, static_chain_rtx);
338 /* Generate instructions to call function FUNEXP,
339 and optionally pop the results.
340 The CALL_INSN is the first insn generated.
342 FNDECL is the declaration node of the function. This is given to the
343 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
345 FUNTYPE is the data type of the function. This is given to the macro
346 RETURN_POPS_ARGS to determine whether this function pops its own args.
347 We used to allow an identifier for library functions, but that doesn't
348 work when the return type is an aggregate type and the calling convention
349 says that the pointer to this aggregate is to be popped by the callee.
351 STACK_SIZE is the number of bytes of arguments on the stack,
352 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
353 This is both to put into the call insn and
354 to generate explicit popping code if necessary.
356 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
357 It is zero if this call doesn't want a structure value.
359 NEXT_ARG_REG is the rtx that results from executing
360 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
361 just after all the args have had their registers assigned.
362 This could be whatever you like, but normally it is the first
363 arg-register beyond those used for args in this call,
364 or 0 if all the arg-registers are used in this call.
365 It is passed on to `gen_call' so you can put this info in the call insn.
367 VALREG is a hard register in which a value is returned,
368 or 0 if the call does not return a value.
370 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
371 the args to this call were processed.
372 We restore `inhibit_defer_pop' to that value.
374 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
375 denote registers used by the called function.
377 IS_CONST is true if this is a `const' call. */
/* NOTE(review): several parameter declarations, locals, #else/#endif
   lines, and the closing brace of this function are elided in this
   excerpt.  */
380 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
381 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
382 call_fusage, is_const)
384 tree fndecl ATTRIBUTE_UNUSED;
385 tree funtype ATTRIBUTE_UNUSED;
386 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
387 HOST_WIDE_INT rounded_stack_size;
388 HOST_WIDE_INT struct_value_size;
391 int old_inhibit_defer_pop;
395 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
396 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
/* N_POPPED is how many argument bytes the callee itself pops (per the
   target's RETURN_POPS_ARGS); relevant only when we push args.  */
398 #ifndef ACCUMULATE_OUTGOING_ARGS
399 int already_popped = 0;
400 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
403 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
404 and we don't want to load it into a register as an optimization,
405 because prepare_call_address already did it if it should be done. */
406 if (GET_CODE (funexp) != SYMBOL_REF)
407 funexp = memory_address (FUNCTION_MODE, funexp);
409 #ifndef ACCUMULATE_OUTGOING_ARGS
410 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
411 /* If the target has "call" or "call_value" insns, then prefer them
412 if no arguments are actually popped. If the target does not have
413 "call" or "call_value" insns, then we must use the popping versions
414 even if the call has no arguments to pop. */
415 #if defined (HAVE_call) && defined (HAVE_call_value)
416 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
419 if (HAVE_call_pop && HAVE_call_value_pop)
422 rtx n_pop = GEN_INT (n_popped);
425 /* If this subroutine pops its own args, record that in the call insn
426 if possible, for the sake of frame pointer elimination. */
/* Use the value-returning pattern when VALREG is set, the plain one
   otherwise.  */
429 pat = gen_call_value_pop (valreg,
430 gen_rtx_MEM (FUNCTION_MODE, funexp),
431 rounded_stack_size_rtx, next_arg_reg, n_pop);
433 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
434 rounded_stack_size_rtx, next_arg_reg, n_pop);
436 emit_call_insn (pat);
/* Fallback: non-popping call patterns.  */
443 #if defined (HAVE_call) && defined (HAVE_call_value)
444 if (HAVE_call && HAVE_call_value)
447 emit_call_insn (gen_call_value (valreg,
448 gen_rtx_MEM (FUNCTION_MODE, funexp),
449 rounded_stack_size_rtx, next_arg_reg,
452 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
453 rounded_stack_size_rtx, next_arg_reg,
454 struct_value_size_rtx));
460 /* Find the CALL insn we just emitted. */
461 for (call_insn = get_last_insn ();
462 call_insn && GET_CODE (call_insn) != CALL_INSN;
463 call_insn = PREV_INSN (call_insn))
469 /* Put the register usage information on the CALL. If there is already
470 some usage information, put ours at the end. */
471 if (CALL_INSN_FUNCTION_USAGE (call_insn))
/* Walk to the tail of the existing EXPR_LIST and append CALL_FUSAGE.  */
475 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
476 link = XEXP (link, 1))
479 XEXP (link, 1) = call_fusage;
482 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
484 /* If this is a const call, then set the insn's unchanging bit. */
486 CONST_CALL_P (call_insn) = 1;
488 /* Restore this now, so that we do defer pops for this call's args
489 if the context of the call as a whole permits. */
490 inhibit_defer_pop = old_inhibit_defer_pop;
492 #ifndef ACCUMULATE_OUTGOING_ARGS
493 /* If returning from the subroutine does not automatically pop the args,
494 we need an instruction to pop them sooner or later.
495 Perhaps do it now; perhaps just record how much space to pop later.
497 If returning from the subroutine does pop the args, indicate that the
498 stack pointer will be changed. */
/* Callee pops: mark the stack pointer clobbered and reduce the amount we
   still owe by what the callee already popped.  */
503 CALL_INSN_FUNCTION_USAGE (call_insn)
504 = gen_rtx_EXPR_LIST (VOIDmode,
505 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
506 CALL_INSN_FUNCTION_USAGE (call_insn));
507 rounded_stack_size -= n_popped;
508 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
/* Pop (or defer popping) whatever remains of the pushed arguments.  */
511 if (rounded_stack_size != 0)
513 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
514 pending_stack_adjust += rounded_stack_size;
516 adjust_stack (rounded_stack_size_rtx);
521 /* Determine if the function identified by NAME and FNDECL is one with
522 special properties we wish to know about.
524 For example, if the function might return more than one time (setjmp), then
525 set RETURNS_TWICE to a nonzero value.
527 Similarly set IS_LONGJMP for if the function is in the longjmp family.
529 Set IS_MALLOC for any of the standard memory allocation functions which
530 allocate from the heap.
532 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
533 space from the stack such as alloca. */
/* NOTE(review): parameter declarations, several locals (e.g. `tname'),
   the assignments of the out-flags in most branches, and the closing
   brace are elided in this excerpt.  */
536 special_function_p (fndecl, returns_twice, is_longjmp,
537 is_malloc, may_be_alloca)
548 /* The function decl may have the `malloc' attribute. */
549 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
/* Only consider further name-based matches for plausible candidates:
   a named, file-scope, `extern' function with a short-enough name.  */
552 && fndecl && DECL_NAME (fndecl)
553 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
554 /* Exclude functions not at the file scope, or not `extern',
555 since they are not the magic functions we would otherwise
557 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
559 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
562 /* We assume that alloca will always be called by name. It
563 makes no sense to pass it as a pointer-to-function to
564 anything that does not understand its behavior. */
566 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
568 && ! strcmp (name, "alloca"))
569 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
571 && ! strcmp (name, "__builtin_alloca"))));
573 /* Disregard prefix _, __ or __x. */
576 if (name[1] == '_' && name[2] == 'x')
578 else if (name[1] == '_')
/* Name-based classification: setjmp family (returns twice), longjmp
   family, and the historical hard-wired malloc-like names below.
   First characters are compared before strcmp as a cheap filter.  */
588 && (! strcmp (tname, "setjmp")
589 || ! strcmp (tname, "setjmp_syscall")))
591 && ! strcmp (tname, "sigsetjmp"))
593 && ! strcmp (tname, "savectx")));
595 && ! strcmp (tname, "siglongjmp"))
598 else if ((tname[0] == 'q' && tname[1] == 's'
599 && ! strcmp (tname, "qsetjmp"))
600 || (tname[0] == 'v' && tname[1] == 'f'
601 && ! strcmp (tname, "vfork")))
604 else if (tname[0] == 'l' && tname[1] == 'o'
605 && ! strcmp (tname, "longjmp"))
607 /* Do not add any more malloc-like functions to this list,
608 instead mark them as malloc functions using the malloc attribute.
609 Note, realloc is not suitable for attribute malloc since
610 it may return the same address across multiple calls. */
611 else if (! strcmp (tname, "malloc")
612 || ! strcmp (tname, "calloc")
613 || ! strcmp (tname, "strdup")
614 /* Note use of NAME rather than TNAME here. These functions
615 are only reserved when preceded with __. */
616 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
617 || ! strcmp (name, "__nw") /* mangled __builtin_new */
618 || ! strcmp (name, "__builtin_new")
619 || ! strcmp (name, "__builtin_vec_new"))
624 /* Precompute all register parameters as described by ARGS, storing values
625 into fields within the ARGS array.
627 NUM_ACTUALS indicates the total number elements in the ARGS array.
629 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): the declarations of the remaining parameters, the local
   `i', some statements, and the closing brace are elided in this
   excerpt.  */
632 precompute_register_parameters (num_actuals, args, reg_parm_seen)
634 struct arg_data *args;
/* Consider every argument that goes (at least partly) in registers.  */
641 for (i = 0; i < num_actuals; i++)
642 if (args[i].reg != 0 && ! args[i].pass_on_stack)
/* Expand the argument expression now if it has not been computed yet.  */
646 if (args[i].value == 0)
649 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
651 preserve_temp_slots (args[i].value);
654 /* ANSI doesn't require a sequence point here,
655 but PCC has one, so this will avoid some problems. */
659 /* If we are to promote the function arg to a wider mode,
662 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
664 = convert_modes (args[i].mode,
665 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
666 args[i].value, args[i].unsignedp);
668 /* If the value is expensive, and we are inside an appropriately
669 short loop, put the value into a pseudo and then put the pseudo
672 For small register classes, also do this if this call uses
673 register parameters. This is to avoid reload conflicts while
674 loading the parameters registers. */
676 if ((! (GET_CODE (args[i].value) == REG
677 || (GET_CODE (args[i].value) == SUBREG
678 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
679 && args[i].mode != BLKmode
680 && rtx_cost (args[i].value, SET) > 2
681 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
682 || preserve_subexpressions_p ()))
683 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
687 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
689 /* The argument list is the property of the called routine and it
690 may clobber it. If the fixed area has been used for previous
691 parameters, we must save and restore it. */
/* Save the in-use portion of the fixed register-parameter stack area
   into SAVE_AREA (a stack temp for BLKmode, else a pseudo), recording
   the saved byte range through *LOW_TO_SAVE / *HIGH_TO_SAVE.
   NOTE(review): several declarations, loop bodies, #else/#endif lines,
   the `return save_area;`, and the closing brace are elided in this
   excerpt.  */
693 save_fixed_argument_area (reg_parm_stack_space, argblock,
694 low_to_save, high_to_save)
695 int reg_parm_stack_space;
701 rtx save_area = NULL_RTX;
703 /* Compute the boundary of the that needs to be saved, if any. */
704 #ifdef ARGS_GROW_DOWNWARD
705 for (i = 0; i < reg_parm_stack_space + 1; i++)
707 for (i = 0; i < reg_parm_stack_space; i++)
/* Skip bytes of the area that have never been used by earlier args.  */
710 if (i >= highest_outgoing_arg_in_use
711 || stack_usage_map[i] == 0)
714 if (*low_to_save == -1)
/* Something in the area is live: save it.  */
720 if (*low_to_save >= 0)
722 int num_to_save = *high_to_save - *low_to_save + 1;
723 enum machine_mode save_mode
724 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
727 /* If we don't have the required alignment, must do this in BLKmode. */
728 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
729 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
/* Address the live slice of the argument block (direction depends on
   which way the argument area grows).  */
732 #ifdef ARGS_GROW_DOWNWARD
733 stack_area = gen_rtx_MEM (save_mode,
734 memory_address (save_mode,
735 plus_constant (argblock,
738 stack_area = gen_rtx_MEM (save_mode,
739 memory_address (save_mode,
740 plus_constant (argblock,
743 if (save_mode == BLKmode)
745 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
746 emit_block_move (validize_mem (save_area), stack_area,
747 GEN_INT (num_to_save),
748 PARM_BOUNDARY / BITS_PER_UNIT);
/* Small, aligned area: a single register move suffices.  */
752 save_area = gen_reg_rtx (save_mode);
753 emit_move_insn (save_area, stack_area);
/* Inverse of save_fixed_argument_area: copy SAVE_AREA back into the
   fixed register-parameter area of ARGBLOCK covering bytes LOW_TO_SAVE
   through HIGH_TO_SAVE.
   NOTE(review): parameter declarations, the stack_area declaration
   line, #else/#endif lines, and the closing brace are elided in this
   excerpt.  */
760 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
766 enum machine_mode save_mode = GET_MODE (save_area);
767 #ifdef ARGS_GROW_DOWNWARD
769 = gen_rtx_MEM (save_mode,
770 memory_address (save_mode,
771 plus_constant (argblock,
775 = gen_rtx_MEM (save_mode,
776 memory_address (save_mode,
777 plus_constant (argblock,
/* A plain move restores a register-saved area; a block move restores a
   BLKmode (unaligned/large) one.  */
781 if (save_mode != BLKmode)
782 emit_move_insn (stack_area, save_area);
784 emit_block_move (stack_area, validize_mem (save_area),
785 GEN_INT (high_to_save - low_to_save + 1),
786 PARM_BOUNDARY / BITS_PER_UNIT);
790 /* If any elements in ARGS refer to parameters that are to be passed in
791 registers, but not in memory, and whose alignment does not permit a
792 direct copy into registers. Copy the values into a group of pseudos
793 which we will later copy into the appropriate hard registers.
795 Pseudos for each unaligned argument will be stored into the array
796 args[argnum].aligned_regs. The caller is responsible for deallocating
797 the aligned_regs array if it is nonzero. */
/* NOTE(review): the num_actuals parameter declaration, the locals
   `i'/`j', a few statements, and the closing brace are elided in this
   excerpt.  */
800 store_unaligned_arguments_into_pseudos (args, num_actuals)
801 struct arg_data *args;
/* Only BLKmode register args whose type alignment is below what a
   direct word copy needs require this treatment.  */
806 for (i = 0; i < num_actuals; i++)
807 if (args[i].reg != 0 && ! args[i].pass_on_stack
808 && args[i].mode == BLKmode
809 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
810 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
812 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
813 int big_endian_correction = 0;
/* One pseudo per word actually passed in registers (PARTIAL if only
   part of the arg goes in registers).  */
815 args[i].n_aligned_regs
816 = args[i].partial ? args[i].partial
817 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
819 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
820 * args[i].n_aligned_regs);
822 /* Structures smaller than a word are aligned to the least
823 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
824 this means we must skip the empty high order bytes when
825 calculating the bit offset. */
826 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
827 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
829 for (j = 0; j < args[i].n_aligned_regs; j++)
831 rtx reg = gen_reg_rtx (word_mode);
832 rtx word = operand_subword_force (args[i].value, j, BLKmode);
833 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
834 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
836 args[i].aligned_regs[j] = reg;
838 /* There is no need to restrict this code to loading items
839 in TYPE_ALIGN sized hunks. The bitfield instructions can
840 load up entire word sized registers efficiently.
842 ??? This may not be needed anymore.
843 We use to emit a clobber here but that doesn't let later
844 passes optimize the instructions we emit. By storing 0 into
845 the register later passes know the first AND to zero out the
846 bitfield being set in the register is unnecessary. The store
847 of 0 will be deleted as will at least the first AND. */
849 emit_move_insn (reg, const0_rtx);
851 bytes -= bitsize / BITS_PER_UNIT;
852 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
853 extract_bit_field (word, bitsize, 0, 1,
856 bitalign / BITS_PER_UNIT,
858 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
863 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
866 NUM_ACTUALS is the total number of parameters.
868 N_NAMED_ARGS is the total number of named arguments.
870 FNDECL is the tree code for the target of this call (if known)
872 ARGS_SO_FAR holds state needed by the target to know where to place
875 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
876 for arguments which are passed in registers.
878 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
879 and may be modified by this routine.
881 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
882 flags which may may be modified by this routine. */
/* NOTE(review): many lines of this large function are elided in this
   excerpt — some parameter declarations, locals (`i', `inc', `p',
   `argpos', `unsignedp', `copy', ...), several #else/#endif lines, and
   the closing brace.  */
885 initialize_argument_information (num_actuals, args, args_size, n_named_args,
886 actparms, fndecl, args_so_far,
887 reg_parm_stack_space, old_stack_level,
888 old_pending_adj, must_preallocate, is_const)
889 int num_actuals ATTRIBUTE_UNUSED;
890 struct arg_data *args;
891 struct args_size *args_size;
892 int n_named_args ATTRIBUTE_UNUSED;
895 CUMULATIVE_ARGS *args_so_far;
896 int reg_parm_stack_space;
897 rtx *old_stack_level;
898 int *old_pending_adj;
899 int *must_preallocate;
902 /* 1 if scanning parms front to back, -1 if scanning back to front. */
905 /* Count arg position in order args appear. */
908 struct args_size alignment_pad;
912 args_size->constant = 0;
915 /* In this loop, we consider args in the order they are written.
916 We fill up ARGS from the front or from the back if necessary
917 so that in any case the first arg to be pushed ends up at the front. */
919 #ifdef PUSH_ARGS_REVERSED
920 i = num_actuals - 1, inc = -1;
921 /* In this case, must reverse order of args
922 so that we compute and push the last arg first. */
927 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
928 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
930 tree type = TREE_TYPE (TREE_VALUE (p));
932 enum machine_mode mode;
934 args[i].tree_value = TREE_VALUE (p);
936 /* Replace erroneous argument with constant zero. */
937 if (type == error_mark_node || TYPE_SIZE (type) == 0)
938 args[i].tree_value = integer_zero_node, type = integer_type_node;
940 /* If TYPE is a transparent union, pass things the way we would
941 pass the first field of the union. We have already verified that
942 the modes are the same. */
943 if (TYPE_TRANSPARENT_UNION (type))
944 type = TREE_TYPE (TYPE_FIELDS (type));
946 /* Decide where to pass this arg.
948 args[i].reg is nonzero if all or part is passed in registers.
950 args[i].partial is nonzero if part but not all is passed in registers,
951 and the exact value says how many words are passed in registers.
953 args[i].pass_on_stack is nonzero if the argument must at least be
954 computed on the stack. It may then be loaded back into registers
955 if args[i].reg is nonzero.
957 These decisions are driven by the FUNCTION_... macros and must agree
958 with those made by function.c. */
960 /* See if this argument should be passed by invisible reference. */
961 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
962 && contains_placeholder_p (TYPE_SIZE (type)))
963 || TREE_ADDRESSABLE (type)
964 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
965 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
966 type, argpos < n_named_args)
970 /* If we're compiling a thunk, pass through invisible
971 references instead of making a copy. */
972 if (current_function_is_thunk
973 #ifdef FUNCTION_ARG_CALLEE_COPIES
974 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
975 type, argpos < n_named_args)
976 /* If it's in a register, we must make a copy of it too. */
977 /* ??? Is this a sufficient test? Is there a better one? */
978 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
979 && REG_P (DECL_RTL (args[i].tree_value)))
980 && ! TREE_ADDRESSABLE (type))
984 /* C++ uses a TARGET_EXPR to indicate that we want to make a
985 new object from the argument. If we are passing by
986 invisible reference, the callee will do that for us, so we
987 can strip off the TARGET_EXPR. This is not always safe,
988 but it is safe in the only case where this is a useful
989 optimization; namely, when the argument is a plain object.
990 In that case, the frontend is just asking the backend to
991 make a bitwise copy of the argument. */
993 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
994 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
995 (args[i].tree_value, 1)))
997 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
998 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
/* Pass the address of the object itself (no copy).  */
1000 args[i].tree_value = build1 (ADDR_EXPR,
1001 build_pointer_type (type),
1002 args[i].tree_value);
1003 type = build_pointer_type (type);
1007 /* We make a copy of the object and pass the address to the
1008 function being called. */
/* Variable-sized (or stack-check-exceeding) objects get dynamic stack
   space; the stack level is saved once so it can be restored after the
   call.  */
1011 if (TYPE_SIZE (type) == 0
1012 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1013 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1014 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1015 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1016 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1018 /* This is a variable-sized object. Make space on the stack
1020 rtx size_rtx = expr_size (TREE_VALUE (p));
1022 if (*old_stack_level == 0)
1024 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1025 *old_pending_adj = pending_stack_adjust;
1026 pending_stack_adjust = 0;
1029 copy = gen_rtx_MEM (BLKmode,
1030 allocate_dynamic_stack_space (size_rtx,
1032 TYPE_ALIGN (type)));
/* Fixed-size object: a stack temporary suffices.  */
1036 int size = int_size_in_bytes (type);
1037 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1040 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1042 store_expr (args[i].tree_value, copy, 0);
/* Pass the address of the copy.  */
1045 args[i].tree_value = build1 (ADDR_EXPR,
1046 build_pointer_type (type),
1047 make_tree (type, copy));
1048 type = build_pointer_type (type);
1052 mode = TYPE_MODE (type);
1053 unsignedp = TREE_UNSIGNED (type);
1055 #ifdef PROMOTE_FUNCTION_ARGS
1056 mode = promote_mode (type, mode, &unsignedp, 1);
/* Record the target's decisions for this argument.  */
1059 args[i].unsignedp = unsignedp;
1060 args[i].mode = mode;
1061 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1062 argpos < n_named_args);
1063 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1066 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1067 argpos < n_named_args);
1070 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1072 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1073 it means that we are to pass this arg in the register(s) designated
1074 by the PARALLEL, but also to pass it in the stack. */
1075 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1076 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1077 args[i].pass_on_stack = 1;
1079 /* If this is an addressable type, we must preallocate the stack
1080 since we must evaluate the object into its final location.
1082 If this is to be passed in both registers and the stack, it is simpler
1084 if (TREE_ADDRESSABLE (type)
1085 || (args[i].pass_on_stack && args[i].reg != 0))
1086 *must_preallocate = 1;
1088 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1089 we cannot consider this function call constant. */
1090 if (TREE_ADDRESSABLE (type))
1093 /* Compute the stack-size of this argument. */
1094 if (args[i].reg == 0 || args[i].partial != 0
1095 || reg_parm_stack_space > 0
1096 || args[i].pass_on_stack)
1097 locate_and_pad_parm (mode, type,
1098 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1103 fndecl, args_size, &args[i].offset,
1104 &args[i].size, &alignment_pad);
1106 #ifndef ARGS_GROW_DOWNWARD
1107 args[i].slot_offset = *args_size;
1110 args[i].alignment_pad = alignment_pad;
1112 /* If a part of the arg was put into registers,
1113 don't include that part in the amount pushed. */
1114 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1115 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1116 / (PARM_BOUNDARY / BITS_PER_UNIT)
1117 * (PARM_BOUNDARY / BITS_PER_UNIT));
1119 /* Update ARGS_SIZE, the total stack space for args so far. */
1121 args_size->constant += args[i].size.constant;
1122 if (args[i].size.var)
1124 ADD_PARM_SIZE (*args_size, args[i].size.var);
1127 /* Since the slot offset points to the bottom of the slot,
1128 we must record it after incrementing if the args grow down. */
1129 #ifdef ARGS_GROW_DOWNWARD
1130 args[i].slot_offset = *args_size;
1132 args[i].slot_offset.constant = -args_size->constant;
1135 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1139 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1140 have been used, etc. */
1142 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1143 argpos < n_named_args);
1147 /* Update ARGS_SIZE to contain the total size for the argument block.
1148 Return the original constant component of the argument block's size.
1150 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1151 for arguments passed in registers. */
1154 compute_argument_block_size (reg_parm_stack_space, args_size)
1155      int reg_parm_stack_space;
1156      struct args_size *args_size;
     /* Remember the constant component before rounding/adjustment;
        this is the value returned to the caller.  */
1158   int unadjusted_args_size = args_size->constant;
1160   /* Compute the actual size of the argument block required.  The variable
1161      and constant sizes must be combined, the size may have to be rounded,
1162      and there may be a minimum required size.  */
1166       args_size->var = ARGS_SIZE_TREE (*args_size);
1167       args_size->constant = 0;
1169 #ifdef PREFERRED_STACK_BOUNDARY
     /* Round the variable-sized block up to the preferred stack
        boundary (STACK_BYTES is that boundary expressed in bytes).  */
1170       if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1171 	args_size->var = round_up (args_size->var, STACK_BYTES);
1174       if (reg_parm_stack_space > 0)
          /* The block must be at least as large as the space reserved
             for register parameters.  */
1177 	    = size_binop (MAX_EXPR, args_size->var,
1178 			  size_int (reg_parm_stack_space));
1180 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1181 	  /* The area corresponding to register parameters is not to count in
1182 	     the size of the block we need.  So make the adjustment.  */
1184 	    = size_binop (MINUS_EXPR, args_size->var,
1185 			  size_int (reg_parm_stack_space));
1191 #ifdef PREFERRED_STACK_BOUNDARY
     /* Round constant size + pending_stack_adjust up to a multiple of
        STACK_BYTES, then subtract the pending adjustment back out so
        the eventual total (after the pending adjust is emitted) lands
        on the boundary.  */
1192       args_size->constant = (((args_size->constant
1193 			       + pending_stack_adjust
1195 			      / STACK_BYTES * STACK_BYTES)
1196 			     - pending_stack_adjust);
1199       args_size->constant = MAX (args_size->constant,
1200 				 reg_parm_stack_space);
1202 #ifdef MAYBE_REG_PARM_STACK_SPACE
1203       if (reg_parm_stack_space == 0)
1204 	args_size->constant = 0;
1207 #ifndef OUTGOING_REG_PARM_STACK_SPACE
     /* When the caller (not this function) reserves the register-parm
        area, it does not count toward the block we must allocate.  */
1208       args_size->constant -= reg_parm_stack_space;
1211   return unadjusted_args_size;
1214 /* Precompute parameters as needed for a function call.
1216 IS_CONST indicates the target function is a pure function.
1218 MUST_PREALLOCATE indicates that we must preallocate stack space for
1219 any stack arguments.
1221 NUM_ACTUALS is the number of arguments.
1223 ARGS is an array containing information for each argument; this routine
1224 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1226 ARGS_SIZE contains information about the size of the arg list. */
1229 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1231      int must_preallocate;
1233      struct arg_data *args;
1234      struct args_size *args_size;
1238   /* If this function call is cse'able, precompute all the parameters.
1239      Note that if the parameter is constructed into a temporary, this will
1240      cause an additional copy because the parameter will be constructed
1241      into a temporary location and then copied into the outgoing arguments.
1242      If a parameter contains a call to alloca and this function uses the
1243      stack, precompute the parameter.  */
1245   /* If we preallocated the stack space, and some arguments must be passed
1246      on the stack, then we must precompute any parameter which contains a
1247      function call which will store arguments on the stack.
1248      Otherwise, evaluating the parameter may clobber previous parameters
1249      which have already been stored into the stack.  */
1251   for (i = 0; i < num_actuals; i++)
1253 	|| ((args_size->var != 0 || args_size->constant != 0)
1254 	    && calls_function (args[i].tree_value, 1))
1255 	|| (must_preallocate
1256 	    && (args_size->var != 0 || args_size->constant != 0)
1257 	    && calls_function (args[i].tree_value, 0)))
1259 	/* If this is an addressable type, we cannot pre-evaluate it.  */
1260 	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
        /* Evaluate now; INITIAL_VALUE records the pre-call RTL so a
           cse'able (const) call can be recognized later.  */
1265 	args[i].initial_value = args[i].value
1266 	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1268 	preserve_temp_slots (args[i].value);
1271 	/* ANSI doesn't require a sequence point here,
1272 	   but PCC has one, so this will avoid some problems.  */
1275 	args[i].initial_value = args[i].value
1276 	  = protect_from_queue (args[i].initial_value, 0);
        /* If the expression's natural mode differs from the mode the
           argument is passed in (e.g. after argument promotion),
           convert the precomputed value now.  */
1278 	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1280 	    = convert_modes (args[i].mode,
1281 			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1282 			     args[i].value, args[i].unsignedp);
1286 /* Given the current state of MUST_PREALLOCATE and information about
1287 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1288 compute and return the final value for MUST_PREALLOCATE. */
1291 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1292      int must_preallocate;
1294      struct arg_data *args;
1295      struct args_size *args_size;
1297   /* See if we have or want to preallocate stack space.
1299      If we would have to push a partially-in-regs parm
1300      before other stack parms, preallocate stack space instead.
1302      If the size of some parm is not a multiple of the required stack
1303      alignment, we must preallocate.
1305      If the total size of arguments that would otherwise create a copy in
1306      a temporary (such as a CALL) is more than half the total argument list
1307      size, preallocation is faster.
1309      Another reason to preallocate is if we have a machine (like the m88k)
1310      where stack alignment is required to be maintained between every
1311      pair of insns, not just when the call is made.  However, we assume here
1312      that such machines either do not have push insns (and hence preallocation
1313      would occur anyway) or the problem is taken care of with
1316   if (! must_preallocate)
       /* PARTIAL_SEEN is set once a partially-in-registers argument has
          been scanned; COPY_TO_EVALUATE_SIZE totals the bytes of BLKmode
          arguments whose evaluation would need a temporary copy.  */
1318       int partial_seen = 0;
1319       int copy_to_evaluate_size = 0;
1322       for (i = 0; i < num_actuals && ! must_preallocate; i++)
1324 	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
          /* A stack-only arg following a partial-in-regs arg forces
             preallocation (see the comment at the top).  */
1326 	  else if (partial_seen && args[i].reg == 0)
1327 	    must_preallocate = 1;
1329 	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1330 	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1331 		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1332 		  || TREE_CODE (args[i].tree_value) == COND_EXPR
1333 		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1334 	    copy_to_evaluate_size
1335 	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
       /* Heuristic: if copies would cover half or more of the argument
          block, preallocating is cheaper than pushing.  */
1338       if (copy_to_evaluate_size * 2 >= args_size->constant
1339 	  && args_size->constant > 0)
1340 	must_preallocate = 1;
1342   return must_preallocate;
1345 /* If we preallocated stack space, compute the address of each argument
1346 and store it into the ARGS array.
1348 We need not ensure it is a valid memory address here; it will be
1349 validized when it is used.
1351 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1354 compute_argument_addresses (args, argblock, num_actuals)
1355      struct arg_data *args;
     /* Split ARGBLOCK into a base register plus a constant offset so
        each argument address can be formed with plus_constant.  */
1361       rtx arg_reg = argblock;
1362       int i, arg_offset = 0;
1364       if (GET_CODE (argblock) == PLUS)
1365 	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1367       for (i = 0; i < num_actuals; i++)
1369 	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
1370 	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1373 	  /* Skip this parm if it will not be passed on the stack.  */
1374 	  if (! args[i].pass_on_stack && args[i].reg != 0)
          /* Form the address of the argument's data: constant offsets
             fold via plus_constant, otherwise build an explicit PLUS.  */
1377 	  if (GET_CODE (offset) == CONST_INT)
1378 	    addr = plus_constant (arg_reg, INTVAL (offset));
1380 	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1382 	  addr = plus_constant (addr, arg_offset);
1383 	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1386 			      AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
          /* Likewise build STACK_SLOT from the slot offset, which
             points at the bottom of the whole stack slot.  */
1388 	  if (GET_CODE (slot_offset) == CONST_INT)
1389 	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
1391 	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1393 	  addr = plus_constant (addr, arg_offset);
1394 	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1399 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1400 in a call instruction.
1402 FNDECL is the tree node for the target function. For an indirect call
1403 FNDECL will be NULL_TREE.
1405 EXP is the CALL_EXPR for this call. */
1408 rtx_for_function_call (fndecl, exp)
1414   /* Get the function to call, in the form of RTL.  */
     /* Known function: use its SYMBOL_REF directly.  */
1417       /* If this is the first use of the function, see if we need to
1418 	 make an external definition for it.  */
1419       if (! TREE_USED (fndecl))
1421 	  assemble_external (fndecl);
1422 	  TREE_USED (fndecl) = 1;
1425       /* Get a SYMBOL_REF rtx for the function address.  */
1426       funexp = XEXP (DECL_RTL (fndecl), 0);
     /* Indirect call: evaluate the pointer-to-function expression.  */
1429     /* Generate an rtx (probably a pseudo-register) for the address.  */
1434 	expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1435       pop_temp_slots ();	/* FUNEXP can't be BLKmode */
1437   /* Check the function is executable.  */
1438   if (current_function_check_memory_usage)
1440 #ifdef POINTERS_EXTEND_UNSIGNED
1441       /* It might be OK to convert funexp in place, but there's
1442 	 a lot going on between here and when it happens naturally
1443 	 that this seems safer. */
1444       funaddr = convert_memory_address (Pmode, funexp);
     /* Emit the -fcheck-memory-usage runtime check on the target.  */
1446       emit_library_call (chkr_check_exec_libfunc, 1,
1455 /* Do the register loads required for any wholly-register parms or any
1456 parms which are passed both on the stack and in a register. Their
1457 expressions were already evaluated.
1459 Mark all register-parms as living through the call, putting these USE
1460 insns in the CALL_INSN_FUNCTION_USAGE field. */
1463 load_register_parameters (args, num_actuals, call_fusage)
1464      struct arg_data *args;
1470 #ifdef LOAD_ARGS_REVERSED
1471   for (i = num_actuals - 1; i >= 0; i--)
1473   for (i = 0; i < num_actuals; i++)
1476       rtx reg = args[i].reg;
1477       int partial = args[i].partial;
1482 	  /* Set to non-negative if must move a word at a time, even if just
1483 	     one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
1484 	     we just use a normal move insn.  This value can be zero if the
1485 	     argument is a zero size structure with no fields.  */
1486 	  nregs = (partial ? partial
1487 		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1488 		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1489 			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1492 	  /* Handle calls that pass values in multiple non-contiguous
1493 	     locations.  The Irix 6 ABI has examples of this.  */
1495 	  if (GET_CODE (reg) == PARALLEL)
1497 	    emit_group_load (reg, args[i].value,
1498 			     int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1499 			     (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1503 	  /* If simple case, just do move.  If normal partial, store_one_arg
1504 	     has already loaded the register for us.  In all other cases,
1505 	     load the register(s) from memory.  */
1507 	  else if (nregs == -1)
1508 	    emit_move_insn (reg, args[i].value);
1510 	  /* If we have pre-computed the values to put in the registers in
1511 	     the case of non-aligned structures, copy them in now.  */
1513 	  else if (args[i].n_aligned_regs != 0)
1514 	    for (j = 0; j < args[i].n_aligned_regs; j++)
1515 	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1516 			      args[i].aligned_regs[j]);
1518 	  else if (partial == 0 || args[i].pass_on_stack)
1519 	    move_block_to_reg (REGNO (reg),
1520 			       validize_mem (args[i].value), nregs,
	  /* Record the argument registers in CALL_FUSAGE so the call insn
	     is known to use them (kept live across the call).  */
1523 	  /* Handle calls that pass values in multiple non-contiguous
1524 	     locations.  The Irix 6 ABI has examples of this.  */
1525 	  if (GET_CODE (reg) == PARALLEL)
1526 	    use_group_regs (call_fusage, reg);
1527 	  else if (nregs == -1)
1528 	    use_reg (call_fusage, reg);
	  /* A zero NREGS (zero-sized struct) still marks one register used.  */
1530 	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1535 /* Generate all the code for a function call
1536 and return an rtx for its value.
1537 Store the value in TARGET (specified as an rtx) if convenient.
1538 If the value is stored in TARGET then TARGET is returned.
1539 If IGNORE is nonzero, then we ignore the value of the function call. */
1542 expand_call (exp, target, ignore)
1547 /* List of actual parameters. */
1548 tree actparms = TREE_OPERAND (exp, 1);
1549 /* RTX for the function to be called. */
1551 /* Data type of the function. */
1553 /* Declaration of the function being called,
1554 or 0 if the function is computed (not known by name). */
1558 /* Register in which non-BLKmode value will be returned,
1559 or 0 if no value or if value is BLKmode. */
1561 /* Address where we should return a BLKmode value;
1562 0 if value not BLKmode. */
1563 rtx structure_value_addr = 0;
1564 /* Nonzero if that address is being passed by treating it as
1565 an extra, implicit first parameter. Otherwise,
1566 it is passed by being copied directly into struct_value_rtx. */
1567 int structure_value_addr_parm = 0;
1568 /* Size of aggregate value wanted, or zero if none wanted
1569 or if we are using the non-reentrant PCC calling convention
1570 or expecting the value in registers. */
1571 HOST_WIDE_INT struct_value_size = 0;
1572 /* Nonzero if called function returns an aggregate in memory PCC style,
1573 by returning the address of where to find it. */
1574 int pcc_struct_value = 0;
1576 /* Number of actual parameters in this call, including struct value addr. */
1578 /* Number of named args. Args after this are anonymous ones
1579 and they must all go on the stack. */
1582 /* Vector of information about each argument.
1583 Arguments are numbered in the order they will be pushed,
1584 not the order they are written. */
1585 struct arg_data *args;
1587 /* Total size in bytes of all the stack-parms scanned so far. */
1588 struct args_size args_size;
1589 /* Size of arguments before any adjustments (such as rounding). */
1590 int unadjusted_args_size;
1591 /* Data on reg parms scanned so far. */
1592 CUMULATIVE_ARGS args_so_far;
1593 /* Nonzero if a reg parm has been scanned. */
1595 /* Nonzero if this is an indirect function call. */
1597 /* Nonzero if we must avoid push-insns in the args for this call.
1598 If stack space is allocated for register parameters, but not by the
1599 caller, then it is preallocated in the fixed part of the stack frame.
1600 So the entire argument block must then be preallocated (i.e., we
1601 ignore PUSH_ROUNDING in that case). */
1603 #ifdef PUSH_ROUNDING
1604 int must_preallocate = 0;
1606 int must_preallocate = 1;
1609 /* Size of the stack reserved for parameter registers. */
1610 int reg_parm_stack_space = 0;
1612 /* Address of space preallocated for stack parms
1613 (on machines that lack push insns), or 0 if space not preallocated. */
1616 /* Nonzero if it is plausible that this is a call to alloca. */
1618 /* Nonzero if this is a call to malloc or a related function. */
1620 /* Nonzero if this is a call to setjmp or a related function. */
1622 /* Nonzero if this is a call to `longjmp'. */
1624 /* Nonzero if this is a call to an inline function. */
1625 int is_integrable = 0;
1626 /* Nonzero if this is a call to a `const' function.
1627 Note that only explicitly named functions are handled as `const' here. */
1629 /* Nonzero if this is a call to a `volatile' function. */
1630 int is_volatile = 0;
1631 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1632 /* Define the boundary of the register parm stack space that needs to be
1634 int low_to_save = -1, high_to_save;
1635 rtx save_area = 0; /* Place that it is saved */
1638 #ifdef ACCUMULATE_OUTGOING_ARGS
1639 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1640 char *initial_stack_usage_map = stack_usage_map;
1641 int old_stack_arg_under_construction;
1644 rtx old_stack_level = 0;
1645 int old_pending_adj = 0;
1646 int old_inhibit_defer_pop = inhibit_defer_pop;
1647 rtx call_fusage = 0;
1651 /* The value of the function call can be put in a hard register. But
1652 if -fcheck-memory-usage, code which invokes functions (and thus
1653 damages some hard registers) can be inserted before using the value.
1654 So, target is always a pseudo-register in that case. */
1655 if (current_function_check_memory_usage)
1658 /* See if we can find a DECL-node for the actual function.
1659 As a result, decide whether this is a call to an integrable function. */
1661 p = TREE_OPERAND (exp, 0);
1662 if (TREE_CODE (p) == ADDR_EXPR)
1664 fndecl = TREE_OPERAND (p, 0);
1665 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1670 && fndecl != current_function_decl
1671 && DECL_INLINE (fndecl)
1672 && DECL_SAVED_INSNS (fndecl)
1673 && DECL_SAVED_INSNS (fndecl)->inlinable)
1675 else if (! TREE_ADDRESSABLE (fndecl))
1677 /* In case this function later becomes inlinable,
1678 record that there was already a non-inline call to it.
1680 Use abstraction instead of setting TREE_ADDRESSABLE
1682 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1685 warning_with_decl (fndecl, "can't inline call to `%s'");
1686 warning ("called from here");
1688 mark_addressable (fndecl);
1691 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1692 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1695 if (TREE_THIS_VOLATILE (fndecl))
1700 /* If we don't have specific function to call, see if we have a
1701 constant or `noreturn' function from the type. */
1704 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1705 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1708 #ifdef REG_PARM_STACK_SPACE
1709 #ifdef MAYBE_REG_PARM_STACK_SPACE
1710 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1712 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1716 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1717 if (reg_parm_stack_space > 0)
1718 must_preallocate = 1;
1721 /* Warn if this value is an aggregate type,
1722 regardless of which calling convention we are using for it. */
1723 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1724 warning ("function call has aggregate value");
1726 /* Set up a place to return a structure. */
1728 /* Cater to broken compilers. */
1729 if (aggregate_value_p (exp))
1731 /* This call returns a big structure. */
1734 #ifdef PCC_STATIC_STRUCT_RETURN
1736 pcc_struct_value = 1;
1737 /* Easier than making that case work right. */
1740 /* In case this is a static function, note that it has been
1742 if (! TREE_ADDRESSABLE (fndecl))
1743 mark_addressable (fndecl);
1747 #else /* not PCC_STATIC_STRUCT_RETURN */
1749 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1751 if (target && GET_CODE (target) == MEM)
1752 structure_value_addr = XEXP (target, 0);
1755 /* Assign a temporary to hold the value. */
1758 /* For variable-sized objects, we must be called with a target
1759 specified. If we were to allocate space on the stack here,
1760 we would have no way of knowing when to free it. */
1762 if (struct_value_size < 0)
1765 /* This DECL is just something to feed to mark_addressable;
1766 it doesn't get pushed. */
1767 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1768 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1769 mark_addressable (d);
1770 mark_temp_addr_taken (DECL_RTL (d));
1771 structure_value_addr = XEXP (DECL_RTL (d), 0);
1776 #endif /* not PCC_STATIC_STRUCT_RETURN */
1779 /* If called function is inline, try to integrate it. */
1784 #ifdef ACCUMULATE_OUTGOING_ARGS
1785 rtx before_call = get_last_insn ();
1788 temp = expand_inline_function (fndecl, actparms, target,
1789 ignore, TREE_TYPE (exp),
1790 structure_value_addr);
1792 /* If inlining succeeded, return. */
1793 if (temp != (rtx) (HOST_WIDE_INT) -1)
1795 #ifdef ACCUMULATE_OUTGOING_ARGS
1796 /* If the outgoing argument list must be preserved, push
1797 the stack before executing the inlined function if it
1800 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1801 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1804 if (stack_arg_under_construction || i >= 0)
1807 = before_call ? NEXT_INSN (before_call) : get_insns ();
1808 rtx insn = NULL_RTX, seq;
1810 /* Look for a call in the inline function code.
1811 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1812 nonzero then there is a call and it is not necessary
1813 to scan the insns. */
1815 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1816 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1817 if (GET_CODE (insn) == CALL_INSN)
1822 /* Reserve enough stack space so that the largest
1823 argument list of any function call in the inline
1824 function does not overlap the argument list being
1825 evaluated. This is usually an overestimate because
1826 allocate_dynamic_stack_space reserves space for an
1827 outgoing argument list in addition to the requested
1828 space, but there is no way to ask for stack space such
1829 that an argument list of a certain length can be
1832 Add the stack space reserved for register arguments, if
1833 any, in the inline function. What is really needed is the
1834 largest value of reg_parm_stack_space in the inline
1835 function, but that is not available. Using the current
1836 value of reg_parm_stack_space is wrong, but gives
1837 correct results on all supported machines. */
1839 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1840 + reg_parm_stack_space);
1843 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1844 allocate_dynamic_stack_space (GEN_INT (adjust),
1845 NULL_RTX, BITS_PER_UNIT);
1848 emit_insns_before (seq, first_insn);
1849 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1854 /* If the result is equivalent to TARGET, return TARGET to simplify
1855 checks in store_expr. They can be equivalent but not equal in the
1856 case of a function that returns BLKmode. */
1857 if (temp != target && rtx_equal_p (temp, target))
1862 /* If inlining failed, mark FNDECL as needing to be compiled
1863 separately after all. If function was declared inline,
1865 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1866 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1868 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1869 warning ("called from here");
1871 mark_addressable (fndecl);
1874 function_call_count++;
1876 if (fndecl && DECL_NAME (fndecl))
1877 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1879 /* See if this is a call to a function that can return more than once
1880 or a call to longjmp or malloc. */
1881 special_function_p (fndecl, &returns_twice, &is_longjmp,
1882 &is_malloc, &may_be_alloca);
1885 current_function_calls_alloca = 1;
1887 /* Operand 0 is a pointer-to-function; get the type of the function. */
1888 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1889 if (! POINTER_TYPE_P (funtype))
1891 funtype = TREE_TYPE (funtype);
1893 /* When calling a const function, we must pop the stack args right away,
1894 so that the pop is deleted or moved with the call. */
1898 /* Don't let pending stack adjusts add up to too much.
1899 Also, do all pending adjustments now
1900 if there is any chance this might be a call to alloca. */
1902 if (pending_stack_adjust >= 32
1903 || (pending_stack_adjust > 0 && may_be_alloca))
1904 do_pending_stack_adjust ();
1906 /* Push the temporary stack slot level so that we can free any temporaries
1910 /* Start updating where the next arg would go.
1912 On some machines (such as the PA) indirect calls have a different
1913 calling convention than normal calls. The last argument in
1914 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1916 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1918 /* If struct_value_rtx is 0, it means pass the address
1919 as if it were an extra parameter. */
1920 if (structure_value_addr && struct_value_rtx == 0)
1922 /* If structure_value_addr is a REG other than
1923 virtual_outgoing_args_rtx, we can always use it.  If it
1924 is not a REG, we must always copy it into a register.
1925 If it is virtual_outgoing_args_rtx, we must copy it to another
1926 register in some cases. */
1927 rtx temp = (GET_CODE (structure_value_addr) != REG
1928 #ifdef ACCUMULATE_OUTGOING_ARGS
1929 || (stack_arg_under_construction
1930 && structure_value_addr == virtual_outgoing_args_rtx)
1932 ? copy_addr_to_reg (structure_value_addr)
1933 : structure_value_addr);
1936 = tree_cons (error_mark_node,
1937 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1940 structure_value_addr_parm = 1;
1943 /* Count the arguments and set NUM_ACTUALS. */
1944 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1947 /* Compute number of named args.
1948 Normally, don't include the last named arg if anonymous args follow.
1949 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1950 (If no anonymous args follow, the result of list_length is actually
1951 one too large. This is harmless.)
1953 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1954 zero, this machine will be able to place unnamed args that were passed in
1955 registers into the stack. So treat all args as named. This allows the
1956 insns emitting for a specific argument list to be independent of the
1957 function declaration.
1959 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1960 way to pass unnamed args in registers, so we must force them into
1963 if ((STRICT_ARGUMENT_NAMING
1964 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1965 && TYPE_ARG_TYPES (funtype) != 0)
1967 = (list_length (TYPE_ARG_TYPES (funtype))
1968 /* Don't include the last named arg. */
1969 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1970 /* Count the struct value address, if it is passed as a parm. */
1971 + structure_value_addr_parm);
1973 /* If we know nothing, treat all args as named. */
1974 n_named_args = num_actuals;
1976 /* Make a vector to hold all the information about each arg. */
1977 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1978 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1980   /* Build up entries in the ARGS array, compute the size of the arguments
1981 into ARGS_SIZE, etc. */
1982 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1983 actparms, fndecl, &args_so_far,
1984 reg_parm_stack_space, &old_stack_level,
1985 &old_pending_adj, &must_preallocate,
1988 #ifdef FINAL_REG_PARM_STACK_SPACE
1989 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1995 /* If this function requires a variable-sized argument list, don't try to
1996 make a cse'able block for this call. We may be able to do this
1997 eventually, but it is too complicated to keep track of what insns go
1998 in the cse'able block and which don't. */
2001 must_preallocate = 1;
2004 /* Compute the actual size of the argument block required. The variable
2005 and constant sizes must be combined, the size may have to be rounded,
2006 and there may be a minimum required size. */
2007 unadjusted_args_size
2008 = compute_argument_block_size (reg_parm_stack_space, &args_size);
2010 /* Now make final decision about preallocating stack space. */
2011 must_preallocate = finalize_must_preallocate (must_preallocate,
2012 num_actuals, args, &args_size);
2014 /* If the structure value address will reference the stack pointer, we must
2015 stabilize it. We don't need to do this if we know that we are not going
2016 to adjust the stack pointer in processing this call. */
2018 if (structure_value_addr
2019 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2020 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2022 #ifndef ACCUMULATE_OUTGOING_ARGS
2023 || args_size.constant
2026 structure_value_addr = copy_to_reg (structure_value_addr);
2028 /* Precompute any arguments as needed. */
2029 precompute_arguments (is_const, must_preallocate, num_actuals,
2032 /* Now we are about to start emitting insns that can be deleted
2033 if a libcall is deleted. */
2034 if (is_const || is_malloc)
2037 /* If we have no actual push instructions, or shouldn't use them,
2038 make space for all args right now. */
2040 if (args_size.var != 0)
2042 if (old_stack_level == 0)
2044 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2045 old_pending_adj = pending_stack_adjust;
2046 pending_stack_adjust = 0;
2047 #ifdef ACCUMULATE_OUTGOING_ARGS
2048 /* stack_arg_under_construction says whether a stack arg is
2049 being constructed at the old stack level. Pushing the stack
2050 gets a clean outgoing argument block. */
2051 old_stack_arg_under_construction = stack_arg_under_construction;
2052 stack_arg_under_construction = 0;
2055 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2059 /* Note that we must go through the motions of allocating an argument
2060 block even if the size is zero because we may be storing args
2061 in the area reserved for register arguments, which may be part of
2064 int needed = args_size.constant;
2066 /* Store the maximum argument space used. It will be pushed by
2067 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2070 if (needed > current_function_outgoing_args_size)
2071 current_function_outgoing_args_size = needed;
2073 if (must_preallocate)
2075 #ifdef ACCUMULATE_OUTGOING_ARGS
2076 /* Since the stack pointer will never be pushed, it is possible for
2077 the evaluation of a parm to clobber something we have already
2078 written to the stack. Since most function calls on RISC machines
2079 do not use the stack, this is uncommon, but must work correctly.
2081 Therefore, we save any area of the stack that was already written
2082 and that we are using. Here we set up to do this by making a new
2083 stack usage map from the old one. The actual save will be done
2086 Another approach might be to try to reorder the argument
2087 evaluations to avoid this conflicting stack usage. */
2089 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2090 /* Since we will be writing into the entire argument area, the
2091 map must be allocated for its entire size, not just the part that
2092 is the responsibility of the caller. */
2093 needed += reg_parm_stack_space;
2096 #ifdef ARGS_GROW_DOWNWARD
2097 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2100 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2103 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2105 if (initial_highest_arg_in_use)
2106 bcopy (initial_stack_usage_map, stack_usage_map,
2107 initial_highest_arg_in_use);
2109 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2110 bzero (&stack_usage_map[initial_highest_arg_in_use],
2111 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2114 /* The address of the outgoing argument list must not be copied to a
2115 register here, because argblock would be left pointing to the
2116 wrong place after the call to allocate_dynamic_stack_space below.
2119 argblock = virtual_outgoing_args_rtx;
2121 #else /* not ACCUMULATE_OUTGOING_ARGS */
2122 if (inhibit_defer_pop == 0)
2124 /* Try to reuse some or all of the pending_stack_adjust
2125 to get this space. Maybe we can avoid any pushing. */
2126 if (needed > pending_stack_adjust)
2128 needed -= pending_stack_adjust;
2129 pending_stack_adjust = 0;
2133 pending_stack_adjust -= needed;
2137 /* Special case this because overhead of `push_block' in this
2138 case is non-trivial. */
2140 argblock = virtual_outgoing_args_rtx;
2142 argblock = push_block (GEN_INT (needed), 0, 0);
2144 /* We only really need to call `copy_to_reg' in the case where push
2145 insns are going to be used to pass ARGBLOCK to a function
2146 call in ARGS. In that case, the stack pointer changes value
2147 from the allocation point to the call point, and hence
2148 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2149 But might as well always do it. */
2150 argblock = copy_to_reg (argblock);
2151 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2155 #ifdef ACCUMULATE_OUTGOING_ARGS
2156 /* The save/restore code in store_one_arg handles all cases except one:
2157 a constructor call (including a C function returning a BLKmode struct)
2158 to initialize an argument. */
2159 if (stack_arg_under_construction)
2161 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2162 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2164 rtx push_size = GEN_INT (args_size.constant);
2166 if (old_stack_level == 0)
2168 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2169 old_pending_adj = pending_stack_adjust;
2170 pending_stack_adjust = 0;
2171 /* stack_arg_under_construction says whether a stack arg is
2172 being constructed at the old stack level. Pushing the stack
2173 gets a clean outgoing argument block. */
2174 old_stack_arg_under_construction = stack_arg_under_construction;
2175 stack_arg_under_construction = 0;
2176 /* Make a new map for the new argument list. */
2177 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2178 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2179 highest_outgoing_arg_in_use = 0;
2181 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2183 /* If argument evaluation might modify the stack pointer, copy the
2184 address of the argument list to a register. */
2185 for (i = 0; i < num_actuals; i++)
2186 if (args[i].pass_on_stack)
2188 argblock = copy_addr_to_reg (argblock);
2193 compute_argument_addresses (args, argblock, num_actuals);
2195 #ifdef PUSH_ARGS_REVERSED
2196 #ifdef PREFERRED_STACK_BOUNDARY
2197 /* If we push args individually in reverse order, perform stack alignment
2198 before the first push (the last arg). */
2200 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2204 /* Don't try to defer pops if preallocating, not even from the first arg,
2205 since ARGBLOCK probably refers to the SP. */
2209 funexp = rtx_for_function_call (fndecl, exp);
2211 /* Figure out the register where the value, if any, will come back. */
2213 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2214 && ! structure_value_addr)
2216 if (pcc_struct_value)
2217 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2220 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
2223 /* Precompute all register parameters. It isn't safe to compute anything
2224 once we have started filling any specific hard regs. */
2225 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2227 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2229 /* Save the fixed argument area if it's part of the caller's frame and
2230 is clobbered by argument setup for this call. */
2231 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2232 &low_to_save, &high_to_save);
2236 /* Now store (and compute if necessary) all non-register parms.
2237 These come before register parms, since they can require block-moves,
2238 which could clobber the registers used for register parms.
2239 Parms which have partial registers are not stored here,
2240 but we do preallocate space here if they want that. */
2242 for (i = 0; i < num_actuals; i++)
2243 if (args[i].reg == 0 || args[i].pass_on_stack)
2244 store_one_arg (&args[i], argblock, may_be_alloca,
2245 args_size.var != 0, reg_parm_stack_space);
2247 /* If we have a parm that is passed in registers but not in memory
2248 and whose alignment does not permit a direct copy into registers,
2249 make a group of pseudos that correspond to each register that we
2251 if (STRICT_ALIGNMENT)
2252 store_unaligned_arguments_into_pseudos (args, num_actuals);
2254 /* Now store any partially-in-registers parm.
2255 This is the last place a block-move can happen. */
2257 for (i = 0; i < num_actuals; i++)
2258 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2259 store_one_arg (&args[i], argblock, may_be_alloca,
2260 args_size.var != 0, reg_parm_stack_space);
2262 #ifndef PUSH_ARGS_REVERSED
2263 #ifdef PREFERRED_STACK_BOUNDARY
2264 /* If we pushed args in forward order, perform stack alignment
2265 after pushing the last arg. */
2267 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2271 /* If register arguments require space on the stack and stack space
2272 was not preallocated, allocate stack space here for arguments
2273 passed in registers. */
2274 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2275 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2276 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2279 /* Pass the function the address in which to return a structure value. */
2280 if (structure_value_addr && ! structure_value_addr_parm)
2282 emit_move_insn (struct_value_rtx,
2284 force_operand (structure_value_addr,
2287 /* Mark the memory for the aggregate as write-only. */
2288 if (current_function_check_memory_usage)
2289 emit_library_call (chkr_set_right_libfunc, 1,
2291 structure_value_addr, Pmode,
2292 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2293 GEN_INT (MEMORY_USE_WO),
2294 TYPE_MODE (integer_type_node));
2296 if (GET_CODE (struct_value_rtx) == REG)
2297 use_reg (&call_fusage, struct_value_rtx);
2300 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2302 load_register_parameters (args, num_actuals, &call_fusage);
2304 /* Perform postincrements before actually calling the function. */
2307 /* All arguments and registers used for the call must be set up by now! */
2309 /* Generate the actual call instruction. */
2310 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2311 args_size.constant, struct_value_size,
2312 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2313 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2315 /* If call is cse'able, make appropriate pair of reg-notes around it.
2316 Test valreg so we don't crash; may safely ignore `const'
2317 if return type is void. Disable for PARALLEL return values, because
2318 we have no way to move such values into a pseudo register. */
2319 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2322 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2325 /* Mark the return value as a pointer if needed. */
2326 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2328 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2329 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2332 /* Construct an "equal form" for the value which mentions all the
2333 arguments in order as well as the function name. */
2334 #ifdef PUSH_ARGS_REVERSED
2335 for (i = 0; i < num_actuals; i++)
2336 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2338 for (i = num_actuals - 1; i >= 0; i--)
2339 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2341 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2343 insns = get_insns ();
2346 emit_libcall_block (insns, temp, valreg, note);
2352 /* Otherwise, just write out the sequence without a note. */
2353 rtx insns = get_insns ();
2360 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2363 /* The return value from a malloc-like function is a pointer. */
2364 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2365 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2367 emit_move_insn (temp, valreg);
2369 /* The return value from a malloc-like function can not alias
2371 last = get_last_insn ();
2373 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2375 /* Write out the sequence. */
2376 insns = get_insns ();
2382 /* For calls to `setjmp', etc., inform flow.c it should complain
2383 if nonvolatile values are live. */
2387 emit_note (name, NOTE_INSN_SETJMP);
2388 current_function_calls_setjmp = 1;
2392 current_function_calls_longjmp = 1;
2394 /* Notice functions that cannot return.
2395 If optimizing, insns emitted below will be dead.
2396 If not optimizing, they will exist, which is useful
2397 if the user uses the `return' command in the debugger. */
2399 if (is_volatile || is_longjmp)
2402 /* If value type not void, return an rtx for the value. */
2404 /* If there are cleanups to be called, don't use a hard reg as target.
2405 We need to double check this and see if it matters anymore. */
2406 if (any_pending_cleanups (1)
2407 && target && REG_P (target)
2408 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2411 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2414 target = const0_rtx;
2416 else if (structure_value_addr)
2418 if (target == 0 || GET_CODE (target) != MEM)
2420 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2421 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2422 structure_value_addr));
2423 MEM_SET_IN_STRUCT_P (target,
2424 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2427 else if (pcc_struct_value)
2429 /* This is the special C++ case where we need to
2430 know what the true target was. We take care to
2431 never use this value more than once in one expression. */
2432 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2433 copy_to_reg (valreg));
2434 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2436 /* Handle calls that return values in multiple non-contiguous locations.
2437 The Irix 6 ABI has examples of this. */
2438 else if (GET_CODE (valreg) == PARALLEL)
2440 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2444 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2445 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2446 preserve_temp_slots (target);
2449 if (! rtx_equal_p (target, valreg))
2450 emit_group_store (target, valreg, bytes,
2451 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2453 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2454 && GET_MODE (target) == GET_MODE (valreg))
2455 /* TARGET and VALREG cannot be equal at this point because the latter
2456 would not have REG_FUNCTION_VALUE_P true, while the former would if
2457 it were referring to the same register.
2459 If they refer to the same register, this move will be a no-op, except
2460 when function inlining is being done. */
2461 emit_move_insn (target, valreg);
2462 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2463 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2465 target = copy_to_reg (valreg);
2467 #ifdef PROMOTE_FUNCTION_RETURN
2468 /* If we promoted this return value, make the proper SUBREG. TARGET
2469 might be const0_rtx here, so be careful. */
2470 if (GET_CODE (target) == REG
2471 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2472 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2474 tree type = TREE_TYPE (exp);
2475 int unsignedp = TREE_UNSIGNED (type);
2477 /* If we don't promote as expected, something is wrong. */
2478 if (GET_MODE (target)
2479 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2482 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2483 SUBREG_PROMOTED_VAR_P (target) = 1;
2484 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2488 /* If size of args is variable or this was a constructor call for a stack
2489 argument, restore saved stack-pointer value. */
2491 if (old_stack_level)
2493 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2494 pending_stack_adjust = old_pending_adj;
2495 #ifdef ACCUMULATE_OUTGOING_ARGS
2496 stack_arg_under_construction = old_stack_arg_under_construction;
2497 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2498 stack_usage_map = initial_stack_usage_map;
2501 #ifdef ACCUMULATE_OUTGOING_ARGS
2504 #ifdef REG_PARM_STACK_SPACE
2506 restore_fixed_argument_area (save_area, argblock,
2507 high_to_save, low_to_save);
2510 /* If we saved any argument areas, restore them. */
2511 for (i = 0; i < num_actuals; i++)
2512 if (args[i].save_area)
2514 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2516 = gen_rtx_MEM (save_mode,
2517 memory_address (save_mode,
2518 XEXP (args[i].stack_slot, 0)));
2520 if (save_mode != BLKmode)
2521 emit_move_insn (stack_area, args[i].save_area);
2523 emit_block_move (stack_area, validize_mem (args[i].save_area),
2524 GEN_INT (args[i].size.constant),
2525 PARM_BOUNDARY / BITS_PER_UNIT);
2528 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2529 stack_usage_map = initial_stack_usage_map;
2533 /* If this was alloca, record the new stack level for nonlocal gotos.
2534 Check for the handler slots since we might not have a save area
2535 for non-local gotos. */
2537 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2538 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2542 /* Free up storage we no longer need. */
2543 for (i = 0; i < num_actuals; ++i)
2544 if (args[i].aligned_regs)
2545 free (args[i].aligned_regs);
2550 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2551 (emitting the queue unless NO_QUEUE is nonzero),
2552 for a value of mode OUTMODE,
2553 with NARGS different arguments, passed as alternating rtx values
2554 and machine_modes to convert them to.
2555 The rtx values should have been passed through protect_from_queue already.
2557 NO_QUEUE will be true if and only if the library call is a `const' call
2558 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2559 to the variable is_const in expand_call.
2561 NO_QUEUE must be true for const calls, because if it isn't, then
2562 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2563 and will be lost if the libcall sequence is optimized away.
2565 NO_QUEUE must be false for non-const calls, because if it isn't, the
2566 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2567 optimized. For instance, the instruction scheduler may incorrectly
2568 move memory references across the non-const call. */
/* Emit RTL for a call to the library function ORGFUN (a SYMBOL_REF),
   returning a value of mode OUTMODE, with NARGS (rtx value, machine_mode)
   pairs taken from the varargs list; the NO_QUEUE contract is described in
   the block comment above this function.
   NOTE(review): this extract is elided -- some original lines (braces,
   declarations, #else/#endif partners) are not visible here, so the
   comments below describe only what the visible code establishes.  */
2571 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2574 #ifndef ANSI_PROTOTYPES
2577 enum machine_mode outmode;
2581 /* Total size in bytes of all the stack-parms scanned so far. */
2582 struct args_size args_size;
2583 /* Size of arguments before any adjustments (such as rounding). */
2584 struct args_size original_args_size;
2585 register int argnum;
2589 struct args_size alignment_pad;
2591 CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the legitimized value, its mode, the register (if
   any) it is passed in, how many words go in registers when split, its
   stack offset/size, and a save area used to preserve clobbered stack. */
2592 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2593 struct args_size offset; struct args_size size; rtx save_area; };
2595 int old_inhibit_defer_pop = inhibit_defer_pop;
2596 rtx call_fusage = 0;
2597 int reg_parm_stack_space = 0;
2598 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2599 /* Define the boundary of the register parm stack space that needs to be
2601 int low_to_save = -1, high_to_save = 0;
2602 rtx save_area = 0; /* Place that it is saved */
2605 #ifdef ACCUMULATE_OUTGOING_ARGS
/* Snapshot the current stack-usage bookkeeping so it can be restored at
   the end of this call.  */
2606 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2607 char *initial_stack_usage_map = stack_usage_map;
2611 #ifdef REG_PARM_STACK_SPACE
2612 /* Size of the stack reserved for parameter registers. */
2613 #ifdef MAYBE_REG_PARM_STACK_SPACE
2614 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2616 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2620 VA_START (p, nargs);
/* Under K&R compilation the named parameters also arrive through the
   varargs list and must be fetched here, in declaration order.  */
2622 #ifndef ANSI_PROTOTYPES
2623 orgfun = va_arg (p, rtx);
2624 no_queue = va_arg (p, int);
2625 outmode = va_arg (p, enum machine_mode);
2626 nargs = va_arg (p, int);
2631 /* Copy all the libcall-arguments out of the varargs data
2632 and into a vector ARGVEC.
2634 Compute how to pass each argument. We only support a very small subset
2635 of the full argument passing conventions to limit complexity here since
2636 library functions shouldn't have many args. */
2638 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2639 bzero ((char *) argvec, nargs * sizeof (struct arg));
2642 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2644 args_size.constant = 0;
/* Scan the (value, mode) pairs: legitimize each value and decide where it
   will be passed -- register, stack, or split -- via FUNCTION_ARG and
   locate_and_pad_parm, accumulating the total stack size in ARGS_SIZE.  */
2649 for (count = 0; count < nargs; count++)
2651 rtx val = va_arg (p, rtx);
2652 enum machine_mode mode = va_arg (p, enum machine_mode);
2654 /* We cannot convert the arg value to the mode the library wants here;
2655 must do it earlier where we know the signedness of the arg. */
2657 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2660 /* On some machines, there's no way to pass a float to a library fcn.
2661 Pass it as a double instead. */
2662 #ifdef LIBGCC_NEEDS_DOUBLE
2663 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2664 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2667 /* There's no need to call protect_from_queue, because
2668 either emit_move_insn or emit_push_insn will do that. */
2670 /* Make sure it is a reasonable operand for a move or push insn. */
2671 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2672 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2673 val = force_operand (val, NULL_RTX);
2675 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2676 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2678 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2679 be viewed as just an efficiency improvement. */
2680 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2681 emit_move_insn (slot, val);
2682 val = force_operand (XEXP (slot, 0), NULL_RTX);
2687 argvec[count].value = val;
2688 argvec[count].mode = mode;
2690 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2691 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2693 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2694 argvec[count].partial
2695 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2697 argvec[count].partial = 0;
2700 locate_and_pad_parm (mode, NULL_TREE,
2701 argvec[count].reg && argvec[count].partial == 0,
2702 NULL_TREE, &args_size, &argvec[count].offset,
2703 &argvec[count].size, &alignment_pad);
/* Libcall arguments must have compile-time-constant size.  */
2705 if (argvec[count].size.var)
/* Words passed in registers do not occupy stack space unless the
   target reserves a register-parm stack area.  */
2708 if (reg_parm_stack_space == 0 && argvec[count].partial)
2709 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2711 if (argvec[count].reg == 0 || argvec[count].partial != 0
2712 || reg_parm_stack_space > 0)
2713 args_size.constant += argvec[count].size.constant;
2715 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2719 #ifdef FINAL_REG_PARM_STACK_SPACE
2720 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2724 /* If this machine requires an external definition for library
2725 functions, write one out. */
2726 assemble_external_libcall (fun);
/* Round the total stack argument size up to a multiple of STACK_BYTES
   (the preferred stack boundary in bytes), keeping the unrounded size
   in ORIGINAL_ARGS_SIZE for the later alignment adjustment.  */
2728 original_args_size = args_size;
2729 #ifdef PREFERRED_STACK_BOUNDARY
2730 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2731 / STACK_BYTES) * STACK_BYTES);
2734 args_size.constant = MAX (args_size.constant,
2735 reg_parm_stack_space);
2737 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2738 args_size.constant -= reg_parm_stack_space;
2741 if (args_size.constant > current_function_outgoing_args_size)
2742 current_function_outgoing_args_size = args_size.constant;
2744 #ifdef ACCUMULATE_OUTGOING_ARGS
2745 /* Since the stack pointer will never be pushed, it is possible for
2746 the evaluation of a parm to clobber something we have already
2747 written to the stack. Since most function calls on RISC machines
2748 do not use the stack, this is uncommon, but must work correctly.
2750 Therefore, we save any area of the stack that was already written
2751 and that we are using. Here we set up to do this by making a new
2752 stack usage map from the old one.
2754 Another approach might be to try to reorder the argument
2755 evaluations to avoid this conflicting stack usage. */
2757 needed = args_size.constant;
2759 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2760 /* Since we will be writing into the entire argument area, the
2761 map must be allocated for its entire size, not just the part that
2762 is the responsibility of the caller. */
2763 needed += reg_parm_stack_space;
2766 #ifdef ARGS_GROW_DOWNWARD
2767 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2770 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2773 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
/* Carry over the already-in-use bytes from the old map and clear the
   newly covered tail of the new map.  */
2775 if (initial_highest_arg_in_use)
2776 bcopy (initial_stack_usage_map, stack_usage_map,
2777 initial_highest_arg_in_use);
2779 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2780 bzero (&stack_usage_map[initial_highest_arg_in_use],
2781 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2784 /* The address of the outgoing argument list must not be copied to a
2785 register here, because argblock would be left pointing to the
2786 wrong place after the call to allocate_dynamic_stack_space below.
2789 argblock = virtual_outgoing_args_rtx;
2790 #else /* not ACCUMULATE_OUTGOING_ARGS */
2791 #ifndef PUSH_ROUNDING
2792 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2796 #ifdef PUSH_ARGS_REVERSED
2797 #ifdef PREFERRED_STACK_BOUNDARY
2798 /* If we push args individually in reverse order, perform stack alignment
2799 before the first push (the last arg). */
2801 anti_adjust_stack (GEN_INT (args_size.constant
2802 - original_args_size.constant));
2806 #ifdef PUSH_ARGS_REVERSED
2814 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2815 /* The argument list is the property of the called routine and it
2816 may clobber it. If the fixed area has been used for previous
2817 parameters, we must save and restore it.
2819 Here we compute the boundary of the area that needs to be saved, if any. */
2821 #ifdef ARGS_GROW_DOWNWARD
2822 for (count = 0; count < reg_parm_stack_space + 1; count++)
2824 for (count = 0; count < reg_parm_stack_space; count++)
2827 if (count >= highest_outgoing_arg_in_use
2828 || stack_usage_map[count] == 0)
2831 if (low_to_save == -1)
2832 low_to_save = count;
2834 high_to_save = count;
/* A live sub-range [low_to_save, high_to_save] was found: save it in a
   single register when size and alignment allow, else as a BLKmode
   stack temporary.  */
2837 if (low_to_save >= 0)
2839 int num_to_save = high_to_save - low_to_save + 1;
2840 enum machine_mode save_mode
2841 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2844 /* If we don't have the required alignment, must do this in BLKmode. */
2845 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2846 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2847 save_mode = BLKmode;
2849 #ifdef ARGS_GROW_DOWNWARD
2850 stack_area = gen_rtx_MEM (save_mode,
2851 memory_address (save_mode,
2852 plus_constant (argblock,
2855 stack_area = gen_rtx_MEM (save_mode,
2856 memory_address (save_mode,
2857 plus_constant (argblock,
2860 if (save_mode == BLKmode)
2862 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2863 emit_block_move (validize_mem (save_area), stack_area,
2864 GEN_INT (num_to_save),
2865 PARM_BOUNDARY / BITS_PER_UNIT);
2869 save_area = gen_reg_rtx (save_mode);
2870 emit_move_insn (save_area, stack_area);
/* First pass over ARGVEC: emit pushes for arguments that go (at least
   partly) on the stack; under ACCUMULATE_OUTGOING_ARGS, first save any
   previously used stack bytes we are about to overwrite and mark the
   bytes this argument occupies in STACK_USAGE_MAP.  */
2875 /* Push the args that need to be pushed. */
2877 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2878 are to be pushed. */
2879 for (count = 0; count < nargs; count++, argnum += inc)
2881 register enum machine_mode mode = argvec[argnum].mode;
2882 register rtx val = argvec[argnum].value;
2883 rtx reg = argvec[argnum].reg;
2884 int partial = argvec[argnum].partial;
2885 #ifdef ACCUMULATE_OUTGOING_ARGS
2886 int lower_bound, upper_bound, i;
2889 if (! (reg != 0 && partial == 0))
2891 #ifdef ACCUMULATE_OUTGOING_ARGS
2892 /* If this is being stored into a pre-allocated, fixed-size, stack
2893 area, save any previous data at that location. */
2895 #ifdef ARGS_GROW_DOWNWARD
2896 /* stack_slot is negative, but we want to index stack_usage_map
2897 with positive values. */
2898 upper_bound = -argvec[argnum].offset.constant + 1;
2899 lower_bound = upper_bound - argvec[argnum].size.constant;
2901 lower_bound = argvec[argnum].offset.constant;
2902 upper_bound = lower_bound + argvec[argnum].size.constant;
2905 for (i = lower_bound; i < upper_bound; i++)
2906 if (stack_usage_map[i]
2907 /* Don't store things in the fixed argument area at this point;
2908 it has already been saved. */
2909 && i > reg_parm_stack_space)
/* The loop stopped early, so some byte in the range is live.  */
2912 if (i != upper_bound)
2914 /* We need to make a save area. See what mode we can make it. */
2915 enum machine_mode save_mode
2916 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2923 plus_constant (argblock,
2924 argvec[argnum].offset.constant)));
2926 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2927 emit_move_insn (argvec[argnum].save_area, stack_area);
2930 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2931 argblock, GEN_INT (argvec[argnum].offset.constant),
2932 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
2934 #ifdef ACCUMULATE_OUTGOING_ARGS
2935 /* Now mark the segment we just used. */
2936 for (i = lower_bound; i < upper_bound; i++)
2937 stack_usage_map[i] = 1;
2944 #ifndef PUSH_ARGS_REVERSED
2945 #ifdef PREFERRED_STACK_BOUNDARY
2946 /* If we pushed args in forward order, perform stack alignment
2947 after pushing the last arg. */
2949 anti_adjust_stack (GEN_INT (args_size.constant
2950 - original_args_size.constant))
2954 #ifdef PUSH_ARGS_REVERSED
2960 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
/* Second pass over ARGVEC: with all stack stores done, load the
   wholly-in-register arguments into their hard registers.  */
2962 /* Now load any reg parms into their regs. */
2964 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2965 are to be pushed. */
2966 for (count = 0; count < nargs; count++, argnum += inc)
2968 register rtx val = argvec[argnum].value;
2969 rtx reg = argvec[argnum].reg;
2970 int partial = argvec[argnum].partial;
2972 if (reg != 0 && partial == 0)
2973 emit_move_insn (reg, val);
2977 /* For version 1.37, try deleting this entirely. */
2981 /* Any regs containing parms remain in use through the call. */
2982 for (count = 0; count < nargs; count++)
2983 if (argvec[count].reg != 0)
2984 use_reg (&call_fusage, argvec[count].reg);
2986 /* Don't allow popping to be deferred, since then
2987 cse'ing of library calls could delete a call and leave the pop. */
2990 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2991 will set inhibit_defer_pop to that value. */
2993 /* The return type is needed to decide how many bytes the function pops.
2994 Signedness plays no role in that, so for simplicity, we pretend it's
2995 always signed. We also assume that the list of arguments passed has
2996 no impact, so we pretend it is unknown. */
2999 get_identifier (XSTR (orgfun, 0)),
3000 build_function_type (outmode == VOIDmode ? void_type_node
3001 : type_for_mode (outmode, 0), NULL_TREE),
3002 original_args_size.constant, args_size.constant, 0,
3003 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3004 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
3005 old_inhibit_defer_pop + 1, call_fusage, no_queue);
3009 /* Now restore inhibit_defer_pop to its actual original value. */
/* After the call: restore the saved fixed register-parm area and any
   per-argument save areas made above, then put the stack-usage
   bookkeeping back to its state on entry.  */
3012 #ifdef ACCUMULATE_OUTGOING_ARGS
3013 #ifdef REG_PARM_STACK_SPACE
3016 enum machine_mode save_mode = GET_MODE (save_area);
3017 #ifdef ARGS_GROW_DOWNWARD
3019 = gen_rtx_MEM (save_mode,
3020 memory_address (save_mode,
3021 plus_constant (argblock,
3025 = gen_rtx_MEM (save_mode,
3026 memory_address (save_mode,
3027 plus_constant (argblock, low_to_save)));
3030 if (save_mode != BLKmode)
3031 emit_move_insn (stack_area, save_area);
3033 emit_block_move (stack_area, validize_mem (save_area),
3034 GEN_INT (high_to_save - low_to_save + 1),
3035 PARM_BOUNDARY / BITS_PER_UNIT);
3039 /* If we saved any argument areas, restore them. */
3040 for (count = 0; count < nargs; count++)
3041 if (argvec[count].save_area)
3043 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3045 = gen_rtx_MEM (save_mode,
3048 plus_constant (argblock,
3049 argvec[count].offset.constant)));
3051 emit_move_insn (stack_area, argvec[count].save_area);
3054 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3055 stack_usage_map = initial_stack_usage_map;
3059 /* Like emit_library_call except that an extra argument, VALUE,
3060 comes second and says where to store the result.
3061 (If VALUE is zero, this function chooses a convenient way
3062 to return the value.)
3064 This function returns an rtx for where the value is to be found.
3065 If VALUE is nonzero, VALUE is returned. */
3068 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3069 enum machine_mode outmode, int nargs, ...))
3071 #ifndef ANSI_PROTOTYPES
3075 enum machine_mode outmode;
3079 /* Total size in bytes of all the stack-parms scanned so far. */
3080 struct args_size args_size;
3081 /* Size of arguments before any adjustments (such as rounding). */
3082 struct args_size original_args_size;
3083 register int argnum;
3087 struct args_size alignment_pad;
3089 CUMULATIVE_ARGS args_so_far;
3090 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3091 struct args_size offset; struct args_size size; rtx save_area; };
3093 int old_inhibit_defer_pop = inhibit_defer_pop;
3094 rtx call_fusage = 0;
3096 int pcc_struct_value = 0;
3097 int struct_value_size = 0;
3099 int reg_parm_stack_space = 0;
3100 #ifdef ACCUMULATE_OUTGOING_ARGS
3104 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3105 /* Define the boundary of the register parm stack space that needs to be
3107 int low_to_save = -1, high_to_save = 0;
3108 rtx save_area = 0; /* Place that it is saved */
3111 #ifdef ACCUMULATE_OUTGOING_ARGS
3112 /* Size of the stack reserved for parameter registers. */
3113 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3114 char *initial_stack_usage_map = stack_usage_map;
3117 #ifdef REG_PARM_STACK_SPACE
3118 #ifdef MAYBE_REG_PARM_STACK_SPACE
3119 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3121 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3125 VA_START (p, nargs);
3127 #ifndef ANSI_PROTOTYPES
3128 orgfun = va_arg (p, rtx);
3129 value = va_arg (p, rtx);
3130 no_queue = va_arg (p, int);
3131 outmode = va_arg (p, enum machine_mode);
3132 nargs = va_arg (p, int);
3135 is_const = no_queue;
3138 /* If this kind of value comes back in memory,
3139 decide where in memory it should come back. */
3140 if (aggregate_value_p (type_for_mode (outmode, 0)))
3142 #ifdef PCC_STATIC_STRUCT_RETURN
3144 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3146 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3147 pcc_struct_value = 1;
3149 value = gen_reg_rtx (outmode);
3150 #else /* not PCC_STATIC_STRUCT_RETURN */
3151 struct_value_size = GET_MODE_SIZE (outmode);
3152 if (value != 0 && GET_CODE (value) == MEM)
3155 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3158 /* This call returns a big structure. */
3162 /* ??? Unfinished: must pass the memory address as an argument. */
3164 /* Copy all the libcall-arguments out of the varargs data
3165 and into a vector ARGVEC.
3167 Compute how to pass each argument. We only support a very small subset
3168 of the full argument passing conventions to limit complexity here since
3169 library functions shouldn't have many args. */
3171 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3172 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3174 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3176 args_size.constant = 0;
3183 /* If there's a structure value address to be passed,
3184 either pass it in the special place, or pass it as an extra argument. */
3185 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3187 rtx addr = XEXP (mem_value, 0);
3190 /* Make sure it is a reasonable operand for a move or push insn. */
3191 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3192 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3193 addr = force_operand (addr, NULL_RTX);
3195 argvec[count].value = addr;
3196 argvec[count].mode = Pmode;
3197 argvec[count].partial = 0;
3199 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3200 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3201 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3205 locate_and_pad_parm (Pmode, NULL_TREE,
3206 argvec[count].reg && argvec[count].partial == 0,
3207 NULL_TREE, &args_size, &argvec[count].offset,
3208 &argvec[count].size, &alignment_pad);
3211 if (argvec[count].reg == 0 || argvec[count].partial != 0
3212 || reg_parm_stack_space > 0)
3213 args_size.constant += argvec[count].size.constant;
3215 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3220 for (; count < nargs; count++)
3222 rtx val = va_arg (p, rtx);
3223 enum machine_mode mode = va_arg (p, enum machine_mode);
3225 /* We cannot convert the arg value to the mode the library wants here;
3226 must do it earlier where we know the signedness of the arg. */
3228 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3231 /* On some machines, there's no way to pass a float to a library fcn.
3232 Pass it as a double instead. */
3233 #ifdef LIBGCC_NEEDS_DOUBLE
3234 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3235 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3238 /* There's no need to call protect_from_queue, because
3239 either emit_move_insn or emit_push_insn will do that. */
3241 /* Make sure it is a reasonable operand for a move or push insn. */
3242 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3243 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3244 val = force_operand (val, NULL_RTX);
3246 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3247 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3249 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3250 be viewed as just an efficiency improvement. */
3251 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3252 emit_move_insn (slot, val);
3253 val = XEXP (slot, 0);
3258 argvec[count].value = val;
3259 argvec[count].mode = mode;
3261 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3262 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3264 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3265 argvec[count].partial
3266 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3268 argvec[count].partial = 0;
3271 locate_and_pad_parm (mode, NULL_TREE,
3272 argvec[count].reg && argvec[count].partial == 0,
3273 NULL_TREE, &args_size, &argvec[count].offset,
3274 &argvec[count].size, &alignment_pad);
3276 if (argvec[count].size.var)
3279 if (reg_parm_stack_space == 0 && argvec[count].partial)
3280 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3282 if (argvec[count].reg == 0 || argvec[count].partial != 0
3283 || reg_parm_stack_space > 0)
3284 args_size.constant += argvec[count].size.constant;
3286 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3290 #ifdef FINAL_REG_PARM_STACK_SPACE
3291 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3294 /* If this machine requires an external definition for library
3295 functions, write one out. */
3296 assemble_external_libcall (fun);
3298 original_args_size = args_size;
3299 #ifdef PREFERRED_STACK_BOUNDARY
3300 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3301 / STACK_BYTES) * STACK_BYTES);
3304 args_size.constant = MAX (args_size.constant,
3305 reg_parm_stack_space);
3307 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3308 args_size.constant -= reg_parm_stack_space;
3311 if (args_size.constant > current_function_outgoing_args_size)
3312 current_function_outgoing_args_size = args_size.constant;
3314 #ifdef ACCUMULATE_OUTGOING_ARGS
3315 /* Since the stack pointer will never be pushed, it is possible for
3316 the evaluation of a parm to clobber something we have already
3317 written to the stack. Since most function calls on RISC machines
3318 do not use the stack, this is uncommon, but must work correctly.
3320 Therefore, we save any area of the stack that was already written
3321 and that we are using. Here we set up to do this by making a new
3322 stack usage map from the old one.
3324 Another approach might be to try to reorder the argument
3325 evaluations to avoid this conflicting stack usage. */
3327 needed = args_size.constant;
3329 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3330 /* Since we will be writing into the entire argument area, the
3331 map must be allocated for its entire size, not just the part that
3332 is the responsibility of the caller. */
3333 needed += reg_parm_stack_space;
3336 #ifdef ARGS_GROW_DOWNWARD
3337 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3340 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3343 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3345 if (initial_highest_arg_in_use)
3346 bcopy (initial_stack_usage_map, stack_usage_map,
3347 initial_highest_arg_in_use);
3349 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3350 bzero (&stack_usage_map[initial_highest_arg_in_use],
3351 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3354 /* The address of the outgoing argument list must not be copied to a
3355 register here, because argblock would be left pointing to the
3356 wrong place after the call to allocate_dynamic_stack_space below.
3359 argblock = virtual_outgoing_args_rtx;
3360 #else /* not ACCUMULATE_OUTGOING_ARGS */
3361 #ifndef PUSH_ROUNDING
3362 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3366 #ifdef PUSH_ARGS_REVERSED
3367 #ifdef PREFERRED_STACK_BOUNDARY
3368 /* If we push args individually in reverse order, perform stack alignment
3369 before the first push (the last arg). */
3371 anti_adjust_stack (GEN_INT (args_size.constant
3372 - original_args_size.constant));
3376 #ifdef PUSH_ARGS_REVERSED
3384 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3385 /* The argument list is the property of the called routine and it
3386 may clobber it. If the fixed area has been used for previous
3387 parameters, we must save and restore it.
3389 Here we compute the boundary of the area that needs to be saved, if any. */
3391 #ifdef ARGS_GROW_DOWNWARD
3392 for (count = 0; count < reg_parm_stack_space + 1; count++)
3394 for (count = 0; count < reg_parm_stack_space; count++)
3397 if (count >= highest_outgoing_arg_in_use
3398 || stack_usage_map[count] == 0)
3401 if (low_to_save == -1)
3402 low_to_save = count;
3404 high_to_save = count;
3407 if (low_to_save >= 0)
3409 int num_to_save = high_to_save - low_to_save + 1;
3410 enum machine_mode save_mode
3411 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3414 /* If we don't have the required alignment, must do this in BLKmode. */
3415 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3416 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3417 save_mode = BLKmode;
3419 #ifdef ARGS_GROW_DOWNWARD
3420 stack_area = gen_rtx_MEM (save_mode,
3421 memory_address (save_mode,
3422 plus_constant (argblock,
3425 stack_area = gen_rtx_MEM (save_mode,
3426 memory_address (save_mode,
3427 plus_constant (argblock,
3430 if (save_mode == BLKmode)
3432 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3433 emit_block_move (validize_mem (save_area), stack_area,
3434 GEN_INT (num_to_save),
3435 PARM_BOUNDARY / BITS_PER_UNIT);
3439 save_area = gen_reg_rtx (save_mode);
3440 emit_move_insn (save_area, stack_area);
3445 /* Push the args that need to be pushed. */
3447 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3448 are to be pushed. */
3449 for (count = 0; count < nargs; count++, argnum += inc)
3451 register enum machine_mode mode = argvec[argnum].mode;
3452 register rtx val = argvec[argnum].value;
3453 rtx reg = argvec[argnum].reg;
3454 int partial = argvec[argnum].partial;
3455 #ifdef ACCUMULATE_OUTGOING_ARGS
3456 int lower_bound, upper_bound, i;
3459 if (! (reg != 0 && partial == 0))
3461 #ifdef ACCUMULATE_OUTGOING_ARGS
3462 /* If this is being stored into a pre-allocated, fixed-size, stack
3463 area, save any previous data at that location. */
3465 #ifdef ARGS_GROW_DOWNWARD
3466 /* stack_slot is negative, but we want to index stack_usage_map
3467 with positive values. */
3468 upper_bound = -argvec[argnum].offset.constant + 1;
3469 lower_bound = upper_bound - argvec[argnum].size.constant;
3471 lower_bound = argvec[argnum].offset.constant;
3472 upper_bound = lower_bound + argvec[argnum].size.constant;
3475 for (i = lower_bound; i < upper_bound; i++)
3476 if (stack_usage_map[i]
3477 /* Don't store things in the fixed argument area at this point;
3478 it has already been saved. */
3479 && i > reg_parm_stack_space)
3482 if (i != upper_bound)
3484 /* We need to make a save area. See what mode we can make it. */
3485 enum machine_mode save_mode
3486 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3493 plus_constant (argblock,
3494 argvec[argnum].offset.constant)));
3495 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3497 emit_move_insn (argvec[argnum].save_area, stack_area);
3500 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3501 argblock, GEN_INT (argvec[argnum].offset.constant),
3502 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3504 #ifdef ACCUMULATE_OUTGOING_ARGS
3505 /* Now mark the segment we just used. */
3506 for (i = lower_bound; i < upper_bound; i++)
3507 stack_usage_map[i] = 1;
3514 #ifndef PUSH_ARGS_REVERSED
3515 #ifdef PREFERRED_STACK_BOUNDARY
3516 /* If we pushed args in forward order, perform stack alignment
3517 after pushing the last arg. */
3519 anti_adjust_stack (GEN_INT (args_size.constant
3520 - original_args_size.constant));
3524 #ifdef PUSH_ARGS_REVERSED
3530 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3532 /* Now load any reg parms into their regs. */
3534 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3535 are to be pushed. */
3536 for (count = 0; count < nargs; count++, argnum += inc)
3538 register rtx val = argvec[argnum].value;
3539 rtx reg = argvec[argnum].reg;
3540 int partial = argvec[argnum].partial;
3542 if (reg != 0 && partial == 0)
3543 emit_move_insn (reg, val);
3548 /* For version 1.37, try deleting this entirely. */
3553 /* Any regs containing parms remain in use through the call. */
3554 for (count = 0; count < nargs; count++)
3555 if (argvec[count].reg != 0)
3556 use_reg (&call_fusage, argvec[count].reg);
3558 /* Pass the function the address in which to return a structure value. */
3559 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3561 emit_move_insn (struct_value_rtx,
3563 force_operand (XEXP (mem_value, 0),
3565 if (GET_CODE (struct_value_rtx) == REG)
3566 use_reg (&call_fusage, struct_value_rtx);
3569 /* Don't allow popping to be deferred, since then
3570 cse'ing of library calls could delete a call and leave the pop. */
3573 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3574 will set inhibit_defer_pop to that value. */
3575 /* See the comment in emit_library_call about the function type we build
3579 get_identifier (XSTR (orgfun, 0)),
3580 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3581 original_args_size.constant, args_size.constant,
3583 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3584 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3585 old_inhibit_defer_pop + 1, call_fusage, is_const);
3587 /* Now restore inhibit_defer_pop to its actual original value. */
3592 /* Copy the value to the right place. */
3593 if (outmode != VOIDmode)
3599 if (value != mem_value)
3600 emit_move_insn (value, mem_value);
3602 else if (value != 0)
3603 emit_move_insn (value, hard_libcall_value (outmode));
3605 value = hard_libcall_value (outmode);
3608 #ifdef ACCUMULATE_OUTGOING_ARGS
3609 #ifdef REG_PARM_STACK_SPACE
3612 enum machine_mode save_mode = GET_MODE (save_area);
3613 #ifdef ARGS_GROW_DOWNWARD
3615 = gen_rtx_MEM (save_mode,
3616 memory_address (save_mode,
3617 plus_constant (argblock,
3621 = gen_rtx_MEM (save_mode,
3622 memory_address (save_mode,
3623 plus_constant (argblock, low_to_save)));
3625 if (save_mode != BLKmode)
3626 emit_move_insn (stack_area, save_area);
3628 emit_block_move (stack_area, validize_mem (save_area),
3629 GEN_INT (high_to_save - low_to_save + 1),
3630 PARM_BOUNDARY / BITS_PER_UNIT);
3634 /* If we saved any argument areas, restore them. */
3635 for (count = 0; count < nargs; count++)
3636 if (argvec[count].save_area)
3638 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3640 = gen_rtx_MEM (save_mode,
3643 plus_constant (argblock,
3644 argvec[count].offset.constant)));
3646 emit_move_insn (stack_area, argvec[count].save_area);
3649 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3650 stack_usage_map = initial_stack_usage_map;
3657 /* Return an rtx which represents a suitable home on the stack
3658 given TYPE, the type of the argument looking for a home.
3659 This is called only for BLKmode arguments.
3661 SIZE is the size needed for this target.
3662 ARGS_ADDR is the address of the bottom of the argument block for this call.
3663 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3664 if this machine uses push insns. */
3667 target_for_arg (type, size, args_addr, offset)
3671 struct args_size offset;
3674 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3676 /* We do not call memory_address if possible,
3677 because we want to address as close to the stack
3678 as possible. For non-variable sized arguments,
3679 this will be stack-pointer relative addressing. */
     /* Constant offset: compute the address directly with plus_constant,
        deliberately skipping memory_address so the result stays a plain
        stack-pointer-relative address. */
3680 if (GET_CODE (offset_rtx) == CONST_INT)
3681 target = plus_constant (args_addr, INTVAL (offset_rtx));
     /* Variable offset: build an explicit PLUS and let memory_address
        legitimize it for the target. */
3684 /* I have no idea how to guarantee that this
3685 will work in the presence of register parameters. */
3686 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3687 target = memory_address (QImode, target);
     /* Wrap the computed address in a BLKmode MEM; per the header comment
        this function is only used for BLKmode arguments, and the caller
        uses the returned MEM as the argument's stack home. */
3690 return gen_rtx_MEM (BLKmode, target);
3694 /* Store a single argument for a function call
3695 into the register or memory area where it must be passed.
3696 *ARG describes the argument value and where to pass it.
3698 ARGBLOCK is the address of the stack-block for all the arguments,
3699 or 0 on a machine where arguments are pushed individually.
3701 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3702 so must be careful about how the stack is used.
3704 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3705 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3706 that we need not worry about saving and restoring the stack.
3708 FNDECL is the declaration of the function we are calling. */
3711 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3712 reg_parm_stack_space)
3713 struct arg_data *arg;
3716 int variable_size ATTRIBUTE_UNUSED;
3717 int reg_parm_stack_space;
3719 register tree pval = arg->tree_value;
3723 #ifdef ACCUMULATE_OUTGOING_ARGS
3724 int i, lower_bound = 0, upper_bound = 0;
3727 if (TREE_CODE (pval) == ERROR_MARK)
3730 /* Push a new temporary level for any temporaries we make for
3734 #ifdef ACCUMULATE_OUTGOING_ARGS
3735 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3736 save any previous data at that location. */
3737 if (argblock && ! variable_size && arg->stack)
3739 #ifdef ARGS_GROW_DOWNWARD
3740 /* stack_slot is negative, but we want to index stack_usage_map
3741 with positive values. */
3742 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3743 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3747 lower_bound = upper_bound - arg->size.constant;
3749 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3750 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3754 upper_bound = lower_bound + arg->size.constant;
3757 for (i = lower_bound; i < upper_bound; i++)
3758 if (stack_usage_map[i]
3759 /* Don't store things in the fixed argument area at this point;
3760 it has already been saved. */
3761 && i > reg_parm_stack_space)
3764 if (i != upper_bound)
3766 /* We need to make a save area. See what mode we can make it. */
3767 enum machine_mode save_mode
3768 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3770 = gen_rtx_MEM (save_mode,
3771 memory_address (save_mode,
3772 XEXP (arg->stack_slot, 0)));
3774 if (save_mode == BLKmode)
3776 arg->save_area = assign_stack_temp (BLKmode,
3777 arg->size.constant, 0);
3778 MEM_SET_IN_STRUCT_P (arg->save_area,
3779 AGGREGATE_TYPE_P (TREE_TYPE
3780 (arg->tree_value)));
3781 preserve_temp_slots (arg->save_area);
3782 emit_block_move (validize_mem (arg->save_area), stack_area,
3783 GEN_INT (arg->size.constant),
3784 PARM_BOUNDARY / BITS_PER_UNIT);
3788 arg->save_area = gen_reg_rtx (save_mode);
3789 emit_move_insn (arg->save_area, stack_area);
3794 /* Now that we have saved any slots that will be overwritten by this
3795 store, mark all slots this store will use. We must do this before
3796 we actually expand the argument since the expansion itself may
3797 trigger library calls which might need to use the same stack slot. */
3798 if (argblock && ! variable_size && arg->stack)
3799 for (i = lower_bound; i < upper_bound; i++)
3800 stack_usage_map[i] = 1;
3803 /* If this isn't going to be placed on both the stack and in registers,
3804 set up the register and number of words. */
3805 if (! arg->pass_on_stack)
3806 reg = arg->reg, partial = arg->partial;
3808 if (reg != 0 && partial == 0)
3809 /* Being passed entirely in a register. We shouldn't be called in
3813 /* If this arg needs special alignment, don't load the registers
3815 if (arg->n_aligned_regs != 0)
3818 /* If this is being passed partially in a register, we can't evaluate
3819 it directly into its stack slot. Otherwise, we can. */
3820 if (arg->value == 0)
3822 #ifdef ACCUMULATE_OUTGOING_ARGS
3823 /* stack_arg_under_construction is nonzero if a function argument is
3824 being evaluated directly into the outgoing argument list and
3825 expand_call must take special action to preserve the argument list
3826 if it is called recursively.
3828 For scalar function arguments stack_usage_map is sufficient to
3829 determine which stack slots must be saved and restored. Scalar
3830 arguments in general have pass_on_stack == 0.
3832 If this argument is initialized by a function which takes the
3833 address of the argument (a C++ constructor or a C function
3834 returning a BLKmode structure), then stack_usage_map is
3835 insufficient and expand_call must push the stack around the
3836 function call. Such arguments have pass_on_stack == 1.
3838 Note that it is always safe to set stack_arg_under_construction,
3839 but this generates suboptimal code if set when not needed. */
3841 if (arg->pass_on_stack)
3842 stack_arg_under_construction++;
3844 arg->value = expand_expr (pval,
3846 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3847 ? NULL_RTX : arg->stack,
3850 /* If we are promoting object (or for any other reason) the mode
3851 doesn't agree, convert the mode. */
3853 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3854 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3855 arg->value, arg->unsignedp);
3857 #ifdef ACCUMULATE_OUTGOING_ARGS
3858 if (arg->pass_on_stack)
3859 stack_arg_under_construction--;
3863 /* Don't allow anything left on stack from computation
3864 of argument to alloca. */
3866 do_pending_stack_adjust ();
3868 if (arg->value == arg->stack)
3870 /* If the value is already in the stack slot, we are done. */
3871 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3873 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3874 XEXP (arg->stack, 0), Pmode,
3875 ARGS_SIZE_RTX (arg->size),
3876 TYPE_MODE (sizetype),
3877 GEN_INT (MEMORY_USE_RW),
3878 TYPE_MODE (integer_type_node));
3881 else if (arg->mode != BLKmode)
3885 /* Argument is a scalar, not entirely passed in registers.
3886 (If part is passed in registers, arg->partial says how much
3887 and emit_push_insn will take care of putting it there.)
3889 Push it, and if its size is less than the
3890 amount of space allocated to it,
3891 also bump stack pointer by the additional space.
3892 Note that in C the default argument promotions
3893 will prevent such mismatches. */
3895 size = GET_MODE_SIZE (arg->mode);
3896 /* Compute how much space the push instruction will push.
3897 On many machines, pushing a byte will advance the stack
3898 pointer by a halfword. */
3899 #ifdef PUSH_ROUNDING
3900 size = PUSH_ROUNDING (size);
3904 /* Compute how much space the argument should get:
3905 round up to a multiple of the alignment for arguments. */
3906 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3907 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3908 / (PARM_BOUNDARY / BITS_PER_UNIT))
3909 * (PARM_BOUNDARY / BITS_PER_UNIT));
3911 /* This isn't already where we want it on the stack, so put it there.
3912 This can either be done with push or copy insns. */
3913 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3914 partial, reg, used - size, argblock,
3915 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
3916 ARGS_SIZE_RTX (arg->alignment_pad));
3921 /* BLKmode, at least partly to be pushed. */
3923 register int excess;
3926 /* Pushing a nonscalar.
3927 If part is passed in registers, PARTIAL says how much
3928 and emit_push_insn will take care of putting it there. */
3930 /* Round its size up to a multiple
3931 of the allocation unit for arguments. */
3933 if (arg->size.var != 0)
3936 size_rtx = ARGS_SIZE_RTX (arg->size);
3940 /* PUSH_ROUNDING has no effect on us, because
3941 emit_push_insn for BLKmode is careful to avoid it. */
3942 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3943 + partial * UNITS_PER_WORD);
3944 size_rtx = expr_size (pval);
3947 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3948 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3949 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3950 reg_parm_stack_space,
3951 ARGS_SIZE_RTX (arg->alignment_pad));
3955 /* Unless this is a partially-in-register argument, the argument is now
3958 ??? Note that this can change arg->value from arg->stack to
3959 arg->stack_slot and it matters when they are not the same.
3960 It isn't totally clear that this is correct in all cases. */
3962 arg->value = arg->stack_slot;
3964 /* Once we have pushed something, pops can't safely
3965 be deferred during the rest of the arguments. */
3968 /* ANSI doesn't require a sequence point here,
3969 but PCC has one, so this will avoid some problems. */
3972 /* Free any temporary slots made in processing this argument. Show
3973 that we might have taken the address of something and pushed that
3975 preserve_temp_slots (NULL_RTX);