1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
32 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
33 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
51 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
53 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): the numbering embedded in this extract skips (53 -> 57 -> 59,
   etc.), so the `struct arg_data {` header, several field declarations, the
   closing brace, and parts of the prototype list are missing from this view.
   The comments below describe one per-argument record used by expand_call;
   only the fields whose declarations survive are visible here. */
57 /* Tree node for this argument. */
59 /* Mode for value; TYPE_MODE unless promoted. */
60 enum machine_mode mode;
61 /* Current RTL value for argument, or 0 if it isn't precomputed. */
63 /* Initially-compute RTL value for argument; only for const functions. */
65 /* Register to pass this argument in, 0 if passed on stack, or an
66 PARALLEL if the arg is to be copied into multiple non-contiguous
69 /* If REG was promoted from the actual mode of the argument expression,
70 indicates whether the promotion is sign- or zero-extended. */
72 /* Number of registers to use. 0 means put the whole arg in registers.
73 Also 0 if not passed in registers. */
75 /* Non-zero if argument must be passed on stack.
76 Note that some arguments may be passed on the stack
77 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
78 pass_on_stack identifies arguments that *cannot* go in registers. */
80 /* Offset of this argument from beginning of stack-args. */
81 struct args_size offset;
82 /* Similar, but offset to the start of the stack slot. Different from
83 OFFSET if this arg pads downward. */
84 struct args_size slot_offset;
85 /* Size of this argument on the stack, rounded up for any padding it gets,
86 parts of the argument passed in registers do not count.
87 If REG_PARM_STACK_SPACE is defined, then register parms
88 are counted here as well. */
89 struct args_size size;
90 /* Location on the stack at which parameter should be stored. The store
91 has already been done if STACK == VALUE. */
93 /* Location on the stack of the start of this argument slot. This can
94 differ from STACK if this arg pads downward. This location is known
95 to be aligned to FUNCTION_ARG_BOUNDARY. */
97 #ifdef ACCUMULATE_OUTGOING_ARGS
98 /* Place that this stack area has been saved, if needed. */
101 /* If an argument's alignment does not permit direct copying into registers,
102 copy in smaller-sized pieces into pseudos. These are stored in a
103 block pointed to by this field. The next field says how many
104 word-sized pseudos we made. */
/* File-scope bookkeeping for outgoing-argument stack usage. */
109 #ifdef ACCUMULATE_OUTGOING_ARGS
110 /* A vector of one char per byte of stack space. A byte if non-zero if
111 the corresponding stack location has been used.
112 This vector is used to prevent a function call within an argument from
113 clobbering any stack already set up. */
114 static char *stack_usage_map;
116 /* Size of STACK_USAGE_MAP. */
117 static int highest_outgoing_arg_in_use;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 int stack_arg_under_construction;
/* Forward declarations for the static helpers defined below.  Several
   PROTO argument lists are truncated in this extract. */
127 static int calls_function PROTO ((tree, int));
128 static int calls_function_1 PROTO ((tree, int));
129 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
130 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
131 rtx, int, rtx, int));
132 static void special_function_p PROTO ((char *, tree, int *, int *,
134 static void precompute_register_parameters PROTO ((int, struct arg_data *,
136 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
138 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
140 static int finalize_must_preallocate PROTO ((int, int,
142 struct args_size *));
143 static void precompute_arguments PROTO ((int, int, int,
145 struct args_size *));
146 static int compute_argument_block_size PROTO ((int,
147 struct args_size *));
148 static void initialize_argument_information PROTO ((int,
155 static void compute_argument_addresses PROTO ((struct arg_data *,
157 static rtx rtx_for_function_call PROTO ((tree, tree));
158 static void load_register_parameters PROTO ((struct arg_data *,
161 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
162 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
163 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
166 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
169 If WHICH is 0, return 1 if EXP contains a call to any function.
170 Actually, we only need return 1 if evaluating EXP would require pushing
171 arguments on the stack, but that is too difficult to compute, so we just
172 assume any function call might require the stack. */
/* List of SAVE_EXPRs already visited during a walk, so shared subtrees
   are not scanned twice; reset around each top-level query. */
174 static tree calls_function_save_exprs;
/* NOTE(review): the declaration list, braces, and return statement of this
   function are missing from this extract (embedded numbering skips
   177 -> 182).  Visible logic: clear the visited list, delegate the
   recursive walk to calls_function_1, clear the list again. */
177 calls_function (exp, which)
182 calls_function_save_exprs = 0;
183 val = calls_function_1 (exp, which);
184 calls_function_save_exprs = 0;
/* Recursive worker for calls_function.  NOTE(review): many lines of this
   function (parameter declarations, braces, several case labels and
   return statements) are missing from this extract; the fragments below
   show the overall shape only. */
189 calls_function_1 (exp, which)
194 enum tree_code code = TREE_CODE (exp);
195 int type = TREE_CODE_CLASS (code);
196 int length = tree_code_length[(int) code];
198 /* If this code is language-specific, we don't know what it will do. */
199 if ((int) code >= NUM_TREE_CODES)
202 /* Only expressions and references can contain calls. */
203 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* CALL_EXPR handling: recognize a direct call to a known FUNCTION_DECL
   so alloca-calling functions can be detected. */
212 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
213 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
216 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
218 if ((DECL_BUILT_IN (fndecl)
219 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
220 || (DECL_SAVED_INSNS (fndecl)
221 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
222 & FUNCTION_FLAGS_CALLS_ALLOCA)))
226 /* Third operand is RTL. */
/* SAVE_EXPR handling: an already-expanded or already-visited SAVE_EXPR
   need not be walked again. */
231 if (SAVE_EXPR_RTL (exp) != 0)
233 if (value_member (exp, calls_function_save_exprs))
235 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
236 calls_function_save_exprs);
237 return (TREE_OPERAND (exp, 0) != 0
238 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK handling: scan local DECL_INITIALs and sub-blocks. */
244 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
245 if (DECL_INITIAL (local) != 0
246 && calls_function_1 (DECL_INITIAL (local), which))
250 register tree subblock;
252 for (subblock = BLOCK_SUBBLOCKS (exp);
254 subblock = TREE_CHAIN (subblock))
255 if (calls_function_1 (subblock, which))
260 case METHOD_CALL_EXPR:
264 case WITH_CLEANUP_EXPR:
/* Default: recurse into every operand of the expression. */
275 for (i = 0; i < length; i++)
276 if (TREE_OPERAND (exp, i) != 0
277 && calls_function_1 (TREE_OPERAND (exp, i), which))
283 /* Force FUNEXP into a form suitable for the address of a CALL,
284 and return that as an rtx. Also load the static chain register
285 if FNDECL is a nested function.
287 CALL_FUSAGE points to a variable holding the prospective
288 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): declarations, braces, and the return statement are
   missing from this extract; fragments below show the main steps. */
291 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
297 rtx static_chain_value = 0;
299 funexp = protect_from_queue (funexp, 0);
302 /* Get possible static chain value for nested function in C. */
303 static_chain_value = lookup_static_chain (fndecl);
305 /* Make a valid memory address and copy constants thru pseudo-regs,
306 but not for a constant address if -fno-function-cse. */
307 if (GET_CODE (funexp) != SYMBOL_REF)
308 /* If we are using registers for parameters, force the
309 function address into a register now. */
310 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
311 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
312 : memory_address (FUNCTION_MODE, funexp));
/* Function-address CSE: force the address into a register unless
   disabled, and (optionally) not for self-recursive calls. */
315 #ifndef NO_FUNCTION_CSE
316 if (optimize && ! flag_no_function_cse)
317 #ifdef NO_RECURSIVE_FUNCTION_CSE
318 if (fndecl != current_function_decl)
320 funexp = force_reg (Pmode, funexp);
/* Load the static chain for a nested function and record the register
   as used by the call. */
324 if (static_chain_value != 0)
326 emit_move_insn (static_chain_rtx, static_chain_value);
328 if (GET_CODE (static_chain_rtx) == REG)
329 use_reg (call_fusage, static_chain_rtx);
335 /* Generate instructions to call function FUNEXP,
336 and optionally pop the results.
337 The CALL_INSN is the first insn generated.
339 FNDECL is the declaration node of the function. This is given to the
340 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
342 FUNTYPE is the data type of the function. This is given to the macro
343 RETURN_POPS_ARGS to determine whether this function pops its own args.
344 We used to allow an identifier for library functions, but that doesn't
345 work when the return type is an aggregate type and the calling convention
346 says that the pointer to this aggregate is to be popped by the callee.
348 STACK_SIZE is the number of bytes of arguments on the stack,
349 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
350 This is both to put into the call insn and
351 to generate explicit popping code if necessary.
353 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
354 It is zero if this call doesn't want a structure value.
356 NEXT_ARG_REG is the rtx that results from executing
357 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function.
374 IS_CONST is true if this is a `const' call. */
/* NOTE(review): many lines of this function are missing from this extract
   (several parameter declarations, braces, #else/#endif partners, and a
   final abort path).  The preprocessor arms below are therefore visibly
   unbalanced here; do not edit structure without the full file. */
377 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
378 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
379 call_fusage, is_const)
381 tree fndecl ATTRIBUTE_UNUSED;
382 tree funtype ATTRIBUTE_UNUSED;
383 HOST_WIDE_INT stack_size;
384 HOST_WIDE_INT rounded_stack_size;
385 HOST_WIDE_INT struct_value_size;
388 int old_inhibit_defer_pop;
392 rtx stack_size_rtx = GEN_INT (stack_size);
393 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
395 #ifndef ACCUMULATE_OUTGOING_ARGS
396 int already_popped = 0;
397 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
400 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
401 and we don't want to load it into a register as an optimization,
402 because prepare_call_address already did it if it should be done. */
403 if (GET_CODE (funexp) != SYMBOL_REF)
404 funexp = memory_address (FUNCTION_MODE, funexp)
405 ;
/* Prefer a call_pop/call_value_pop pattern when the callee pops its
   own arguments, so the pop is recorded in the call insn itself. */
406 #ifndef ACCUMULATE_OUTGOING_ARGS
407 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
408 if (HAVE_call_pop && HAVE_call_value_pop && n_popped > 0)
410 rtx n_pop = GEN_INT (n_popped);
413 /* If this subroutine pops its own args, record that in the call insn
414 if possible, for the sake of frame pointer elimination. */
417 pat = gen_call_value_pop (valreg,
418 gen_rtx_MEM (FUNCTION_MODE, funexp),
419 stack_size_rtx, next_arg_reg, n_pop);
421 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
422 stack_size_rtx, next_arg_reg, n_pop);
424 emit_call_insn (pat);
/* Fallback: plain call / call_value patterns. */
431 #if defined (HAVE_call) && defined (HAVE_call_value)
432 if (HAVE_call && HAVE_call_value)
435 emit_call_insn (gen_call_value (valreg,
436 gen_rtx_MEM (FUNCTION_MODE, funexp),
437 stack_size_rtx, next_arg_reg,
440 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
441 stack_size_rtx, next_arg_reg,
442 struct_value_size_rtx));
448 /* Find the CALL insn we just emitted. */
449 for (call_insn = get_last_insn ();
450 call_insn && GET_CODE (call_insn) != CALL_INSN;
451 call_insn = PREV_INSN (call_insn))
457 /* Put the register usage information on the CALL. If there is already
458 some usage information, put ours at the end. */
459 if (CALL_INSN_FUNCTION_USAGE (call_insn))
463 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
464 link = XEXP (link, 1))
467 XEXP (link, 1) = call_fusage;
470 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
472 /* If this is a const call, then set the insn's unchanging bit. */
474 CONST_CALL_P (call_insn) = 1;
476 /* Restore this now, so that we do defer pops for this call's args
477 if the context of the call as a whole permits. */
478 inhibit_defer_pop = old_inhibit_defer_pop;
480 #ifndef ACCUMULATE_OUTGOING_ARGS
481 /* If returning from the subroutine does not automatically pop the args,
482 we need an instruction to pop them sooner or later.
483 Perhaps do it now; perhaps just record how much space to pop later.
485 If returning from the subroutine does pop the args, indicate that the
486 stack pointer will be changed. */
/* Callee-pop case: record the stack-pointer clobber and account for the
   bytes the callee removes. */
491 CALL_INSN_FUNCTION_USAGE (call_insn)
492 = gen_rtx_EXPR_LIST (VOIDmode,
493 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
494 CALL_INSN_FUNCTION_USAGE (call_insn));
495 stack_size -= n_popped;
496 rounded_stack_size -= n_popped;
/* Caller-pop case: either defer the adjustment or emit it now. */
499 if (rounded_stack_size != 0)
501 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
502 pending_stack_adjust += rounded_stack_size;
504 adjust_stack (GEN_INT (rounded_stack_size));
509 /* Determine if the function identified by NAME and FNDECL is one with
510 special properties we wish to know about.
512 For example, if the function might return more than one time (setjmp), then
513 set RETURNS_TWICE to a nonzero value.
515 Similarly set IS_LONGJMP for if the function is in the longjmp family.
517 Set IS_MALLOC for any of the standard memory allocation functions which
518 allocate from the heap.
520 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
521 space from the stack such as alloca. */
/* NOTE(review): parameter declarations, braces, and several condition
   lines are missing from this extract; the name-matching fragments
   below show which function names are recognized. */
524 special_function_p (name, fndecl, returns_twice, is_longjmp,
525 is_malloc, may_be_alloca)
/* Only consider short names of file-scope, extern functions; anything
   else cannot be one of the "magic" library entry points. */
538 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
539 /* Exclude functions not at the file scope, or not `extern',
540 since they are not the magic functions we would otherwise
542 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
546 /* We assume that alloca will always be called by name. It
547 makes no sense to pass it as a pointer-to-function to
548 anything that does not understand its behavior. */
550 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
552 && ! strcmp (name, "alloca"))
553 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
555 && ! strcmp (name, "__builtin_alloca"))));
557 /* Disregard prefix _, __ or __x. */
560 if (name[1] == '_' && name[2] == 'x')
562 else if (name[1] == '_')
/* setjmp family (returns twice) and longjmp family detection; the
   cheap two-character tests guard the strcmp calls. */
572 && (! strcmp (tname, "setjmp")
573 || ! strcmp (tname, "setjmp_syscall")))
575 && ! strcmp (tname, "sigsetjmp"))
577 && ! strcmp (tname, "savectx")));
579 && ! strcmp (tname, "siglongjmp"))
582 else if ((tname[0] == 'q' && tname[1] == 's'
583 && ! strcmp (tname, "qsetjmp"))
584 || (tname[0] == 'v' && tname[1] == 'f'
585 && ! strcmp (tname, "vfork")))
588 else if (tname[0] == 'l' && tname[1] == 'o'
589 && ! strcmp (tname, "longjmp"))
591 /* XXX should have "malloc" attribute on functions instead
592 of recognizing them by name. */
593 else if (! strcmp (tname, "malloc")
594 || ! strcmp (tname, "calloc")
595 || ! strcmp (tname, "realloc")
596 /* Note use of NAME rather than TNAME here. These functions
597 are only reserved when preceded with __. */
598 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
599 || ! strcmp (name, "__nw") /* mangled __builtin_new */
600 || ! strcmp (name, "__builtin_new")
601 || ! strcmp (name, "__builtin_vec_new"))
606 /* Precompute all register parameters as described by ARGS, storing values
607 into fields within the ARGS array.
609 NUM_ACTUALS indicates the total number elements in the ARGS array.
611 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): declarations and braces are missing from this extract;
   the loop body fragments below show the per-argument steps. */
614 precompute_register_parameters (num_actuals, args, reg_parm_seen)
616 struct arg_data *args;
623 for (i = 0; i < num_actuals; i++)
624 if (args[i].reg != 0 && ! args[i].pass_on_stack)
/* Evaluate the argument expression if not already done. */
628 if (args[i].value == 0)
631 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
633 preserve_temp_slots (args[i].value);
636 /* ANSI doesn't require a sequence point here,
637 but PCC has one, so this will avoid some problems. */
641 /* If we are to promote the function arg to a wider mode,
644 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
646 = convert_modes (args[i].mode,
647 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
648 args[i].value, args[i].unsignedp);
650 /* If the value is expensive, and we are inside an appropriately
651 short loop, put the value into a pseudo and then put the pseudo
654 For small register classes, also do this if this call uses
655 register parameters. This is to avoid reload conflicts while
656 loading the parameters registers. */
658 if ((! (GET_CODE (args[i].value) == REG
659 || (GET_CODE (args[i].value) == SUBREG
660 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
661 && args[i].mode != BLKmode
662 && rtx_cost (args[i].value, SET) > 2
663 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
664 || preserve_subexpressions_p ()))
665 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
669 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
671 /* The argument list is the property of the called routine and it
672 may clobber it. If the fixed area has been used for previous
673 parameters, we must save and restore it. */
/* NOTE(review): return type, several declarations, braces, and the
   plus_constant offset expressions are missing from this extract.
   Visible flow: find the in-use span of the register-parm area via
   stack_usage_map, then copy it either to a pseudo (if a suitable
   integer mode exists and alignment permits) or to a BLKmode stack
   temporary. */
675 save_fixed_argument_area (reg_parm_stack_space, argblock,
676 low_to_save, high_to_save)
677 int reg_parm_stack_space;
683 rtx save_area = NULL_RTX;
685 /* Compute the boundary of the that needs to be saved, if any. */
686 #ifdef ARGS_GROW_DOWNWARD
687 for (i = 0; i < reg_parm_stack_space + 1; i++)
689 for (i = 0; i < reg_parm_stack_space; i++)
692 if (i >= highest_outgoing_arg_in_use
693 || stack_usage_map[i] == 0)
696 if (*low_to_save == -1)
702 if (*low_to_save >= 0)
704 int num_to_save = *high_to_save - *low_to_save + 1;
705 enum machine_mode save_mode
706 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
709 /* If we don't have the required alignment, must do this in BLKmode. */
710 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
711 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
714 #ifdef ARGS_GROW_DOWNWARD
715 stack_area = gen_rtx_MEM (save_mode,
716 memory_address (save_mode,
717 plus_constant (argblock,
720 stack_area = gen_rtx_MEM (save_mode,
721 memory_address (save_mode,
722 plus_constant (argblock,
725 if (save_mode == BLKmode)
727 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
728 emit_block_move (validize_mem (save_area), stack_area,
729 GEN_INT (num_to_save),
730 PARM_BOUNDARY / BITS_PER_UNIT);
734 save_area = gen_reg_rtx (save_mode);
735 emit_move_insn (save_area, stack_area);
/* Inverse of save_fixed_argument_area: copy the saved register-parm
   area back onto the stack after the call.  NOTE(review): declarations,
   braces, and the plus_constant offset expressions are missing from
   this extract. */
742 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
748 enum machine_mode save_mode = GET_MODE (save_area);
749 #ifdef ARGS_GROW_DOWNWARD
751 = gen_rtx_MEM (save_mode,
752 memory_address (save_mode,
753 plus_constant (argblock,
757 = gen_rtx_MEM (save_mode,
758 memory_address (save_mode,
759 plus_constant (argblock,
/* A register save is restored with a move; a BLKmode save needs a
   block move of high_to_save - low_to_save + 1 bytes. */
763 if (save_mode != BLKmode)
764 emit_move_insn (stack_area, save_area);
766 emit_block_move (stack_area, validize_mem (save_area),
767 GEN_INT (high_to_save - low_to_save + 1),
768 PARM_BOUNDARY / BITS_PER_UNIT);
772 /* If any elements in ARGS refer to parameters that are to be passed in
773 registers, but not in memory, and whose alignment does not permit a
774 direct copy into registers. Copy the values into a group of pseudos
775 which we will later copy into the appropriate hard registers.
777 Pseudos for each unaligned argument will be stored into the array
778 args[argnum].aligned_regs. The caller is responsible for deallocating
779 the aligned_regs array if it is nonzero. */
/* NOTE(review): braces and some extract_bit_field arguments are missing
   from this extract; the loop fragments below show the word-by-word
   bit-field copy into freshly allocated pseudos. */
782 store_unaligned_arguments_into_pseudos (args, num_actuals)
783 struct arg_data *args;
788 for (i = 0; i < num_actuals; i++)
789 if (args[i].reg != 0 && ! args[i].pass_on_stack
790 && args[i].mode == BLKmode
791 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
792 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
794 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
795 int big_endian_correction = 0;
797 args[i].n_aligned_regs
798 = args[i].partial ? args[i].partial
799 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
801 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
802 * args[i].n_aligned_regs);
804 /* Structures smaller than a word are aligned to the least
805 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
806 this means we must skip the empty high order bytes when
807 calculating the bit offset. */
808 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
809 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
811 for (j = 0; j < args[i].n_aligned_regs; j++)
813 rtx reg = gen_reg_rtx (word_mode);
814 rtx word = operand_subword_force (args[i].value, j, BLKmode);
815 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
816 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
818 args[i].aligned_regs[j] = reg;
820 /* There is no need to restrict this code to loading items
821 in TYPE_ALIGN sized hunks. The bitfield instructions can
822 load up entire word sized registers efficiently.
824 ??? This may not be needed anymore.
825 We use to emit a clobber here but that doesn't let later
826 passes optimize the instructions we emit. By storing 0 into
827 the register later passes know the first AND to zero out the
828 bitfield being set in the register is unnecessary. The store
829 of 0 will be deleted as will at least the first AND. */
831 emit_move_insn (reg, const0_rtx);
833 bytes -= bitsize / BITS_PER_UNIT;
834 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
835 extract_bit_field (word, bitsize, 0, 1,
838 bitalign / BITS_PER_UNIT,
840 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
845 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
848 NUM_ACTUALS is the total number of parameters.
850 N_NAMED_ARGS is the total number of named arguments.
852 FNDECL is the tree code for the target of this call (if known)
854 ARGS_SO_FAR holds state needed by the target to know where to place
857 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
858 for arguments which are passed in registers.
860 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
861 and may be modified by this routine.
863 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
864 flags which may may be modified by this routine. */
/* NOTE(review): this is the largest helper and many of its lines are
   missing from this extract (braces, else-arms, several declarations,
   locate_and_pad_parm arguments).  The surviving fragments below show
   the per-argument classification loop. */
867 initialize_argument_information (num_actuals, args, args_size, n_named_args,
868 actparms, fndecl, args_so_far,
869 reg_parm_stack_space, old_stack_level,
870 old_pending_adj, must_preallocate, is_const)
871 int num_actuals ATTRIBUTE_UNUSED;
872 struct arg_data *args;
873 struct args_size *args_size;
874 int n_named_args ATTRIBUTE_UNUSED;
877 CUMULATIVE_ARGS *args_so_far;
878 int reg_parm_stack_space;
879 rtx *old_stack_level;
880 int *old_pending_adj;
881 int *must_preallocate;
884 /* 1 if scanning parms front to back, -1 if scanning back to front. */
887 /* Count arg position in order args appear. */
893 args_size->constant = 0;
896 /* In this loop, we consider args in the order they are written.
897 We fill up ARGS from the front or from the back if necessary
898 so that in any case the first arg to be pushed ends up at the front. */
900 #ifdef PUSH_ARGS_REVERSED
901 i = num_actuals - 1, inc = -1;
902 /* In this case, must reverse order of args
903 so that we compute and push the last arg first. */
908 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
909 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
911 tree type = TREE_TYPE (TREE_VALUE (p));
913 enum machine_mode mode;
915 args[i].tree_value = TREE_VALUE (p);
917 /* Replace erroneous argument with constant zero. */
918 if (type == error_mark_node || TYPE_SIZE (type) == 0)
919 args[i].tree_value = integer_zero_node, type = integer_type_node;
921 /* If TYPE is a transparent union, pass things the way we would
922 pass the first field of the union. We have already verified that
923 the modes are the same. */
924 if (TYPE_TRANSPARENT_UNION (type))
925 type = TREE_TYPE (TYPE_FIELDS (type));
927 /* Decide where to pass this arg.
929 args[i].reg is nonzero if all or part is passed in registers.
931 args[i].partial is nonzero if part but not all is passed in registers,
932 and the exact value says how many words are passed in registers.
934 args[i].pass_on_stack is nonzero if the argument must at least be
935 computed on the stack. It may then be loaded back into registers
936 if args[i].reg is nonzero.
938 These decisions are driven by the FUNCTION_... macros and must agree
939 with those made by function.c. */
941 /* See if this argument should be passed by invisible reference. */
942 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
943 && contains_placeholder_p (TYPE_SIZE (type)))
944 || TREE_ADDRESSABLE (type)
945 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
946 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
947 type, argpos < n_named_args)
951 /* If we're compiling a thunk, pass through invisible
952 references instead of making a copy. */
953 if (current_function_is_thunk
954 #ifdef FUNCTION_ARG_CALLEE_COPIES
955 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
956 type, argpos < n_named_args)
957 /* If it's in a register, we must make a copy of it too. */
958 /* ??? Is this a sufficient test? Is there a better one? */
959 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
960 && REG_P (DECL_RTL (args[i].tree_value)))
961 && ! TREE_ADDRESSABLE (type))
965 /* C++ uses a TARGET_EXPR to indicate that we want to make a
966 new object from the argument. If we are passing by
967 invisible reference, the callee will do that for us, so we
968 can strip off the TARGET_EXPR. This is not always safe,
969 but it is safe in the only case where this is a useful
970 optimization; namely, when the argument is a plain object.
971 In that case, the frontend is just asking the backend to
972 make a bitwise copy of the argument. */
974 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
975 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
976 (args[i].tree_value, 1)))
978 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
979 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
981 args[i].tree_value = build1 (ADDR_EXPR,
982 build_pointer_type (type),
984 type = build_pointer_type (type);
988 /* We make a copy of the object and pass the address to the
989 function being called. */
992 if (TYPE_SIZE (type) == 0
993 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
994 || (flag_stack_check && ! STACK_CHECK_BUILTIN
995 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
996 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
997 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
999 /* This is a variable-sized object. Make space on the stack
1001 rtx size_rtx = expr_size (TREE_VALUE (p));
1003 if (*old_stack_level == 0)
1005 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1006 *old_pending_adj = pending_stack_adjust;
1007 pending_stack_adjust = 0;
1010 copy = gen_rtx_MEM (BLKmode,
1011 allocate_dynamic_stack_space (size_rtx,
1013 TYPE_ALIGN (type)));
1017 int size = int_size_in_bytes (type);
1018 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1021 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1023 store_expr (args[i].tree_value, copy, 0);
1026 args[i].tree_value = build1 (ADDR_EXPR,
1027 build_pointer_type (type),
1028 make_tree (type, copy));
1029 type = build_pointer_type (type);
/* Promote the argument mode if the target wants wider args. */
1033 mode = TYPE_MODE (type);
1034 unsignedp = TREE_UNSIGNED (type);
1036 #ifdef PROMOTE_FUNCTION_ARGS
1037 mode = promote_mode (type, mode, &unsignedp, 1);
1040 args[i].unsignedp = unsignedp;
1041 args[i].mode = mode;
1042 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1043 argpos < n_named_args);
1044 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1047 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1048 argpos < n_named_args);
1051 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1053 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1054 it means that we are to pass this arg in the register(s) designated
1055 by the PARALLEL, but also to pass it in the stack. */
1056 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1057 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1058 args[i].pass_on_stack = 1;
1060 /* If this is an addressable type, we must preallocate the stack
1061 since we must evaluate the object into its final location.
1063 If this is to be passed in both registers and the stack, it is simpler
1065 if (TREE_ADDRESSABLE (type)
1066 || (args[i].pass_on_stack && args[i].reg != 0))
1067 *must_preallocate = 1;
1069 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1070 we cannot consider this function call constant. */
1071 if (TREE_ADDRESSABLE (type))
1074 /* Compute the stack-size of this argument. */
1075 if (args[i].reg == 0 || args[i].partial != 0
1076 || reg_parm_stack_space > 0
1077 || args[i].pass_on_stack)
1078 locate_and_pad_parm (mode, type,
1079 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1084 fndecl, args_size, &args[i].offset,
1087 #ifndef ARGS_GROW_DOWNWARD
1088 args[i].slot_offset = *args_size;
1091 /* If a part of the arg was put into registers,
1092 don't include that part in the amount pushed. */
1093 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1094 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1095 / (PARM_BOUNDARY / BITS_PER_UNIT)
1096 * (PARM_BOUNDARY / BITS_PER_UNIT));
1098 /* Update ARGS_SIZE, the total stack space for args so far. */
1100 args_size->constant += args[i].size.constant;
1101 if (args[i].size.var)
1103 ADD_PARM_SIZE (*args_size, args[i].size.var);
1106 /* Since the slot offset points to the bottom of the slot,
1107 we must record it after incrementing if the args grow down. */
1108 #ifdef ARGS_GROW_DOWNWARD
1109 args[i].slot_offset = *args_size;
1111 args[i].slot_offset.constant = -args_size->constant;
1114 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1118 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1119 have been used, etc. */
1121 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1122 argpos < n_named_args);
1126 /* Update ARGS_SIZE to contain the total size for the argument block.
1127 Return the original constant component of the argument block's size.
1129 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1130 for arguments passed in registers. */
/* NOTE(review): braces, the variable-size vs constant-size branch
   structure, and some #else/#endif partners are missing from this
   extract; the fragments below show the rounding and the
   reg-parm-stack-space adjustments. */
1133 compute_argument_block_size (reg_parm_stack_space, args_size)
1134 int reg_parm_stack_space;
1135 struct args_size *args_size;
1137 int unadjusted_args_size = args_size->constant;
1139 /* Compute the actual size of the argument block required. The variable
1140 and constant sizes must be combined, the size may have to be rounded,
1141 and there may be a minimum required size. */
/* Variable-size case: fold everything into args_size->var and round
   it up to the preferred stack boundary. */
1145 args_size->var = ARGS_SIZE_TREE (*args_size);
1146 args_size->constant = 0;
1148 #ifdef PREFERRED_STACK_BOUNDARY
1149 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1150 args_size->var = round_up (args_size->var, STACK_BYTES);
1153 if (reg_parm_stack_space > 0)
1156 = size_binop (MAX_EXPR, args_size->var,
1157 size_int (reg_parm_stack_space));
1159 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1160 /* The area corresponding to register parameters is not to count in
1161 the size of the block we need. So make the adjustment. */
1163 = size_binop (MINUS_EXPR, args_size->var,
1164 size_int (reg_parm_stack_space));
/* Constant-size case: round the constant component, honoring any
   pending stack adjustment. */
1170 #ifdef PREFERRED_STACK_BOUNDARY
1171 args_size->constant = (((args_size->constant
1172 + pending_stack_adjust
1174 / STACK_BYTES * STACK_BYTES)
1175 - pending_stack_adjust);
1178 args_size->constant = MAX (args_size->constant,
1179 reg_parm_stack_space);
1181 #ifdef MAYBE_REG_PARM_STACK_SPACE
1182 if (reg_parm_stack_space == 0)
1183 args_size->constant = 0;
1186 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1187 args_size->constant -= reg_parm_stack_space;
1190 return unadjusted_args_size;
1193 /* Precompute parameters has needed for a function call.
1195 IS_CONST indicates the target function is a pure function.
1197 MUST_PREALLOCATE indicates that we must preallocate stack space for
1198 any stack arguments.
1200 NUM_ACTUALS is the number of arguments.
1202 ARGS is an array containing information for each argument; this routine
1203 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1205 ARGS_SIZE contains information about the size of the arg list. */
1208 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1210 int must_preallocate;
1212 struct arg_data *args;
1213 struct args_size *args_size;
1217 /* If this function call is cse'able, precompute all the parameters.
1218 Note that if the parameter is constructed into a temporary, this will
1219 cause an additional copy because the parameter will be constructed
1220 into a temporary location and then copied into the outgoing arguments.
1221 If a parameter contains a call to alloca and this function uses the
1222 stack, precompute the parameter. */
1224 /* If we preallocated the stack space, and some arguments must be passed
1225 on the stack, then we must precompute any parameter which contains a
1226 function call which will store arguments on the stack.
1227 Otherwise, evaluating the parameter may clobber previous parameters
1228 which have already been stored into the stack. */
1230 for (i = 0; i < num_actuals; i++)
1232 || ((args_size->var != 0 || args_size->constant != 0)
1233 && calls_function (args[i].tree_value, 1))
1234 || (must_preallocate
1235 && (args_size->var != 0 || args_size->constant != 0)
1236 && calls_function (args[i].tree_value, 0)))
1238 /* If this is an addressable type, we cannot pre-evaluate it. */
1239 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1244 args[i].initial_value = args[i].value
1245 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1247 preserve_temp_slots (args[i].value);
1250 /* ANSI doesn't require a sequence point here,
1251 but PCC has one, so this will avoid some problems. */
1254 args[i].initial_value = args[i].value
1255 = protect_from_queue (args[i].initial_value, 0);
1257 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1259 = convert_modes (args[i].mode,
1260 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1261 args[i].value, args[i].unsignedp);
1265 /* Given the current state of MUST_PREALLOCATE and information about
1266 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1267 compute and return the final value for MUST_PREALLOCATE. */
1270 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1271 int must_preallocate;
1273 struct arg_data *args;
1274 struct args_size *args_size;
1276 /* See if we have or want to preallocate stack space.
1278 If we would have to push a partially-in-regs parm
1279 before other stack parms, preallocate stack space instead.
1281 If the size of some parm is not a multiple of the required stack
1282 alignment, we must preallocate.
1284 If the total size of arguments that would otherwise create a copy in
1285 a temporary (such as a CALL) is more than half the total argument list
1286 size, preallocation is faster.
1288 Another reason to preallocate is if we have a machine (like the m88k)
1289 where stack alignment is required to be maintained between every
1290 pair of insns, not just when the call is made. However, we assume here
1291 that such machines either do not have push insns (and hence preallocation
1292 would occur anyway) or the problem is taken care of with
1295 if (! must_preallocate)
1297 int partial_seen = 0;
1298 int copy_to_evaluate_size = 0;
1301 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1303 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1305 else if (partial_seen && args[i].reg == 0)
1306 must_preallocate = 1;
1308 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1309 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1310 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1311 || TREE_CODE (args[i].tree_value) == COND_EXPR
1312 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1313 copy_to_evaluate_size
1314 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1317 if (copy_to_evaluate_size * 2 >= args_size->constant
1318 && args_size->constant > 0)
1319 must_preallocate = 1;
1321 return must_preallocate;
1324 /* If we preallocated stack space, compute the address of each argument
1325 and store it into the ARGS array.
1327 We need not ensure it is a valid memory address here; it will be
1328 validized when it is used.
1330 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1333 compute_argument_addresses (args, argblock, num_actuals)
1334 struct arg_data *args;
1340 rtx arg_reg = argblock;
1341 int i, arg_offset = 0;
1343 if (GET_CODE (argblock) == PLUS)
1344 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1346 for (i = 0; i < num_actuals; i++)
1348 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1349 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1352 /* Skip this parm if it will not be passed on the stack. */
1353 if (! args[i].pass_on_stack && args[i].reg != 0)
1356 if (GET_CODE (offset) == CONST_INT)
1357 addr = plus_constant (arg_reg, INTVAL (offset));
1359 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1361 addr = plus_constant (addr, arg_offset);
1362 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1365 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1367 if (GET_CODE (slot_offset) == CONST_INT)
1368 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1370 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1372 addr = plus_constant (addr, arg_offset);
1373 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1378 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1379 in a call instruction.
1381 FNDECL is the tree node for the target function. For an indirect call
1382 FNDECL will be NULL_TREE.
1384 EXP is the CALL_EXPR for this call. */
1387 rtx_for_function_call (fndecl, exp)
1393 /* Get the function to call, in the form of RTL. */
1396 /* If this is the first use of the function, see if we need to
1397 make an external definition for it. */
1398 if (! TREE_USED (fndecl))
1400 assemble_external (fndecl);
1401 TREE_USED (fndecl) = 1;
1404 /* Get a SYMBOL_REF rtx for the function address. */
1405 funexp = XEXP (DECL_RTL (fndecl), 0);
1408 /* Generate an rtx (probably a pseudo-register) for the address. */
1411 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1412 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1414 /* Check the function is executable. */
1415 if (current_function_check_memory_usage)
1416 emit_library_call (chkr_check_exec_libfunc, 1,
1424 /* Do the register loads required for any wholly-register parms or any
1425 parms which are passed both on the stack and in a register. Their
1426 expressions were already evaluated.
1428 Mark all register-parms as living through the call, putting these USE
1429 insns in the CALL_INSN_FUNCTION_USAGE field. */
1432 load_register_parameters (args, num_actuals, call_fusage)
1433 struct arg_data *args;
1439 #ifdef LOAD_ARGS_REVERSED
1440 for (i = num_actuals - 1; i >= 0; i--)
1442 for (i = 0; i < num_actuals; i++)
1445 rtx reg = args[i].reg;
1446 int partial = args[i].partial;
1451 /* Set to non-negative if must move a word at a time, even if just
1452 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1453 we just use a normal move insn. This value can be zero if the
1454 argument is a zero size structure with no fields. */
1455 nregs = (partial ? partial
1456 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1457 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1458 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1461 /* Handle calls that pass values in multiple non-contiguous
1462 locations. The Irix 6 ABI has examples of this. */
1464 if (GET_CODE (reg) == PARALLEL)
1466 emit_group_load (reg, args[i].value,
1467 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1468 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1472 /* If simple case, just do move. If normal partial, store_one_arg
1473 has already loaded the register for us. In all other cases,
1474 load the register(s) from memory. */
1476 else if (nregs == -1)
1477 emit_move_insn (reg, args[i].value);
1479 /* If we have pre-computed the values to put in the registers in
1480 the case of non-aligned structures, copy them in now. */
1482 else if (args[i].n_aligned_regs != 0)
1483 for (j = 0; j < args[i].n_aligned_regs; j++)
1484 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1485 args[i].aligned_regs[j]);
1487 else if (partial == 0 || args[i].pass_on_stack)
1488 move_block_to_reg (REGNO (reg),
1489 validize_mem (args[i].value), nregs,
1492 /* Handle calls that pass values in multiple non-contiguous
1493 locations. The Irix 6 ABI has examples of this. */
1494 if (GET_CODE (reg) == PARALLEL)
1495 use_group_regs (call_fusage, reg);
1496 else if (nregs == -1)
1497 use_reg (call_fusage, reg);
1499 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1504 /* Generate all the code for a function call
1505 and return an rtx for its value.
1506 Store the value in TARGET (specified as an rtx) if convenient.
1507 If the value is stored in TARGET then TARGET is returned.
1508 If IGNORE is nonzero, then we ignore the value of the function call. */
1511 expand_call (exp, target, ignore)
1516 /* List of actual parameters. */
1517 tree actparms = TREE_OPERAND (exp, 1);
1518 /* RTX for the function to be called. */
1520 /* Data type of the function. */
1522 /* Declaration of the function being called,
1523 or 0 if the function is computed (not known by name). */
1527 /* Register in which non-BLKmode value will be returned,
1528 or 0 if no value or if value is BLKmode. */
1530 /* Address where we should return a BLKmode value;
1531 0 if value not BLKmode. */
1532 rtx structure_value_addr = 0;
1533 /* Nonzero if that address is being passed by treating it as
1534 an extra, implicit first parameter. Otherwise,
1535 it is passed by being copied directly into struct_value_rtx. */
1536 int structure_value_addr_parm = 0;
1537 /* Size of aggregate value wanted, or zero if none wanted
1538 or if we are using the non-reentrant PCC calling convention
1539 or expecting the value in registers. */
1540 HOST_WIDE_INT struct_value_size = 0;
1541 /* Nonzero if called function returns an aggregate in memory PCC style,
1542 by returning the address of where to find it. */
1543 int pcc_struct_value = 0;
1545 /* Number of actual parameters in this call, including struct value addr. */
1547 /* Number of named args. Args after this are anonymous ones
1548 and they must all go on the stack. */
1551 /* Vector of information about each argument.
1552 Arguments are numbered in the order they will be pushed,
1553 not the order they are written. */
1554 struct arg_data *args;
1556 /* Total size in bytes of all the stack-parms scanned so far. */
1557 struct args_size args_size;
1558 /* Size of arguments before any adjustments (such as rounding). */
1559 int unadjusted_args_size;
1560 /* Data on reg parms scanned so far. */
1561 CUMULATIVE_ARGS args_so_far;
1562 /* Nonzero if a reg parm has been scanned. */
1564 /* Nonzero if this is an indirect function call. */
1566 /* Nonzero if we must avoid push-insns in the args for this call.
1567 If stack space is allocated for register parameters, but not by the
1568 caller, then it is preallocated in the fixed part of the stack frame.
1569 So the entire argument block must then be preallocated (i.e., we
1570 ignore PUSH_ROUNDING in that case). */
1572 #ifdef PUSH_ROUNDING
1573 int must_preallocate = 0;
1575 int must_preallocate = 1;
1578 /* Size of the stack reserved for parameter registers. */
1579 int reg_parm_stack_space = 0;
1581 /* Address of space preallocated for stack parms
1582 (on machines that lack push insns), or 0 if space not preallocated. */
1585 /* Nonzero if it is plausible that this is a call to alloca. */
1587 /* Nonzero if this is a call to malloc or a related function. */
1589 /* Nonzero if this is a call to setjmp or a related function. */
1591 /* Nonzero if this is a call to `longjmp'. */
1593 /* Nonzero if this is a call to an inline function. */
1594 int is_integrable = 0;
1595 /* Nonzero if this is a call to a `const' function.
1596 Note that only explicitly named functions are handled as `const' here. */
1598 /* Nonzero if this is a call to a `volatile' function. */
1599 int is_volatile = 0;
1600 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1601 /* Define the boundary of the register parm stack space that needs to be
1603 int low_to_save = -1, high_to_save;
1604 rtx save_area = 0; /* Place that it is saved */
1607 #ifdef ACCUMULATE_OUTGOING_ARGS
1608 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1609 char *initial_stack_usage_map = stack_usage_map;
1610 int old_stack_arg_under_construction;
1613 rtx old_stack_level = 0;
1614 int old_pending_adj = 0;
1615 int old_inhibit_defer_pop = inhibit_defer_pop;
1616 rtx call_fusage = 0;
1620 /* The value of the function call can be put in a hard register. But
1621 if -fcheck-memory-usage, code which invokes functions (and thus
1622 damages some hard registers) can be inserted before using the value.
1623 So, target is always a pseudo-register in that case. */
1624 if (current_function_check_memory_usage)
1627 /* See if we can find a DECL-node for the actual function.
1628 As a result, decide whether this is a call to an integrable function. */
1630 p = TREE_OPERAND (exp, 0);
1631 if (TREE_CODE (p) == ADDR_EXPR)
1633 fndecl = TREE_OPERAND (p, 0);
1634 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1639 && fndecl != current_function_decl
1640 && DECL_INLINE (fndecl)
1641 && DECL_SAVED_INSNS (fndecl)
1642 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
1644 else if (! TREE_ADDRESSABLE (fndecl))
1646 /* In case this function later becomes inlinable,
1647 record that there was already a non-inline call to it.
1649 Use abstraction instead of setting TREE_ADDRESSABLE
1651 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1654 warning_with_decl (fndecl, "can't inline call to `%s'");
1655 warning ("called from here");
1657 mark_addressable (fndecl);
1660 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1661 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1664 if (TREE_THIS_VOLATILE (fndecl))
1669 /* If we don't have specific function to call, see if we have a
1670 constant or `noreturn' function from the type. */
1673 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1674 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1677 #ifdef REG_PARM_STACK_SPACE
1678 #ifdef MAYBE_REG_PARM_STACK_SPACE
1679 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1681 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1685 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1686 if (reg_parm_stack_space > 0)
1687 must_preallocate = 1;
1690 /* Warn if this value is an aggregate type,
1691 regardless of which calling convention we are using for it. */
1692 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1693 warning ("function call has aggregate value");
1695 /* Set up a place to return a structure. */
1697 /* Cater to broken compilers. */
1698 if (aggregate_value_p (exp))
1700 /* This call returns a big structure. */
1703 #ifdef PCC_STATIC_STRUCT_RETURN
1705 pcc_struct_value = 1;
1706 /* Easier than making that case work right. */
1709 /* In case this is a static function, note that it has been
1711 if (! TREE_ADDRESSABLE (fndecl))
1712 mark_addressable (fndecl);
1716 #else /* not PCC_STATIC_STRUCT_RETURN */
1718 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1720 if (target && GET_CODE (target) == MEM)
1721 structure_value_addr = XEXP (target, 0);
1724 /* Assign a temporary to hold the value. */
1727 /* For variable-sized objects, we must be called with a target
1728 specified. If we were to allocate space on the stack here,
1729 we would have no way of knowing when to free it. */
1731 if (struct_value_size < 0)
1734 /* This DECL is just something to feed to mark_addressable;
1735 it doesn't get pushed. */
1736 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1737 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1738 mark_addressable (d);
1739 structure_value_addr = XEXP (DECL_RTL (d), 0);
1744 #endif /* not PCC_STATIC_STRUCT_RETURN */
1747 /* If called function is inline, try to integrate it. */
1752 #ifdef ACCUMULATE_OUTGOING_ARGS
1753 rtx before_call = get_last_insn ();
1756 temp = expand_inline_function (fndecl, actparms, target,
1757 ignore, TREE_TYPE (exp),
1758 structure_value_addr);
1760 /* If inlining succeeded, return. */
1761 if (temp != (rtx) (HOST_WIDE_INT) -1)
1763 #ifdef ACCUMULATE_OUTGOING_ARGS
1764 /* If the outgoing argument list must be preserved, push
1765 the stack before executing the inlined function if it
1768 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1769 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1772 if (stack_arg_under_construction || i >= 0)
1775 = before_call ? NEXT_INSN (before_call) : get_insns ();
1778 /* Look for a call in the inline function code.
1779 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1780 nonzero then there is a call and it is not necessary
1781 to scan the insns. */
1783 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1784 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1785 if (GET_CODE (insn) == CALL_INSN)
1790 /* Reserve enough stack space so that the largest
1791 argument list of any function call in the inline
1792 function does not overlap the argument list being
1793 evaluated. This is usually an overestimate because
1794 allocate_dynamic_stack_space reserves space for an
1795 outgoing argument list in addition to the requested
1796 space, but there is no way to ask for stack space such
1797 that an argument list of a certain length can be
1800 Add the stack space reserved for register arguments, if
1801 any, in the inline function. What is really needed is the
1802 largest value of reg_parm_stack_space in the inline
1803 function, but that is not available. Using the current
1804 value of reg_parm_stack_space is wrong, but gives
1805 correct results on all supported machines. */
1807 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1808 + reg_parm_stack_space);
1811 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1812 allocate_dynamic_stack_space (GEN_INT (adjust),
1813 NULL_RTX, BITS_PER_UNIT);
1816 emit_insns_before (seq, first_insn);
1817 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1822 /* If the result is equivalent to TARGET, return TARGET to simplify
1823 checks in store_expr. They can be equivalent but not equal in the
1824 case of a function that returns BLKmode. */
1825 if (temp != target && rtx_equal_p (temp, target))
1830 /* If inlining failed, mark FNDECL as needing to be compiled
1831 separately after all. If function was declared inline,
1833 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1834 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1836 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1837 warning ("called from here");
1839 mark_addressable (fndecl);
1842 function_call_count++;
1844 if (fndecl && DECL_NAME (fndecl))
1845 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1847 /* See if this is a call to a function that can return more than once
1848 or a call to longjmp or malloc. */
1849 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1850 &is_malloc, &may_be_alloca);
1853 current_function_calls_alloca = 1;
1855 /* Operand 0 is a pointer-to-function; get the type of the function. */
1856 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1857 if (! POINTER_TYPE_P (funtype))
1859 funtype = TREE_TYPE (funtype);
1861 /* When calling a const function, we must pop the stack args right away,
1862 so that the pop is deleted or moved with the call. */
1866 /* Don't let pending stack adjusts add up to too much.
1867 Also, do all pending adjustments now
1868 if there is any chance this might be a call to alloca. */
1870 if (pending_stack_adjust >= 32
1871 || (pending_stack_adjust > 0 && may_be_alloca))
1872 do_pending_stack_adjust ();
1874 /* Push the temporary stack slot level so that we can free any temporaries
1878 /* Start updating where the next arg would go.
1880 On some machines (such as the PA) indirect calls have a different
1881 calling convention than normal calls. The last argument in
1882 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1884 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1886 /* If struct_value_rtx is 0, it means pass the address
1887 as if it were an extra parameter. */
1888 if (structure_value_addr && struct_value_rtx == 0)
1890 /* If structure_value_addr is a REG other than
1891 virtual_outgoing_args_rtx, we can use always use it. If it
1892 is not a REG, we must always copy it into a register.
1893 If it is virtual_outgoing_args_rtx, we must copy it to another
1894 register in some cases. */
1895 rtx temp = (GET_CODE (structure_value_addr) != REG
1896 #ifdef ACCUMULATE_OUTGOING_ARGS
1897 || (stack_arg_under_construction
1898 && structure_value_addr == virtual_outgoing_args_rtx)
1900 ? copy_addr_to_reg (structure_value_addr)
1901 : structure_value_addr);
1904 = tree_cons (error_mark_node,
1905 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1908 structure_value_addr_parm = 1;
1911 /* Count the arguments and set NUM_ACTUALS. */
1912 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1915 /* Compute number of named args.
1916 Normally, don't include the last named arg if anonymous args follow.
1917 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1918 (If no anonymous args follow, the result of list_length is actually
1919 one too large. This is harmless.)
1921 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1922 zero, this machine will be able to place unnamed args that were passed in
1923 registers into the stack. So treat all args as named. This allows the
1924 insns emitting for a specific argument list to be independent of the
1925 function declaration.
1927 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1928 way to pass unnamed args in registers, so we must force them into
1931 if ((STRICT_ARGUMENT_NAMING
1932 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1933 && TYPE_ARG_TYPES (funtype) != 0)
1935 = (list_length (TYPE_ARG_TYPES (funtype))
1936 /* Don't include the last named arg. */
1937 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1938 /* Count the struct value address, if it is passed as a parm. */
1939 + structure_value_addr_parm);
1941 /* If we know nothing, treat all args as named. */
1942 n_named_args = num_actuals;
1944 /* Make a vector to hold all the information about each arg. */
1945 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1946 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1948 /* Build up entries inthe ARGS array, compute the size of the arguments
1949 into ARGS_SIZE, etc. */
1950 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1951 actparms, fndecl, &args_so_far,
1952 reg_parm_stack_space, &old_stack_level,
1953 &old_pending_adj, &must_preallocate,
1956 #ifdef FINAL_REG_PARM_STACK_SPACE
1957 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1963 /* If this function requires a variable-sized argument list, don't try to
1964 make a cse'able block for this call. We may be able to do this
1965 eventually, but it is too complicated to keep track of what insns go
1966 in the cse'able block and which don't. */
1969 must_preallocate = 1;
1972 /* Compute the actual size of the argument block required. The variable
1973 and constant sizes must be combined, the size may have to be rounded,
1974 and there may be a minimum required size. */
1975 unadjusted_args_size
1976 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1978 /* Now make final decision about preallocating stack space. */
1979 must_preallocate = finalize_must_preallocate (must_preallocate,
1980 num_actuals, args, &args_size);
1982 /* If the structure value address will reference the stack pointer, we must
1983 stabilize it. We don't need to do this if we know that we are not going
1984 to adjust the stack pointer in processing this call. */
1986 if (structure_value_addr
1987 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1990 #ifndef ACCUMULATE_OUTGOING_ARGS
1991 || args_size.constant
1994 structure_value_addr = copy_to_reg (structure_value_addr);
1996 /* Precompute any arguments as needed. */
1997 precompute_arguments (is_const, must_preallocate, num_actuals,
2000 /* Now we are about to start emitting insns that can be deleted
2001 if a libcall is deleted. */
2002 if (is_const || is_malloc)
2005 /* If we have no actual push instructions, or shouldn't use them,
2006 make space for all args right now. */
2008 if (args_size.var != 0)
2010 if (old_stack_level == 0)
2012 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2013 old_pending_adj = pending_stack_adjust;
2014 pending_stack_adjust = 0;
2015 #ifdef ACCUMULATE_OUTGOING_ARGS
2016 /* stack_arg_under_construction says whether a stack arg is
2017 being constructed at the old stack level. Pushing the stack
2018 gets a clean outgoing argument block. */
2019 old_stack_arg_under_construction = stack_arg_under_construction;
2020 stack_arg_under_construction = 0;
2023 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2027 /* Note that we must go through the motions of allocating an argument
2028 block even if the size is zero because we may be storing args
2029 in the area reserved for register arguments, which may be part of
2032 int needed = args_size.constant;
2034 /* Store the maximum argument space used. It will be pushed by
2035 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2038 if (needed > current_function_outgoing_args_size)
2039 current_function_outgoing_args_size = needed;
2041 if (must_preallocate)
2043 #ifdef ACCUMULATE_OUTGOING_ARGS
2044 /* Since the stack pointer will never be pushed, it is possible for
2045 the evaluation of a parm to clobber something we have already
2046 written to the stack. Since most function calls on RISC machines
2047 do not use the stack, this is uncommon, but must work correctly.
2049 Therefore, we save any area of the stack that was already written
2050 and that we are using. Here we set up to do this by making a new
2051 stack usage map from the old one. The actual save will be done
2054 Another approach might be to try to reorder the argument
2055 evaluations to avoid this conflicting stack usage. */
2057 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2058 /* Since we will be writing into the entire argument area, the
2059 map must be allocated for its entire size, not just the part that
2060 is the responsibility of the caller. */
2061 needed += reg_parm_stack_space;
2064 #ifdef ARGS_GROW_DOWNWARD
2065 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2068 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2071 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2073 if (initial_highest_arg_in_use)
2074 bcopy (initial_stack_usage_map, stack_usage_map,
2075 initial_highest_arg_in_use);
2077 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2078 bzero (&stack_usage_map[initial_highest_arg_in_use],
2079 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2082 /* The address of the outgoing argument list must not be copied to a
2083 register here, because argblock would be left pointing to the
2084 wrong place after the call to allocate_dynamic_stack_space below.
2087 argblock = virtual_outgoing_args_rtx;
2089 #else /* not ACCUMULATE_OUTGOING_ARGS */
2090 if (inhibit_defer_pop == 0)
2092 /* Try to reuse some or all of the pending_stack_adjust
2093 to get this space. Maybe we can avoid any pushing. */
2094 if (needed > pending_stack_adjust)
2096 needed -= pending_stack_adjust;
2097 pending_stack_adjust = 0;
2101 pending_stack_adjust -= needed;
2105 /* Special case this because overhead of `push_block' in this
2106 case is non-trivial. */
2108 argblock = virtual_outgoing_args_rtx;
2110 argblock = push_block (GEN_INT (needed), 0, 0);
2112 /* We only really need to call `copy_to_reg' in the case where push
2113 insns are going to be used to pass ARGBLOCK to a function
2114 call in ARGS. In that case, the stack pointer changes value
2115 from the allocation point to the call point, and hence
2116 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2117 But might as well always do it. */
2118 argblock = copy_to_reg (argblock);
2119 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2123 #ifdef ACCUMULATE_OUTGOING_ARGS
2124 /* The save/restore code in store_one_arg handles all cases except one:
2125 a constructor call (including a C function returning a BLKmode struct)
2126 to initialize an argument. */
2127 if (stack_arg_under_construction)
2129 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2130 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2132 rtx push_size = GEN_INT (args_size.constant);
2134 if (old_stack_level == 0)
2136 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2137 old_pending_adj = pending_stack_adjust;
2138 pending_stack_adjust = 0;
2139 /* stack_arg_under_construction says whether a stack arg is
2140 being constructed at the old stack level. Pushing the stack
2141 gets a clean outgoing argument block. */
2142 old_stack_arg_under_construction = stack_arg_under_construction;
2143 stack_arg_under_construction = 0;
2144 /* Make a new map for the new argument list. */
2145 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2146 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2147 highest_outgoing_arg_in_use = 0;
2149 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2151 /* If argument evaluation might modify the stack pointer, copy the
2152 address of the argument list to a register. */
2153 for (i = 0; i < num_actuals; i++)
2154 if (args[i].pass_on_stack)
2156 argblock = copy_addr_to_reg (argblock);
2161 compute_argument_addresses (args, argblock, num_actuals);
2163 #ifdef PUSH_ARGS_REVERSED
2164 #ifdef PREFERRED_STACK_BOUNDARY
2165 /* If we push args individually in reverse order, perform stack alignment
2166 before the first push (the last arg). */
2168 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2172 /* Don't try to defer pops if preallocating, not even from the first arg,
2173 since ARGBLOCK probably refers to the SP. */
2177 funexp = rtx_for_function_call (fndecl, exp);
2179 /* Figure out the register where the value, if any, will come back. */
2181 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2182 && ! structure_value_addr)
2184 if (pcc_struct_value)
2185 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2188 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2191 /* Precompute all register parameters. It isn't safe to compute anything
2192 once we have started filling any specific hard regs. */
2193 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2195 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2197 /* Save the fixed argument area if it's part of the caller's frame and
2198 is clobbered by argument setup for this call. */
2199 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2200 &low_to_save, &high_to_save);
2204 /* Now store (and compute if necessary) all non-register parms.
2205 These come before register parms, since they can require block-moves,
2206 which could clobber the registers used for register parms.
2207 Parms which have partial registers are not stored here,
2208 but we do preallocate space here if they want that. */
2210 for (i = 0; i < num_actuals; i++)
2211 if (args[i].reg == 0 || args[i].pass_on_stack)
2212 store_one_arg (&args[i], argblock, may_be_alloca,
2213 args_size.var != 0, reg_parm_stack_space);
2215 /* If we have a parm that is passed in registers but not in memory
2216 and whose alignment does not permit a direct copy into registers,
2217 make a group of pseudos that correspond to each register that we
2219 if (STRICT_ALIGNMENT)
2220 store_unaligned_arguments_into_pseudos (args, num_actuals);
2222 /* Now store any partially-in-registers parm.
2223 This is the last place a block-move can happen. */
2225 for (i = 0; i < num_actuals; i++)
2226 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2227 store_one_arg (&args[i], argblock, may_be_alloca,
2228 args_size.var != 0, reg_parm_stack_space);
2230 #ifndef PUSH_ARGS_REVERSED
2231 #ifdef PREFERRED_STACK_BOUNDARY
2232 /* If we pushed args in forward order, perform stack alignment
2233 after pushing the last arg. */
2235 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2239 /* If register arguments require space on the stack and stack space
2240 was not preallocated, allocate stack space here for arguments
2241 passed in registers. */
2242 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2243 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2244 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2247 /* Pass the function the address in which to return a structure value. */
2248 if (structure_value_addr && ! structure_value_addr_parm)
2250 emit_move_insn (struct_value_rtx,
2252 force_operand (structure_value_addr,
2255 /* Mark the memory for the aggregate as write-only. */
2256 if (current_function_check_memory_usage)
2257 emit_library_call (chkr_set_right_libfunc, 1,
2259 structure_value_addr, ptr_mode,
2260 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2261 GEN_INT (MEMORY_USE_WO),
2262 TYPE_MODE (integer_type_node));
2264 if (GET_CODE (struct_value_rtx) == REG)
2265 use_reg (&call_fusage, struct_value_rtx);
2268 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2270 load_register_parameters (args, num_actuals, &call_fusage);
2272 /* Perform postincrements before actually calling the function. */
2275 /* All arguments and registers used for the call must be set up by now! */
2277 /* Generate the actual call instruction. */
2278 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2279 args_size.constant, struct_value_size,
2280 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2281 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2283 /* If call is cse'able, make appropriate pair of reg-notes around it.
2284 Test valreg so we don't crash; may safely ignore `const'
2285 if return type is void. Disable for PARALLEL return values, because
2286 we have no way to move such values into a pseudo register. */
2287 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2290 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2293 /* Mark the return value as a pointer if needed. */
2294 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2296 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2297 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2300 /* Construct an "equal form" for the value which mentions all the
2301 arguments in order as well as the function name. */
2302 #ifdef PUSH_ARGS_REVERSED
2303 for (i = 0; i < num_actuals; i++)
2304 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2306 for (i = num_actuals - 1; i >= 0; i--)
2307 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2309 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2311 insns = get_insns ();
2314 emit_libcall_block (insns, temp, valreg, note);
2320 /* Otherwise, just write out the sequence without a note. */
2321 rtx insns = get_insns ();
2328 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2331 /* The return value from a malloc-like function is a pointer. */
2332 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2333 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2335 emit_move_insn (temp, valreg);
2337 /* The return value from a malloc-like function can not alias
2339 last = get_last_insn ();
2341 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2343 /* Write out the sequence. */
2344 insns = get_insns ();
2350 /* For calls to `setjmp', etc., inform flow.c it should complain
2351 if nonvolatile values are live. */
2355 emit_note (name, NOTE_INSN_SETJMP);
2356 current_function_calls_setjmp = 1;
2360 current_function_calls_longjmp = 1;
2362 /* Notice functions that cannot return.
2363 If optimizing, insns emitted below will be dead.
2364 If not optimizing, they will exist, which is useful
2365 if the user uses the `return' command in the debugger. */
2367 if (is_volatile || is_longjmp)
2370 /* If value type not void, return an rtx for the value. */
2372 /* If there are cleanups to be called, don't use a hard reg as target.
2373 We need to double check this and see if it matters anymore. */
2374 if (any_pending_cleanups (1)
2375 && target && REG_P (target)
2376 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2379 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2382 target = const0_rtx;
2384 else if (structure_value_addr)
2386 if (target == 0 || GET_CODE (target) != MEM)
2388 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2389 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2390 structure_value_addr));
2391 MEM_SET_IN_STRUCT_P (target,
2392 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2395 else if (pcc_struct_value)
2397 /* This is the special C++ case where we need to
2398 know what the true target was. We take care to
2399 never use this value more than once in one expression. */
2400 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2401 copy_to_reg (valreg));
2402 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2404 /* Handle calls that return values in multiple non-contiguous locations.
2405 The Irix 6 ABI has examples of this. */
2406 else if (GET_CODE (valreg) == PARALLEL)
2408 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2412 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2413 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2414 preserve_temp_slots (target);
2417 emit_group_store (target, valreg, bytes,
2418 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2420 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2421 && GET_MODE (target) == GET_MODE (valreg))
2422 /* TARGET and VALREG cannot be equal at this point because the latter
2423 would not have REG_FUNCTION_VALUE_P true, while the former would if
2424 it were referring to the same register.
2426 If they refer to the same register, this move will be a no-op, except
2427 when function inlining is being done. */
2428 emit_move_insn (target, valreg);
2429 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2430 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2432 target = copy_to_reg (valreg);
2434 #ifdef PROMOTE_FUNCTION_RETURN
2435 /* If we promoted this return value, make the proper SUBREG. TARGET
2436 might be const0_rtx here, so be careful. */
2437 if (GET_CODE (target) == REG
2438 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2439 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2441 tree type = TREE_TYPE (exp);
2442 int unsignedp = TREE_UNSIGNED (type);
2444 /* If we don't promote as expected, something is wrong. */
2445 if (GET_MODE (target)
2446 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2449 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2450 SUBREG_PROMOTED_VAR_P (target) = 1;
2451 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2455 /* If size of args is variable or this was a constructor call for a stack
2456 argument, restore saved stack-pointer value. */
2458 if (old_stack_level)
2460 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2461 pending_stack_adjust = old_pending_adj;
2462 #ifdef ACCUMULATE_OUTGOING_ARGS
2463 stack_arg_under_construction = old_stack_arg_under_construction;
2464 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2465 stack_usage_map = initial_stack_usage_map;
2468 #ifdef ACCUMULATE_OUTGOING_ARGS
2471 #ifdef REG_PARM_STACK_SPACE
2473 restore_fixed_argument_area (save_area, argblock,
2474 high_to_save, low_to_save);
2477 /* If we saved any argument areas, restore them. */
2478 for (i = 0; i < num_actuals; i++)
2479 if (args[i].save_area)
2481 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2483 = gen_rtx_MEM (save_mode,
2484 memory_address (save_mode,
2485 XEXP (args[i].stack_slot, 0)));
2487 if (save_mode != BLKmode)
2488 emit_move_insn (stack_area, args[i].save_area);
2490 emit_block_move (stack_area, validize_mem (args[i].save_area),
2491 GEN_INT (args[i].size.constant),
2492 PARM_BOUNDARY / BITS_PER_UNIT);
2495 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2496 stack_usage_map = initial_stack_usage_map;
2500 /* If this was alloca, record the new stack level for nonlocal gotos.
2501 Check for the handler slots since we might not have a save area
2502 for non-local gotos. */
2504 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2505 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2509 /* Free up storage we no longer need. */
2510 for (i = 0; i < num_actuals; ++i)
2511 if (args[i].aligned_regs)
2512 free (args[i].aligned_regs);
2517 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2518 (emitting the queue unless NO_QUEUE is nonzero),
2519 for a value of mode OUTMODE,
2520 with NARGS different arguments, passed as alternating rtx values
2521 and machine_modes to convert them to.
2522 The rtx values should have been passed through protect_from_queue already.
2524 NO_QUEUE will be true if and only if the library call is a `const' call
2525 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2526 to the variable is_const in expand_call.
2528 NO_QUEUE must be true for const calls, because if it isn't, then
2529 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2530 and will be lost if the libcall sequence is optimized away.
2532 NO_QUEUE must be false for non-const calls, because if it isn't, the
2533 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2534 optimized. For instance, the instruction scheduler may incorrectly
2535 move memory references across the non-const call. */
2538 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2541 #ifndef ANSI_PROTOTYPES
2544 enum machine_mode outmode;
2548 /* Total size in bytes of all the stack-parms scanned so far. */
2549 struct args_size args_size;
2550 /* Size of arguments before any adjustments (such as rounding). */
2551 struct args_size original_args_size;
2552 register int argnum;
2557 CUMULATIVE_ARGS args_so_far;
2558 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2559 struct args_size offset; struct args_size size; rtx save_area; };
2561 int old_inhibit_defer_pop = inhibit_defer_pop;
2562 rtx call_fusage = 0;
2563 int reg_parm_stack_space = 0;
2564 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2565 /* Define the boundary of the register parm stack space that needs to be
2567 int low_to_save = -1, high_to_save;
2568 rtx save_area = 0; /* Place that it is saved */
2571 #ifdef ACCUMULATE_OUTGOING_ARGS
2572 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2573 char *initial_stack_usage_map = stack_usage_map;
2577 #ifdef REG_PARM_STACK_SPACE
2578 /* Size of the stack reserved for parameter registers. */
2579 #ifdef MAYBE_REG_PARM_STACK_SPACE
2580 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2582 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2586 VA_START (p, nargs);
2588 #ifndef ANSI_PROTOTYPES
2589 orgfun = va_arg (p, rtx);
2590 no_queue = va_arg (p, int);
2591 outmode = va_arg (p, enum machine_mode);
2592 nargs = va_arg (p, int);
2597 /* Copy all the libcall-arguments out of the varargs data
2598 and into a vector ARGVEC.
2600 Compute how to pass each argument. We only support a very small subset
2601 of the full argument passing conventions to limit complexity here since
2602 library functions shouldn't have many args. */
2604 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2605 bzero ((char *) argvec, nargs * sizeof (struct arg));
2608 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2610 args_size.constant = 0;
2615 for (count = 0; count < nargs; count++)
2617 rtx val = va_arg (p, rtx);
2618 enum machine_mode mode = va_arg (p, enum machine_mode);
2620 /* We cannot convert the arg value to the mode the library wants here;
2621 must do it earlier where we know the signedness of the arg. */
2623 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2626 /* On some machines, there's no way to pass a float to a library fcn.
2627 Pass it as a double instead. */
2628 #ifdef LIBGCC_NEEDS_DOUBLE
2629 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2630 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2633 /* There's no need to call protect_from_queue, because
2634 either emit_move_insn or emit_push_insn will do that. */
2636 /* Make sure it is a reasonable operand for a move or push insn. */
2637 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2638 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2639 val = force_operand (val, NULL_RTX);
2641 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2642 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2644 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2645 be viewed as just an efficiency improvement. */
2646 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2647 emit_move_insn (slot, val);
2648 val = force_operand (XEXP (slot, 0), NULL_RTX);
2653 argvec[count].value = val;
2654 argvec[count].mode = mode;
2656 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2657 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2659 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2660 argvec[count].partial
2661 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2663 argvec[count].partial = 0;
2666 locate_and_pad_parm (mode, NULL_TREE,
2667 argvec[count].reg && argvec[count].partial == 0,
2668 NULL_TREE, &args_size, &argvec[count].offset,
2669 &argvec[count].size);
2671 if (argvec[count].size.var)
2674 if (reg_parm_stack_space == 0 && argvec[count].partial)
2675 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2677 if (argvec[count].reg == 0 || argvec[count].partial != 0
2678 || reg_parm_stack_space > 0)
2679 args_size.constant += argvec[count].size.constant;
2681 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2685 #ifdef FINAL_REG_PARM_STACK_SPACE
2686 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2690 /* If this machine requires an external definition for library
2691 functions, write one out. */
2692 assemble_external_libcall (fun);
2694 original_args_size = args_size;
2695 #ifdef PREFERRED_STACK_BOUNDARY
2696 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2697 / STACK_BYTES) * STACK_BYTES);
2700 args_size.constant = MAX (args_size.constant,
2701 reg_parm_stack_space);
2703 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2704 args_size.constant -= reg_parm_stack_space;
2707 if (args_size.constant > current_function_outgoing_args_size)
2708 current_function_outgoing_args_size = args_size.constant;
2710 #ifdef ACCUMULATE_OUTGOING_ARGS
2711 /* Since the stack pointer will never be pushed, it is possible for
2712 the evaluation of a parm to clobber something we have already
2713 written to the stack. Since most function calls on RISC machines
2714 do not use the stack, this is uncommon, but must work correctly.
2716 Therefore, we save any area of the stack that was already written
2717 and that we are using. Here we set up to do this by making a new
2718 stack usage map from the old one.
2720 Another approach might be to try to reorder the argument
2721 evaluations to avoid this conflicting stack usage. */
2723 needed = args_size.constant;
2725 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2726 /* Since we will be writing into the entire argument area, the
2727 map must be allocated for its entire size, not just the part that
2728 is the responsibility of the caller. */
2729 needed += reg_parm_stack_space;
2732 #ifdef ARGS_GROW_DOWNWARD
2733 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2736 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2739 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2741 if (initial_highest_arg_in_use)
2742 bcopy (initial_stack_usage_map, stack_usage_map,
2743 initial_highest_arg_in_use);
2745 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2746 bzero (&stack_usage_map[initial_highest_arg_in_use],
2747 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2750 /* The address of the outgoing argument list must not be copied to a
2751 register here, because argblock would be left pointing to the
2752 wrong place after the call to allocate_dynamic_stack_space below.
2755 argblock = virtual_outgoing_args_rtx;
2756 #else /* not ACCUMULATE_OUTGOING_ARGS */
2757 #ifndef PUSH_ROUNDING
2758 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2762 #ifdef PUSH_ARGS_REVERSED
2763 #ifdef PREFERRED_STACK_BOUNDARY
2764 /* If we push args individually in reverse order, perform stack alignment
2765 before the first push (the last arg). */
2767 anti_adjust_stack (GEN_INT (args_size.constant
2768 - original_args_size.constant));
2772 #ifdef PUSH_ARGS_REVERSED
2780 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2781 /* The argument list is the property of the called routine and it
2782 may clobber it. If the fixed area has been used for previous
2783 parameters, we must save and restore it.
2785 Here we compute the boundary of the that needs to be saved, if any. */
2787 #ifdef ARGS_GROW_DOWNWARD
2788 for (count = 0; count < reg_parm_stack_space + 1; count++)
2790 for (count = 0; count < reg_parm_stack_space; count++)
2793 if (count >= highest_outgoing_arg_in_use
2794 || stack_usage_map[count] == 0)
2797 if (low_to_save == -1)
2798 low_to_save = count;
2800 high_to_save = count;
2803 if (low_to_save >= 0)
2805 int num_to_save = high_to_save - low_to_save + 1;
2806 enum machine_mode save_mode
2807 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2810 /* If we don't have the required alignment, must do this in BLKmode. */
2811 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2812 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2813 save_mode = BLKmode;
2815 #ifdef ARGS_GROW_DOWNWARD
2816 stack_area = gen_rtx_MEM (save_mode,
2817 memory_address (save_mode,
2818 plus_constant (argblock,
2821 stack_area = gen_rtx_MEM (save_mode,
2822 memory_address (save_mode,
2823 plus_constant (argblock,
2826 if (save_mode == BLKmode)
2828 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2829 emit_block_move (validize_mem (save_area), stack_area,
2830 GEN_INT (num_to_save),
2831 PARM_BOUNDARY / BITS_PER_UNIT);
2835 save_area = gen_reg_rtx (save_mode);
2836 emit_move_insn (save_area, stack_area);
2841 /* Push the args that need to be pushed. */
2843 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2844 are to be pushed. */
2845 for (count = 0; count < nargs; count++, argnum += inc)
2847 register enum machine_mode mode = argvec[argnum].mode;
2848 register rtx val = argvec[argnum].value;
2849 rtx reg = argvec[argnum].reg;
2850 int partial = argvec[argnum].partial;
2851 #ifdef ACCUMULATE_OUTGOING_ARGS
2852 int lower_bound, upper_bound, i;
2855 if (! (reg != 0 && partial == 0))
2857 #ifdef ACCUMULATE_OUTGOING_ARGS
2858 /* If this is being stored into a pre-allocated, fixed-size, stack
2859 area, save any previous data at that location. */
2861 #ifdef ARGS_GROW_DOWNWARD
2862 /* stack_slot is negative, but we want to index stack_usage_map
2863 with positive values. */
2864 upper_bound = -argvec[argnum].offset.constant + 1;
2865 lower_bound = upper_bound - argvec[argnum].size.constant;
2867 lower_bound = argvec[argnum].offset.constant;
2868 upper_bound = lower_bound + argvec[argnum].size.constant;
2871 for (i = lower_bound; i < upper_bound; i++)
2872 if (stack_usage_map[i]
2873 /* Don't store things in the fixed argument area at this point;
2874 it has already been saved. */
2875 && i > reg_parm_stack_space)
2878 if (i != upper_bound)
2880 /* We need to make a save area. See what mode we can make it. */
2881 enum machine_mode save_mode
2882 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2885 = gen_rtx_MEM (save_mode,
2886 memory_address (save_mode,
2887 plus_constant (argblock, argvec[argnum].offset.constant)));
2888 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2889 emit_move_insn (argvec[argnum].save_area, stack_area);
2892 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2893 argblock, GEN_INT (argvec[argnum].offset.constant),
2894 reg_parm_stack_space);
2896 #ifdef ACCUMULATE_OUTGOING_ARGS
2897 /* Now mark the segment we just used. */
2898 for (i = lower_bound; i < upper_bound; i++)
2899 stack_usage_map[i] = 1;
2906 #ifndef PUSH_ARGS_REVERSED
2907 #ifdef PREFERRED_STACK_BOUNDARY
2908 /* If we pushed args in forward order, perform stack alignment
2909 after pushing the last arg. */
2911 anti_adjust_stack (GEN_INT (args_size.constant
2912 - original_args_size.constant));
2916 #ifdef PUSH_ARGS_REVERSED
2922 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2924 /* Now load any reg parms into their regs. */
2926 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2927 are to be pushed. */
2928 for (count = 0; count < nargs; count++, argnum += inc)
2930 register rtx val = argvec[argnum].value;
2931 rtx reg = argvec[argnum].reg;
2932 int partial = argvec[argnum].partial;
2934 if (reg != 0 && partial == 0)
2935 emit_move_insn (reg, val);
2939 /* For version 1.37, try deleting this entirely. */
2943 /* Any regs containing parms remain in use through the call. */
2944 for (count = 0; count < nargs; count++)
2945 if (argvec[count].reg != 0)
2946 use_reg (&call_fusage, argvec[count].reg);
2948 /* Don't allow popping to be deferred, since then
2949 cse'ing of library calls could delete a call and leave the pop. */
2952 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2953 will set inhibit_defer_pop to that value. */
2955 /* The return type is needed to decide how many bytes the function pops.
2956 Signedness plays no role in that, so for simplicity, we pretend it's
2957 always signed. We also assume that the list of arguments passed has
2958 no impact, so we pretend it is unknown. */
2961 get_identifier (XSTR (orgfun, 0)),
2962 build_function_type (outmode == VOIDmode ? void_type_node
2963 : type_for_mode (outmode, 0), NULL_TREE),
2964 original_args_size.constant, args_size.constant, 0,
2965 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2966 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2967 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2971 /* Now restore inhibit_defer_pop to its actual original value. */
2974 #ifdef ACCUMULATE_OUTGOING_ARGS
2975 #ifdef REG_PARM_STACK_SPACE
2978 enum machine_mode save_mode = GET_MODE (save_area);
2979 #ifdef ARGS_GROW_DOWNWARD
2981 = gen_rtx_MEM (save_mode,
2982 memory_address (save_mode,
2983 plus_constant (argblock,
2987 = gen_rtx_MEM (save_mode,
2988 memory_address (save_mode,
2989 plus_constant (argblock, low_to_save)));
2992 if (save_mode != BLKmode)
2993 emit_move_insn (stack_area, save_area);
2995 emit_block_move (stack_area, validize_mem (save_area),
2996 GEN_INT (high_to_save - low_to_save + 1),
2997 PARM_BOUNDARY / BITS_PER_UNIT);
3001 /* If we saved any argument areas, restore them. */
3002 for (count = 0; count < nargs; count++)
3003 if (argvec[count].save_area)
3005 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3007 = gen_rtx_MEM (save_mode,
3008 memory_address (save_mode,
3009 plus_constant (argblock, argvec[count].offset.constant)));
3011 emit_move_insn (stack_area, argvec[count].save_area);
3014 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3015 stack_usage_map = initial_stack_usage_map;
3019 /* Like emit_library_call except that an extra argument, VALUE,
3020 comes second and says where to store the result.
3021 (If VALUE is zero, this function chooses a convenient way
3022 to return the value.
3024 This function returns an rtx for where the value is to be found.
3025 If VALUE is nonzero, VALUE is returned. */
3028 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3029 enum machine_mode outmode, int nargs, ...))
3031 #ifndef ANSI_PROTOTYPES
3035 enum machine_mode outmode;
3039 /* Total size in bytes of all the stack-parms scanned so far. */
3040 struct args_size args_size;
3041 /* Size of arguments before any adjustments (such as rounding). */
3042 struct args_size original_args_size;
3043 register int argnum;
3048 CUMULATIVE_ARGS args_so_far;
3049 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3050 struct args_size offset; struct args_size size; rtx save_area; };
3052 int old_inhibit_defer_pop = inhibit_defer_pop;
3053 rtx call_fusage = 0;
3055 int pcc_struct_value = 0;
3056 int struct_value_size = 0;
3058 int reg_parm_stack_space = 0;
3059 #ifdef ACCUMULATE_OUTGOING_ARGS
3063 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3064 /* Define the boundary of the register parm stack space that needs to be
3066 int low_to_save = -1, high_to_save;
3067 rtx save_area = 0; /* Place that it is saved */
3070 #ifdef ACCUMULATE_OUTGOING_ARGS
3071 /* Size of the stack reserved for parameter registers. */
3072 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3073 char *initial_stack_usage_map = stack_usage_map;
3076 #ifdef REG_PARM_STACK_SPACE
3077 #ifdef MAYBE_REG_PARM_STACK_SPACE
3078 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3080 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3084 VA_START (p, nargs);
3086 #ifndef ANSI_PROTOTYPES
3087 orgfun = va_arg (p, rtx);
3088 value = va_arg (p, rtx);
3089 no_queue = va_arg (p, int);
3090 outmode = va_arg (p, enum machine_mode);
3091 nargs = va_arg (p, int);
3094 is_const = no_queue;
3097 /* If this kind of value comes back in memory,
3098 decide where in memory it should come back. */
3099 if (aggregate_value_p (type_for_mode (outmode, 0)))
3101 #ifdef PCC_STATIC_STRUCT_RETURN
3103 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3105 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3106 pcc_struct_value = 1;
3108 value = gen_reg_rtx (outmode);
3109 #else /* not PCC_STATIC_STRUCT_RETURN */
3110 struct_value_size = GET_MODE_SIZE (outmode);
3111 if (value != 0 && GET_CODE (value) == MEM)
3114 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3117 /* This call returns a big structure. */
3121 /* ??? Unfinished: must pass the memory address as an argument. */
3123 /* Copy all the libcall-arguments out of the varargs data
3124 and into a vector ARGVEC.
3126 Compute how to pass each argument. We only support a very small subset
3127 of the full argument passing conventions to limit complexity here since
3128 library functions shouldn't have many args. */
3130 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3131 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3133 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3135 args_size.constant = 0;
3142 /* If there's a structure value address to be passed,
3143 either pass it in the special place, or pass it as an extra argument. */
3144 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3146 rtx addr = XEXP (mem_value, 0);
3149 /* Make sure it is a reasonable operand for a move or push insn. */
3150 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3151 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3152 addr = force_operand (addr, NULL_RTX);
3154 argvec[count].value = addr;
3155 argvec[count].mode = Pmode;
3156 argvec[count].partial = 0;
3158 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3159 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3160 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3164 locate_and_pad_parm (Pmode, NULL_TREE,
3165 argvec[count].reg && argvec[count].partial == 0,
3166 NULL_TREE, &args_size, &argvec[count].offset,
3167 &argvec[count].size);
3170 if (argvec[count].reg == 0 || argvec[count].partial != 0
3171 || reg_parm_stack_space > 0)
3172 args_size.constant += argvec[count].size.constant;
3174 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
/* Walk the remaining variadic arguments, filling in argvec[count] for each:
   value, mode, register assignment, partial-register count, and stack
   offset/size.  (Several lines of this loop are elided in this listing.)  */
3179 for (; count < nargs; count++)
/* Each argument arrives on the va_list as an (rtx value, machine_mode) pair.  */
3181 rtx val = va_arg (p, rtx);
3182 enum machine_mode mode = va_arg (p, enum machine_mode);
3184 /* We cannot convert the arg value to the mode the library wants here;
3185 must do it earlier where we know the signedness of the arg. */
/* NOTE(review): the first half of this condition (line 3186) is elided;
   the visible half rejects a value whose mode disagrees with MODE.  */
3187 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3190 /* On some machines, there's no way to pass a float to a library fcn.
3191 Pass it as a double instead. */
3192 #ifdef LIBGCC_NEEDS_DOUBLE
3193 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3194 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3197 /* There's no need to call protect_from_queue, because
3198 either emit_move_insn or emit_push_insn will do that. */
3200 /* Make sure it is a reasonable operand for a move or push insn. */
3201 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3202 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3203 val = force_operand (val, NULL_RTX);
3205 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3206 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3208 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3209 be viewed as just an efficiency improvement. */
/* Pass-by-reference: copy the value into a stack temporary and pass the
   temporary's address instead of the value itself.  */
3210 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3211 emit_move_insn (slot, val);
3212 val = XEXP (slot, 0);
/* Record the (possibly replaced) value and mode for the later push/load
   passes over argvec.  */
3217 argvec[count].value = val;
3218 argvec[count].mode = mode;
/* Ask the target where this argument goes.  NOTE(review): the body of the
   PARALLEL check below (line 3222) is elided -- presumably it aborts, since
   PARALLEL returns are not handled in this routine; confirm.  */
3220 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3221 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3223 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3224 argvec[count].partial
3225 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3227 argvec[count].partial = 0;
3230 locate_and_pad_parm (mode, NULL_TREE,
3231 argvec[count].reg && argvec[count].partial == 0,
3232 NULL_TREE, &args_size, &argvec[count].offset,
3233 &argvec[count].size);
/* Library-call arguments must have a compile-time-constant size; the elided
   consequent (lines 3236-3237) presumably aborts -- confirm.  */
3235 if (argvec[count].size.var)
/* The words passed in registers occupy no stack space unless a fixed
   register-parameter area exists.  */
3238 if (reg_parm_stack_space == 0 && argvec[count].partial)
3239 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3241 if (argvec[count].reg == 0 || argvec[count].partial != 0
3242 || reg_parm_stack_space > 0)
3243 args_size.constant += argvec[count].size.constant;
3245 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3249 #ifdef FINAL_REG_PARM_STACK_SPACE
3250 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3253 /* If this machine requires an external definition for library
3254 functions, write one out. */
3255 assemble_external_libcall (fun);
/* Remember the unrounded size; the difference from the rounded size below is
   the alignment padding that anti_adjust_stack pushes around the args.  */
3257 original_args_size = args_size;
3258 #ifdef PREFERRED_STACK_BOUNDARY
/* Round the total argument-block size up to a multiple of the preferred
   stack boundary (STACK_BYTES is that boundary in bytes).  */
3259 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3260 / STACK_BYTES) * STACK_BYTES);
3263 args_size.constant = MAX (args_size.constant,
3264 reg_parm_stack_space);
3266 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3267 args_size.constant -= reg_parm_stack_space;
/* Track the high-water mark of outgoing argument space for this function.  */
3270 if (args_size.constant > current_function_outgoing_args_size)
3271 current_function_outgoing_args_size = args_size.constant;
3273 #ifdef ACCUMULATE_OUTGOING_ARGS
3274 /* Since the stack pointer will never be pushed, it is possible for
3275 the evaluation of a parm to clobber something we have already
3276 written to the stack. Since most function calls on RISC machines
3277 do not use the stack, this is uncommon, but must work correctly.
3279 Therefore, we save any area of the stack that was already written
3280 and that we are using. Here we set up to do this by making a new
3281 stack usage map from the old one.
3283 Another approach might be to try to reorder the argument
3284 evaluations to avoid this conflicting stack usage. */
3286 needed = args_size.constant;
3288 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3289 /* Since we will be writing into the entire argument area, the
3290 map must be allocated for its entire size, not just the part that
3291 is the responsibility of the caller. */
3292 needed += reg_parm_stack_space;
3295 #ifdef ARGS_GROW_DOWNWARD
3296 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3299 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3302 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
/* Start from the caller's existing usage map, then zero the tail that covers
   the newly used slots.  */
3304 if (initial_highest_arg_in_use)
3305 bcopy (initial_stack_usage_map, stack_usage_map,
3306 initial_highest_arg_in_use)
3308 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3309 bzero (&stack_usage_map[initial_highest_arg_in_use],
3310 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3313 /* The address of the outgoing argument list must not be copied to a
3314 register here, because argblock would be left pointing to the
3315 wrong place after the call to allocate_dynamic_stack_space below.
3318 argblock = virtual_outgoing_args_rtx;
3319 #else /* not ACCUMULATE_OUTGOING_ARGS */
3320 #ifndef PUSH_ROUNDING
/* No push instructions on this target: allocate the whole argument block
   at once and store arguments into it.  */
3321 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3325 #ifdef PUSH_ARGS_REVERSED
3326 #ifdef PREFERRED_STACK_BOUNDARY
3327 /* If we push args individually in reverse order, perform stack alignment
3328 before the first push (the last arg). */
3330 anti_adjust_stack (GEN_INT (args_size.constant
3331 - original_args_size.constant));
/* NOTE(review): the initialisation of argnum/inc between lines 3335 and 3343
   is elided in this listing -- presumably it selects forward or reverse
   iteration order per PUSH_ARGS_REVERSED; confirm in the full file.  */
3335 #ifdef PUSH_ARGS_REVERSED
3343 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3344 /* The argument list is the property of the called routine and it
3345 may clobber it. If the fixed area has been used for previous
3346 parameters, we must save and restore it.
3348 Here we compute the boundary of the area that needs to be saved, if any. */
3350 #ifdef ARGS_GROW_DOWNWARD
3351 for (count = 0; count < reg_parm_stack_space + 1; count++)
3353 for (count = 0; count < reg_parm_stack_space; count++)
/* Scan the fixed register-parameter area for bytes already marked in use;
   [low_to_save, high_to_save] ends up bracketing the in-use range.  */
3356 if (count >= highest_outgoing_arg_in_use
3357 || stack_usage_map[count] == 0)
3360 if (low_to_save == -1)
3361 low_to_save = count;
3363 high_to_save = count;
/* low_to_save >= 0 means some of the fixed area is live: preserve it.  */
3366 if (low_to_save >= 0)
3368 int num_to_save = high_to_save - low_to_save + 1;
3369 enum machine_mode save_mode
3370 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3373 /* If we don't have the required alignment, must do this in BLKmode. */
3374 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3375 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3376 save_mode = BLKmode;
3378 #ifdef ARGS_GROW_DOWNWARD
3379 stack_area = gen_rtx_MEM (save_mode,
3380 memory_address (save_mode,
3381 plus_constant (argblock,
3384 stack_area = gen_rtx_MEM (save_mode,
3385 memory_address (save_mode,
3386 plus_constant (argblock,
/* A BLKmode save needs a stack temporary and a block move; otherwise one
   pseudo register holds the whole saved region.  */
3389 if (save_mode == BLKmode)
3391 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3392 emit_block_move (validize_mem (save_area), stack_area,
3393 GEN_INT (num_to_save),
3394 PARM_BOUNDARY / BITS_PER_UNIT);
3398 save_area = gen_reg_rtx (save_mode);
3399 emit_move_insn (save_area, stack_area);
3404 /* Push the args that need to be pushed. */
3406 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3407 are to be pushed. */
3408 for (count = 0; count < nargs; count++, argnum += inc)
3410 register enum machine_mode mode = argvec[argnum].mode;
3411 register rtx val = argvec[argnum].value;
3412 rtx reg = argvec[argnum].reg;
3413 int partial = argvec[argnum].partial;
3414 #ifdef ACCUMULATE_OUTGOING_ARGS
3415 int lower_bound, upper_bound, i;
/* Only arguments at least partly on the stack are pushed here; arguments
   passed wholly in registers are loaded later, just before the call.  */
3418 if (! (reg != 0 && partial == 0))
3420 #ifdef ACCUMULATE_OUTGOING_ARGS
3421 /* If this is being stored into a pre-allocated, fixed-size, stack
3422 area, save any previous data at that location. */
3424 #ifdef ARGS_GROW_DOWNWARD
3425 /* stack_slot is negative, but we want to index stack_usage_map
3426 with positive values. */
3427 upper_bound = -argvec[argnum].offset.constant + 1;
3428 lower_bound = upper_bound - argvec[argnum].size.constant;
3430 lower_bound = argvec[argnum].offset.constant;
3431 upper_bound = lower_bound + argvec[argnum].size.constant;
3434 for (i = lower_bound; i < upper_bound; i++)
3435 if (stack_usage_map[i]
3436 /* Don't store things in the fixed argument area at this point;
3437 it has already been saved. */
3438 && i > reg_parm_stack_space)
/* i != upper_bound means the scan above found a live byte: the slot is in
   use, so preserve its contents in a pseudo before overwriting.  */
3441 if (i != upper_bound)
3443 /* We need to make a save area. See what mode we can make it. */
3444 enum machine_mode save_mode
3445 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3448 = gen_rtx_MEM (save_mode,
3449 memory_address (save_mode,
3450 plus_constant (argblock,
3451 argvec[argnum].offset.constant)));
3452 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3453 emit_move_insn (argvec[argnum].save_area, stack_area);
/* Emit the actual push (or store into the argument block).  */
3456 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3457 argblock, GEN_INT (argvec[argnum].offset.constant),
3458 reg_parm_stack_space);
3460 #ifdef ACCUMULATE_OUTGOING_ARGS
3461 /* Now mark the segment we just used. */
3462 for (i = lower_bound; i < upper_bound; i++)
3463 stack_usage_map[i] = 1;
3470 #ifndef PUSH_ARGS_REVERSED
3471 #ifdef PREFERRED_STACK_BOUNDARY
3472 /* If we pushed args in forward order, perform stack alignment
3473 after pushing the last arg. */
3475 anti_adjust_stack (GEN_INT (args_size.constant
3476 - original_args_size.constant));
3480 #ifdef PUSH_ARGS_REVERSED
3486 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3488 /* Now load any reg parms into their regs. */
3490 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3491 are to be pushed. */
3492 for (count = 0; count < nargs; count++, argnum += inc)
3494 register rtx val = argvec[argnum].value;
3495 rtx reg = argvec[argnum].reg;
3496 int partial = argvec[argnum].partial;
/* Register-only arguments are loaded here, after all pushes, so evaluating
   a later argument cannot clobber an already-loaded parameter register.  */
3498 if (reg != 0 && partial == 0)
3499 emit_move_insn (reg, val);
3504 /* For version 1.37, try deleting this entirely. */
3509 /* Any regs containing parms remain in use through the call. */
3510 for (count = 0; count < nargs; count++)
3511 if (argvec[count].reg != 0)
3512 use_reg (&call_fusage, argvec[count].reg);
3514 /* Pass the function the address in which to return a structure value. */
3515 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3517 emit_move_insn (struct_value_rtx,
3519 force_operand (XEXP (mem_value, 0),
3521 if (GET_CODE (struct_value_rtx) == REG)
3522 use_reg (&call_fusage, struct_value_rtx);
3525 /* Don't allow popping to be deferred, since then
3526 cse'ing of library calls could delete a call and leave the pop. */
3529 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3530 will set inhibit_defer_pop to that value. */
3531 /* See the comment in emit_library_call about the function type we build
/* NOTE(review): the emit_call_1 invocation begins on elided lines; only its
   trailing arguments are visible below -- confirm against the full file.  */
3535 get_identifier (XSTR (orgfun, 0)),
3536 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3537 original_args_size.constant, args_size.constant,
3539 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3540 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3541 old_inhibit_defer_pop + 1, call_fusage, is_const);
3543 /* Now restore inhibit_defer_pop to its actual original value. */
3548 /* Copy the value to the right place. */
3549 if (outmode != VOIDmode)
/* Result returned in memory: copy it to VALUE only when the caller supplied
   a different location; otherwise take the hard libcall return register.  */
3555 if (value != mem_value)
3556 emit_move_insn (value, mem_value);
3558 else if (value != 0)
3559 emit_move_insn (value, hard_libcall_value (outmode));
3561 value = hard_libcall_value (outmode);
3564 #ifdef ACCUMULATE_OUTGOING_ARGS
3565 #ifdef REG_PARM_STACK_SPACE
/* Restore the fixed register-parameter area that was saved before the
   argument pushes (mirror of the save code earlier in this function).  */
3568 enum machine_mode save_mode = GET_MODE (save_area);
3569 #ifdef ARGS_GROW_DOWNWARD
3571 = gen_rtx_MEM (save_mode,
3572 memory_address (save_mode,
3573 plus_constant (argblock,
3577 = gen_rtx_MEM (save_mode,
3578 memory_address (save_mode,
3579 plus_constant (argblock, low_to_save)));
/* Register save: single move back.  BLKmode save: block move back.  */
3581 if (save_mode != BLKmode)
3582 emit_move_insn (stack_area, save_area);
3584 emit_block_move (stack_area, validize_mem (save_area),
3585 GEN_INT (high_to_save - low_to_save + 1),
3586 PARM_BOUNDARY / BITS_PER_UNIT);
3590 /* If we saved any argument areas, restore them. */
3591 for (count = 0; count < nargs; count++)
3592 if (argvec[count].save_area)
3594 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3596 = gen_rtx_MEM (save_mode,
3597 memory_address (save_mode, plus_constant (argblock,
3598 argvec[count].offset.constant)));
3600 emit_move_insn (stack_area, argvec[count].save_area);
/* Put the stack-usage bookkeeping back to its pre-call state; the enlarged
   map was alloca'd and dies with this frame.  */
3603 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3604 stack_usage_map = initial_stack_usage_map;
3611 /* Return an rtx which represents a suitable home on the stack
3612 given TYPE, the type of the argument looking for a home.
3613 This is called only for BLKmode arguments.
3615 SIZE is the size needed for this target.
3616 ARGS_ADDR is the address of the bottom of the argument block for this call.
3617 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3618 if this machine uses push insns. */
/* NOTE(review): the return-type line and most parameter/local declarations
   (lines 3619-3620, 3622-3624, 3626-3627) are elided in this listing; the
   return statement below implies the function yields an rtx MEM -- confirm.  */
3621 target_for_arg (type, size, args_addr, offset)
3625 struct args_size offset;
3628 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3630 /* We do not call memory_address if possible,
3631 because we want to address as close to the stack
3632 as possible. For non-variable sized arguments,
3633 this will be stack-pointer relative addressing. */
/* Constant offset: form the address directly without legitimizing it.  */
3634 if (GET_CODE (offset_rtx) == CONST_INT)
3635 target = plus_constant (args_addr, INTVAL (offset_rtx));
3638 /* I have no idea how to guarantee that this
3639 will work in the presence of register parameters. */
3640 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3641 target = memory_address (QImode, target);
/* Wrap the computed address in a BLKmode memory reference.  */
3644 return gen_rtx_MEM (BLKmode, target);
3648 /* Store a single argument for a function call
3649 into the register or memory area where it must be passed.
3650 *ARG describes the argument value and where to pass it.
3652 ARGBLOCK is the address of the stack-block for all the arguments,
3653 or 0 on a machine where arguments are pushed individually.
3655 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3656 so must be careful about how the stack is used.
3658 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3659 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3660 that we need not worry about saving and restoring the stack.
3662 FNDECL is the declaration of the function we are calling. */
/* NOTE(review): this listing elides lines inside the function (including some
   parameter declarations and the function's tail past line 3927), so the
   definition below is a fragment -- treat added comments as review notes.  */
3665 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3666 reg_parm_stack_space)
3667 struct arg_data *arg;
3670 int variable_size ATTRIBUTE_UNUSED;
3671 int reg_parm_stack_space;
3673 register tree pval = arg->tree_value;
3677 #ifdef ACCUMULATE_OUTGOING_ARGS
3678 int i, lower_bound, upper_bound;
/* Bail out early on erroneous argument expressions.  */
3681 if (TREE_CODE (pval) == ERROR_MARK)
3684 /* Push a new temporary level for any temporaries we make for
3688 #ifdef ACCUMULATE_OUTGOING_ARGS
3689 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3690 save any previous data at that location. */
3691 if (argblock && ! variable_size && arg->stack)
3693 #ifdef ARGS_GROW_DOWNWARD
3694 /* stack_slot is negative, but we want to index stack_usage_map
3695 with positive values. */
/* Extract the slot's constant displacement from its (PLUS base const)
   address to derive stack_usage_map indices.  */
3696 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3697 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3701 lower_bound = upper_bound - arg->size.constant;
3703 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3704 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3708 upper_bound = lower_bound + arg->size.constant;
/* Scan the slot's byte range for bytes already in use.  */
3711 for (i = lower_bound; i < upper_bound; i++)
3712 if (stack_usage_map[i]
3713 /* Don't store things in the fixed argument area at this point;
3714 it has already been saved. */
3715 && i > reg_parm_stack_space)
/* i != upper_bound means a live byte was found: preserve the slot's
   current contents before this store overwrites them.  */
3718 if (i != upper_bound)
3720 /* We need to make a save area. See what mode we can make it. */
3721 enum machine_mode save_mode
3722 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3724 = gen_rtx_MEM (save_mode,
3725 memory_address (save_mode,
3726 XEXP (arg->stack_slot, 0)));
3728 if (save_mode == BLKmode)
3730 arg->save_area = assign_stack_temp (BLKmode,
3731 arg->size.constant, 0);
3732 MEM_SET_IN_STRUCT_P (arg->save_area,
3733 AGGREGATE_TYPE_P (TREE_TYPE
3734 (arg->tree_value)));
3735 preserve_temp_slots (arg->save_area);
3736 emit_block_move (validize_mem (arg->save_area), stack_area,
3737 GEN_INT (arg->size.constant),
3738 PARM_BOUNDARY / BITS_PER_UNIT);
3742 arg->save_area = gen_reg_rtx (save_mode);
3743 emit_move_insn (arg->save_area, stack_area);
3748 /* Now that we have saved any slots that will be overwritten by this
3749 store, mark all slots this store will use. We must do this before
3750 we actually expand the argument since the expansion itself may
3751 trigger library calls which might need to use the same stack slot. */
3752 if (argblock && ! variable_size && arg->stack)
3753 for (i = lower_bound; i < upper_bound; i++)
3754 stack_usage_map[i] = 1;
3757 /* If this isn't going to be placed on both the stack and in registers,
3758 set up the register and number of words. */
3759 if (! arg->pass_on_stack)
3760 reg = arg->reg, partial = arg->partial;
3762 if (reg != 0 && partial == 0)
3763 /* Being passed entirely in a register. We shouldn't be called in
3767 /* If this arg needs special alignment, don't load the registers
3769 if (arg->n_aligned_regs != 0)
3772 /* If this is being passed partially in a register, we can't evaluate
3773 it directly into its stack slot. Otherwise, we can. */
3774 if (arg->value == 0)
3776 #ifdef ACCUMULATE_OUTGOING_ARGS
3777 /* stack_arg_under_construction is nonzero if a function argument is
3778 being evaluated directly into the outgoing argument list and
3779 expand_call must take special action to preserve the argument list
3780 if it is called recursively.
3782 For scalar function arguments stack_usage_map is sufficient to
3783 determine which stack slots must be saved and restored. Scalar
3784 arguments in general have pass_on_stack == 0.
3786 If this argument is initialized by a function which takes the
3787 address of the argument (a C++ constructor or a C function
3788 returning a BLKmode structure), then stack_usage_map is
3789 insufficient and expand_call must push the stack around the
3790 function call. Such arguments have pass_on_stack == 1.
3792 Note that it is always safe to set stack_arg_under_construction,
3793 but this generates suboptimal code if set when not needed. */
3795 if (arg->pass_on_stack)
3796 stack_arg_under_construction++;
/* NOTE(review): part of this expand_expr call (line 3799, likely the
   condition's first half and the target operand) is elided -- confirm.  */
3798 arg->value = expand_expr (pval,
3800 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3801 ? NULL_RTX : arg->stack,
3804 /* If we are promoting object (or for any other reason) the mode
3805 doesn't agree, convert the mode. */
3807 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3808 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3809 arg->value, arg->unsignedp);
3811 #ifdef ACCUMULATE_OUTGOING_ARGS
3812 if (arg->pass_on_stack)
3813 stack_arg_under_construction--;
3817 /* Don't allow anything left on stack from computation
3818 of argument to alloca. */
3820 do_pending_stack_adjust ();
3822 if (arg->value == arg->stack)
3824 /* If the value is already in the stack slot, we are done moving
/* -fcheck-memory-usage instrumentation: tell the checker this stack region
   is now readable/writable.  */
3826 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3828 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3829 XEXP (arg->stack, 0), ptr_mode,
3830 ARGS_SIZE_RTX (arg->size),
3831 TYPE_MODE (sizetype),
3832 GEN_INT (MEMORY_USE_RW),
3833 TYPE_MODE (integer_type_node));
3836 else if (arg->mode != BLKmode)
3840 /* Argument is a scalar, not entirely passed in registers.
3841 (If part is passed in registers, arg->partial says how much
3842 and emit_push_insn will take care of putting it there.)
3844 Push it, and if its size is less than the
3845 amount of space allocated to it,
3846 also bump stack pointer by the additional space.
3847 Note that in C the default argument promotions
3848 will prevent such mismatches. */
3850 size = GET_MODE_SIZE (arg->mode);
3851 /* Compute how much space the push instruction will push.
3852 On many machines, pushing a byte will advance the stack
3853 pointer by a halfword. */
3854 #ifdef PUSH_ROUNDING
3855 size = PUSH_ROUNDING (size);
3859 /* Compute how much space the argument should get:
3860 round up to a multiple of the alignment for arguments. */
3861 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3862 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3863 / (PARM_BOUNDARY / BITS_PER_UNIT))
3864 * (PARM_BOUNDARY / BITS_PER_UNIT))
3866 /* This isn't already where we want it on the stack, so put it there.
3867 This can either be done with push or copy insns. */
3868 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3869 partial, reg, used - size, argblock,
3870 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3874 /* BLKmode, at least partly to be pushed. */
3876 register int excess;
3879 /* Pushing a nonscalar.
3880 If part is passed in registers, PARTIAL says how much
3881 and emit_push_insn will take care of putting it there. */
3883 /* Round its size up to a multiple
3884 of the allocation unit for arguments. */
/* Variable-sized argument: the rounded size is already in arg->size.  */
3886 if (arg->size.var != 0)
3889 size_rtx = ARGS_SIZE_RTX (arg->size);
3893 /* PUSH_ROUNDING has no effect on us, because
3894 emit_push_insn for BLKmode is careful to avoid it. */
3895 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3896 + partial * UNITS_PER_WORD);
3897 size_rtx = expr_size (pval);
3900 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3901 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3902 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3903 reg_parm_stack_space);
3907 /* Unless this is a partially-in-register argument, the argument is now
3910 ??? Note that this can change arg->value from arg->stack to
3911 arg->stack_slot and it matters when they are not the same.
3912 It isn't totally clear that this is correct in all cases. */
3914 arg->value = arg->stack_slot;
3916 /* Once we have pushed something, pops can't safely
3917 be deferred during the rest of the arguments. */
3920 /* ANSI doesn't require a sequence point here,
3921 but PCC has one, so this will avoid some problems. */
3924 /* Free any temporary slots made in processing this argument. Show
3925 that we might have taken the address of something and pushed that
3927 preserve_temp_slots (NULL_RTX);