1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-flags.h"
32 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
33 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
50 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
51 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
53 /* Data structure and subroutines used within expand_call. */
57 /* Tree node for this argument. */
59 /* Mode for value; TYPE_MODE unless promoted. */
60 enum machine_mode mode;
61 /* Current RTL value for argument, or 0 if it isn't precomputed. */
63 /* Initially-computed RTL value for argument; only for const functions. */
65 /* Register to pass this argument in, 0 if passed on stack, or a
66 PARALLEL if the arg is to be copied into multiple non-contiguous
69 /* If REG was promoted from the actual mode of the argument expression,
70 indicates whether the promotion is sign- or zero-extended. */
72 /* Number of registers to use. 0 means put the whole arg in registers.
73 Also 0 if not passed in registers. */
75 /* Non-zero if argument must be passed on stack.
76 Note that some arguments may be passed on the stack
77 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
78 pass_on_stack identifies arguments that *cannot* go in registers. */
80 /* Offset of this argument from beginning of stack-args. */
81 struct args_size offset;
82 /* Similar, but offset to the start of the stack slot. Different from
83 OFFSET if this arg pads downward. */
84 struct args_size slot_offset;
85 /* Size of this argument on the stack, rounded up for any padding it gets,
86 parts of the argument passed in registers do not count.
87 If REG_PARM_STACK_SPACE is defined, then register parms
88 are counted here as well. */
89 struct args_size size;
90 /* Location on the stack at which parameter should be stored. The store
91 has already been done if STACK == VALUE. */
93 /* Location on the stack of the start of this argument slot. This can
94 differ from STACK if this arg pads downward. This location is known
95 to be aligned to FUNCTION_ARG_BOUNDARY. */
97 #ifdef ACCUMULATE_OUTGOING_ARGS
98 /* Place that this stack area has been saved, if needed. */
101 /* If an argument's alignment does not permit direct copying into registers,
102 copy in smaller-sized pieces into pseudos. These are stored in a
103 block pointed to by this field. The next field says how many
104 word-sized pseudos we made. */
109 #ifdef ACCUMULATE_OUTGOING_ARGS
110 /* A vector of one char per byte of stack space. A byte is non-zero if
111 the corresponding stack location has been used.
112 This vector is used to prevent a function call within an argument from
113 clobbering any stack already set up. */
114 static char *stack_usage_map;
116 /* Size of STACK_USAGE_MAP. */
117 static int highest_outgoing_arg_in_use;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 int stack_arg_under_construction;
127 static int calls_function PROTO ((tree, int));
128 static int calls_function_1 PROTO ((tree, int));
129 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
130 HOST_WIDE_INT, rtx, rtx,
132 static void special_function_p PROTO ((char *, tree, int *, int *,
134 static void precompute_register_parameters PROTO ((int, struct arg_data *,
136 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
138 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
140 static int finalize_must_preallocate PROTO ((int, int,
142 struct args_size *));
143 static void precompute_arguments PROTO ((int, int, int,
145 struct args_size *));
146 static int compute_argument_block_size PROTO ((int,
147 struct args_size *));
148 static void initialize_argument_information PROTO ((int,
156 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
157 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
158 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
161 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
164 If WHICH is 0, return 1 if EXP contains a call to any function.
165 Actually, we only need return 1 if evaluating EXP would require pushing
166 arguments on the stack, but that is too difficult to compute, so we just
167 assume any function call might require the stack. */
169 static tree calls_function_save_exprs;
/* Entry point for the tree walk below: reset the list of SAVE_EXPRs
   already visited before and after calling the recursive worker, so
   each top-level query starts with a clean visited set.
   NOTE(review): this listing appears truncated -- the return type,
   parameter declarations, the local VAL, and the return statement
   are not visible here.  */
172 calls_function (exp, which)
177 calls_function_save_exprs = 0;
178 val = calls_function_1 (exp, which);
179 calls_function_save_exprs = 0;
/* Recursive worker for calls_function: scan tree EXP and return
   nonzero if it contains a call of interest (WHICH selects between
   "any call" and "alloca-like calls only" -- see the comment above
   calls_function).
   NOTE(review): this listing appears truncated; several lines of this
   function (braces, case labels, returns) are missing.  */
184 calls_function_1 (exp, which)
189 enum tree_code code = TREE_CODE (exp);
190 int type = TREE_CODE_CLASS (code);
191 int length = tree_code_length[(int) code];
193 /* If this code is language-specific, we don't know what it will do. */
194 if ((int) code >= NUM_TREE_CODES)
197 /* Only expressions and references can contain calls. */
198 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
207 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
208 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
211 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
213 if ((DECL_BUILT_IN (fndecl)
214 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
215 || (DECL_SAVED_INSNS (fndecl)
216 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
217 & FUNCTION_FLAGS_CALLS_ALLOCA)))
221 /* Third operand is RTL. */
226 if (SAVE_EXPR_RTL (exp) != 0)
/* Record this SAVE_EXPR on the visited list (after checking membership)
   so a shared SAVE_EXPR node is not re-scanned on a later encounter.  */
228 if (value_member (exp, calls_function_save_exprs))
230 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
231 calls_function_save_exprs);
232 return (TREE_OPERAND (exp, 0) != 0
233 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* For a BLOCK, scan the initializers of its variables and then recurse
   into all of its sub-blocks.  */
239 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
240 if (DECL_INITIAL (local) != 0
241 && calls_function_1 (DECL_INITIAL (local), which))
245 register tree subblock;
247 for (subblock = BLOCK_SUBBLOCKS (exp);
249 subblock = TREE_CHAIN (subblock))
250 if (calls_function_1 (subblock, which))
255 case METHOD_CALL_EXPR:
259 case WITH_CLEANUP_EXPR:
/* Default: recurse into every operand of this tree code.  */
270 for (i = 0; i < length; i++)
271 if (TREE_OPERAND (exp, i) != 0
272 && calls_function_1 (TREE_OPERAND (exp, i), which))
278 /* Force FUNEXP into a form suitable for the address of a CALL,
279 and return that as an rtx. Also load the static chain register
280 if FNDECL is a nested function.
282 CALL_FUSAGE points to a variable holding the prospective
283 CALL_INSN_FUNCTION_USAGE information. */
/* See the comment above: put FUNEXP into a form usable as the address
   of a CALL and load the static chain register for nested functions.
   NOTE(review): this listing appears truncated -- the return type,
   parameter declarations, braces, and the final return of FUNEXP are
   not visible here.  */
286 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
292 rtx static_chain_value = 0;
294 funexp = protect_from_queue (funexp, 0);
297 /* Get possible static chain value for nested function in C. */
298 static_chain_value = lookup_static_chain (fndecl);
300 /* Make a valid memory address and copy constants thru pseudo-regs,
301 but not for a constant address if -fno-function-cse. */
302 if (GET_CODE (funexp) != SYMBOL_REF)
303 /* If we are using registers for parameters, force the
304 function address into a register now. */
305 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
306 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
307 : memory_address (FUNCTION_MODE, funexp));
310 #ifndef NO_FUNCTION_CSE
311 if (optimize && ! flag_no_function_cse)
312 #ifdef NO_RECURSIVE_FUNCTION_CSE
313 if (fndecl != current_function_decl)
315 funexp = force_reg (Pmode, funexp);
/* If there is a static chain, load it into the chain register now and
   record the use (for a hard register) in CALL_FUSAGE.  */
319 if (static_chain_value != 0)
321 emit_move_insn (static_chain_rtx, static_chain_value);
323 if (GET_CODE (static_chain_rtx) == REG)
324 use_reg (call_fusage, static_chain_rtx);
330 /* Generate instructions to call function FUNEXP,
331 and optionally pop the results.
332 The CALL_INSN is the first insn generated.
334 FNDECL is the declaration node of the function. This is given to the
335 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
337 FUNTYPE is the data type of the function. This is given to the macro
338 RETURN_POPS_ARGS to determine whether this function pops its own args.
339 We used to allow an identifier for library functions, but that doesn't
340 work when the return type is an aggregate type and the calling convention
341 says that the pointer to this aggregate is to be popped by the callee.
343 STACK_SIZE is the number of bytes of arguments on the stack,
344 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
345 This is both to put into the call insn and
346 to generate explicit popping code if necessary.
348 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
349 It is zero if this call doesn't want a structure value.
351 NEXT_ARG_REG is the rtx that results from executing
352 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
353 just after all the args have had their registers assigned.
354 This could be whatever you like, but normally it is the first
355 arg-register beyond those used for args in this call,
356 or 0 if all the arg-registers are used in this call.
357 It is passed on to `gen_call' so you can put this info in the call insn.
359 VALREG is a hard register in which a value is returned,
360 or 0 if the call does not return a value.
362 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
363 the args to this call were processed.
364 We restore `inhibit_defer_pop' to that value.
366 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
367 denote registers used by the called function.
369 IS_CONST is true if this is a `const' call. */
/* Emit the CALL_INSN for a call to FUNEXP -- see the long comment above
   for the meaning of each parameter.  Chooses between the target's
   call_pop/call_value_pop and plain call/call_value patterns, attaches
   CALL_FUSAGE to the emitted insn, and arranges for any needed stack
   pop afterward.
   NOTE(review): this listing appears truncated; several declarations,
   braces, and #else/#endif lines of this function are missing.  */
372 emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
373 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
376 tree fndecl ATTRIBUTE_UNUSED;
377 tree funtype ATTRIBUTE_UNUSED;
378 HOST_WIDE_INT stack_size;
379 HOST_WIDE_INT struct_value_size;
382 int old_inhibit_defer_pop;
386 rtx stack_size_rtx = GEN_INT (stack_size);
387 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
389 #ifndef ACCUMULATE_OUTGOING_ARGS
390 int already_popped = 0;
393 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
394 and we don't want to load it into a register as an optimization,
395 because prepare_call_address already did it if it should be done. */
396 if (GET_CODE (funexp) != SYMBOL_REF)
397 funexp = memory_address (FUNCTION_MODE, funexp);
399 #ifndef ACCUMULATE_OUTGOING_ARGS
400 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
/* Prefer the popping call patterns when the callee pops its own
   arguments, so the pop is encoded in the call insn itself.  */
401 if (HAVE_call_pop && HAVE_call_value_pop
402 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
405 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
408 /* If this subroutine pops its own args, record that in the call insn
409 if possible, for the sake of frame pointer elimination. */
412 pat = gen_call_value_pop (valreg,
413 gen_rtx_MEM (FUNCTION_MODE, funexp),
414 stack_size_rtx, next_arg_reg, n_pop);
416 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
417 stack_size_rtx, next_arg_reg, n_pop);
419 emit_call_insn (pat);
426 #if defined (HAVE_call) && defined (HAVE_call_value)
427 if (HAVE_call && HAVE_call_value)
430 emit_call_insn (gen_call_value (valreg,
431 gen_rtx_MEM (FUNCTION_MODE, funexp),
432 stack_size_rtx, next_arg_reg,
435 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
436 stack_size_rtx, next_arg_reg,
437 struct_value_size_rtx));
443 /* Find the CALL insn we just emitted. */
444 for (call_insn = get_last_insn ();
445 call_insn && GET_CODE (call_insn) != CALL_INSN;
446 call_insn = PREV_INSN (call_insn))
452 /* Put the register usage information on the CALL. If there is already
453 some usage information, put ours at the end. */
454 if (CALL_INSN_FUNCTION_USAGE (call_insn))
458 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
459 link = XEXP (link, 1))
462 XEXP (link, 1) = call_fusage;
465 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
467 /* If this is a const call, then set the insn's unchanging bit. */
469 CONST_CALL_P (call_insn) = 1;
471 /* Restore this now, so that we do defer pops for this call's args
472 if the context of the call as a whole permits. */
473 inhibit_defer_pop = old_inhibit_defer_pop;
475 #ifndef ACCUMULATE_OUTGOING_ARGS
476 /* If returning from the subroutine does not automatically pop the args,
477 we need an instruction to pop them sooner or later.
478 Perhaps do it now; perhaps just record how much space to pop later.
480 If returning from the subroutine does pop the args, indicate that the
481 stack pointer will be changed. */
483 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
486 CALL_INSN_FUNCTION_USAGE (call_insn)
487 = gen_rtx_EXPR_LIST (VOIDmode,
488 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
489 CALL_INSN_FUNCTION_USAGE (call_insn));
490 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
491 stack_size_rtx = GEN_INT (stack_size);
/* Either fold the pop into pending_stack_adjust (deferred) or adjust
   the stack pointer immediately.  */
496 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
497 pending_stack_adjust += stack_size;
499 adjust_stack (stack_size_rtx);
504 /* Determine if the function identified by NAME and FNDECL is one with
505 special properties we wish to know about.
507 For example, if the function might return more than one time (setjmp), then
508 set RETURNS_TWICE to a nonzero value.
510 Similarly set IS_LONGJMP for if the function is in the longjmp family.
512 Set IS_MALLOC for any of the standard memory allocation functions which
513 allocate from the heap.
515 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
516 space from the stack such as alloca. */
/* Classify FNDECL/NAME by name matching -- see the comment above for
   the meaning of the output flags (RETURNS_TWICE, IS_LONGJMP,
   IS_MALLOC, MAY_BE_ALLOCA).  The length prefilters (<= 17, == 6,
   == 16) cheaply reject names before the strcmp calls run.
   NOTE(review): this listing appears truncated; the parameter
   declarations and the flag-setting statements attached to several of
   the name comparisons below are not visible.  */
519 special_function_p (name, fndecl, returns_twice, is_longjmp,
520 is_malloc, may_be_alloca)
533 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
534 /* Exclude functions not at the file scope, or not `extern',
535 since they are not the magic functions we would otherwise
537 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
541 /* We assume that alloca will always be called by name. It
542 makes no sense to pass it as a pointer-to-function to
543 anything that does not understand its behavior. */
545 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
547 && ! strcmp (name, "alloca"))
548 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
550 && ! strcmp (name, "__builtin_alloca"))));
552 /* Disregard prefix _, __ or __x. */
555 if (name[1] == '_' && name[2] == 'x')
557 else if (name[1] == '_')
567 && (! strcmp (tname, "setjmp")
568 || ! strcmp (tname, "setjmp_syscall")))
570 && ! strcmp (tname, "sigsetjmp"))
572 && ! strcmp (tname, "savectx")));
574 && ! strcmp (tname, "siglongjmp"))
577 else if ((tname[0] == 'q' && tname[1] == 's'
578 && ! strcmp (tname, "qsetjmp"))
579 || (tname[0] == 'v' && tname[1] == 'f'
580 && ! strcmp (tname, "vfork")))
583 else if (tname[0] == 'l' && tname[1] == 'o'
584 && ! strcmp (tname, "longjmp"))
586 /* XXX should have "malloc" attribute on functions instead
587 of recognizing them by name. */
588 else if (! strcmp (tname, "malloc")
589 || ! strcmp (tname, "calloc")
590 || ! strcmp (tname, "realloc")
591 /* Note use of NAME rather than TNAME here. These functions
592 are only reserved when preceded with __. */
593 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
594 || ! strcmp (name, "__nw") /* mangled __builtin_new */
595 || ! strcmp (name, "__builtin_new")
596 || ! strcmp (name, "__builtin_vec_new"))
601 /* Precompute all register parameters as described by ARGS, storing values
602 into fields within the ARGS array.
604 NUM_ACTUALS indicates the total number elements in the ARGS array.
606 Set REG_PARM_SEEN if we encounter a register parameter. */
/* See the comment above: expand each register-passed argument into
   ARGS[i].value, promote it to the argument's passing mode, and
   possibly copy an expensive value into a pseudo.
   NOTE(review): this listing appears truncated; the return type, some
   parameter declarations, and several braces are not visible.  */
609 precompute_register_parameters (num_actuals, args, reg_parm_seen)
611 struct arg_data *args;
618 for (i = 0; i < num_actuals; i++)
619 if (args[i].reg != 0 && ! args[i].pass_on_stack)
/* Expand the argument only if it was not already precomputed.  */
623 if (args[i].value == 0)
626 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
628 preserve_temp_slots (args[i].value);
631 /* ANSI doesn't require a sequence point here,
632 but PCC has one, so this will avoid some problems. */
636 /* If we are to promote the function arg to a wider mode,
639 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
641 = convert_modes (args[i].mode,
642 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
643 args[i].value, args[i].unsignedp);
645 /* If the value is expensive, and we are inside an appropriately
646 short loop, put the value into a pseudo and then put the pseudo
649 For small register classes, also do this if this call uses
650 register parameters. This is to avoid reload conflicts while
651 loading the parameters registers. */
653 if ((! (GET_CODE (args[i].value) == REG
654 || (GET_CODE (args[i].value) == SUBREG
655 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
656 && args[i].mode != BLKmode
657 && rtx_cost (args[i].value, SET) > 2
658 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
659 || preserve_subexpressions_p ()))
660 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
664 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
666 /* The argument list is the property of the called routine and it
667 may clobber it. If the fixed area has been used for previous
668 parameters, we must save and restore it. */
/* Save the portion of the fixed register-parameter stack area that is
   already in use (per stack_usage_map), returning the save area RTX;
   *LOW_TO_SAVE/*HIGH_TO_SAVE receive the byte range saved.
   NOTE(review): this listing appears truncated; the return type, some
   declarations, braces, and the return statement are not visible.  */
670 save_fixed_argument_area (reg_parm_stack_space, argblock,
671 low_to_save, high_to_save)
672 int reg_parm_stack_space;
678 rtx save_area = NULL_RTX;
680 /* Compute the boundary of the area that needs to be saved, if any. */
681 #ifdef ARGS_GROW_DOWNWARD
682 for (i = 0; i < reg_parm_stack_space + 1; i++)
684 for (i = 0; i < reg_parm_stack_space; i++)
687 if (i >= highest_outgoing_arg_in_use
688 || stack_usage_map[i] == 0)
691 if (*low_to_save == -1)
697 if (*low_to_save >= 0)
699 int num_to_save = *high_to_save - *low_to_save + 1;
700 enum machine_mode save_mode
701 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
704 /* If we don't have the required alignment, must do this in BLKmode. */
705 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
706 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
709 #ifdef ARGS_GROW_DOWNWARD
710 stack_area = gen_rtx_MEM (save_mode,
711 memory_address (save_mode,
712 plus_constant (argblock,
715 stack_area = gen_rtx_MEM (save_mode,
716 memory_address (save_mode,
717 plus_constant (argblock,
/* BLKmode saves go to a stack temporary via a block move; otherwise a
   single register move into a fresh pseudo suffices.  */
720 if (save_mode == BLKmode)
722 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
723 emit_block_move (validize_mem (save_area), stack_area,
724 GEN_INT (num_to_save),
725 PARM_BOUNDARY / BITS_PER_UNIT);
729 save_area = gen_reg_rtx (save_mode);
730 emit_move_insn (save_area, stack_area);
/* Inverse of save_fixed_argument_area: copy SAVE_AREA back into the
   fixed argument area at ARGBLOCK, covering bytes LOW_TO_SAVE through
   HIGH_TO_SAVE.
   NOTE(review): this listing appears truncated; parameter declarations
   and parts of the address arithmetic are not visible.  */
737 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
743 enum machine_mode save_mode = GET_MODE (save_area);
744 #ifdef ARGS_GROW_DOWNWARD
746 = gen_rtx_MEM (save_mode,
747 memory_address (save_mode,
748 plus_constant (argblock,
752 = gen_rtx_MEM (save_mode,
753 memory_address (save_mode,
754 plus_constant (argblock,
/* Mirror the save: register move for a real machine mode, block move
   for BLKmode.  */
758 if (save_mode != BLKmode)
759 emit_move_insn (stack_area, save_area);
761 emit_block_move (stack_area, validize_mem (save_area),
762 GEN_INT (high_to_save - low_to_save + 1),
763 PARM_BOUNDARY / BITS_PER_UNIT);
767 /* If any elements in ARGS refer to parameters that are to be passed in
768 registers, but not in memory, and whose alignment does not permit a
769 direct copy into registers. Copy the values into a group of pseudos
770 which we will later copy into the appropriate hard registers.
772 Pseudos for each unaligned argument will be stored into the array
773 args[argnum].aligned_regs. The caller is responsible for deallocating
774 the aligned_regs array if it is nonzero. */
/* See the comment above: for each BLKmode register argument whose type
   alignment is below word alignment, allocate word_mode pseudos in
   args[i].aligned_regs and fill them via bit-field extract/store.
   NOTE(review): this listing appears truncated; the return type, some
   declarations, and several braces are not visible.  */
777 store_unaligned_arguments_into_pseudos (args, num_actuals)
778 struct arg_data *args;
783 for (i = 0; i < num_actuals; i++)
784 if (args[i].reg != 0 && ! args[i].pass_on_stack
785 && args[i].mode == BLKmode
786 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
787 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
789 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
790 int big_endian_correction = 0;
/* Number of pseudos: the partial-register count if set, else enough
   words to cover the whole argument.  */
792 args[i].n_aligned_regs
793 = args[i].partial ? args[i].partial
794 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
796 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
797 * args[i].n_aligned_regs);
799 /* Structures smaller than a word are aligned to the least
800 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
801 this means we must skip the empty high order bytes when
802 calculating the bit offset. */
803 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
804 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
806 for (j = 0; j < args[i].n_aligned_regs; j++)
808 rtx reg = gen_reg_rtx (word_mode);
809 rtx word = operand_subword_force (args[i].value, j, BLKmode);
810 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
811 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
813 args[i].aligned_regs[j] = reg;
815 /* There is no need to restrict this code to loading items
816 in TYPE_ALIGN sized hunks. The bitfield instructions can
817 load up entire word sized registers efficiently.
819 ??? This may not be needed anymore.
820 We used to emit a clobber here but that doesn't let later
821 passes optimize the instructions we emit. By storing 0 into
822 the register later passes know the first AND to zero out the
823 bitfield being set in the register is unnecessary. The store
824 of 0 will be deleted as will at least the first AND. */
826 emit_move_insn (reg, const0_rtx);
828 bytes -= bitsize / BITS_PER_UNIT;
829 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
830 extract_bit_field (word, bitsize, 0, 1,
833 bitalign / BITS_PER_UNIT,
835 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
840 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
843 NUM_ACTUALS is the total number of parameters.
845 N_NAMED_ARGS is the total number of named arguments.
847 FNDECL is the tree code for the target of this call (if known)
849 ARGS_SO_FAR holds state needed by the target to know where to place
852 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
853 for arguments which are passed in registers.
855 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
856 and may be modified by this routine.
858 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
859 flags which may be modified by this routine. */
/* Fill in the ARGS array and ARGS_SIZE for the actual parameter list
   ACTPARMS -- see the long comment above for each parameter's meaning.
   For each argument this decides register vs. stack placement
   (FUNCTION_ARG and friends), handles pass-by-invisible-reference by
   taking the address (copying first unless the callee copies), and
   accumulates stack offsets/sizes.
   NOTE(review): this listing appears heavily truncated; declarations,
   braces, #else/#endif lines, and several statements of this function
   are missing; the visible code is kept exactly as-is.  */
862 initialize_argument_information (num_actuals, args, args_size, n_named_args,
863 actparms, fndecl, args_so_far,
864 reg_parm_stack_space, old_stack_level,
865 old_pending_adj, must_preallocate, is_const)
867 struct arg_data *args;
868 struct args_size *args_size;
872 CUMULATIVE_ARGS args_so_far;
873 int reg_parm_stack_space;
874 rtx *old_stack_level;
875 int *old_pending_adj;
876 int *must_preallocate;
879 /* 1 if scanning parms front to back, -1 if scanning back to front. */
882 /* Count arg position in order args appear. */
888 args_size->constant = 0;
891 /* In this loop, we consider args in the order they are written.
892 We fill up ARGS from the front or from the back if necessary
893 so that in any case the first arg to be pushed ends up at the front. */
895 #ifdef PUSH_ARGS_REVERSED
896 i = num_actuals - 1, inc = -1;
897 /* In this case, must reverse order of args
898 so that we compute and push the last arg first. */
903 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
904 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
906 tree type = TREE_TYPE (TREE_VALUE (p));
908 enum machine_mode mode;
910 args[i].tree_value = TREE_VALUE (p);
912 /* Replace erroneous argument with constant zero. */
913 if (type == error_mark_node || TYPE_SIZE (type) == 0)
914 args[i].tree_value = integer_zero_node, type = integer_type_node;
916 /* If TYPE is a transparent union, pass things the way we would
917 pass the first field of the union. We have already verified that
918 the modes are the same. */
919 if (TYPE_TRANSPARENT_UNION (type))
920 type = TREE_TYPE (TYPE_FIELDS (type));
922 /* Decide where to pass this arg.
924 args[i].reg is nonzero if all or part is passed in registers.
926 args[i].partial is nonzero if part but not all is passed in registers,
927 and the exact value says how many words are passed in registers.
929 args[i].pass_on_stack is nonzero if the argument must at least be
930 computed on the stack. It may then be loaded back into registers
931 if args[i].reg is nonzero.
933 These decisions are driven by the FUNCTION_... macros and must agree
934 with those made by function.c. */
936 /* See if this argument should be passed by invisible reference. */
937 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
938 && contains_placeholder_p (TYPE_SIZE (type)))
939 || TREE_ADDRESSABLE (type)
940 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
941 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
942 type, argpos < n_named_args)
946 /* If we're compiling a thunk, pass through invisible
947 references instead of making a copy. */
948 if (current_function_is_thunk
949 #ifdef FUNCTION_ARG_CALLEE_COPIES
950 || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
951 type, argpos < n_named_args)
952 /* If it's in a register, we must make a copy of it too. */
953 /* ??? Is this a sufficient test? Is there a better one? */
954 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
955 && REG_P (DECL_RTL (args[i].tree_value)))
956 && ! TREE_ADDRESSABLE (type))
960 /* C++ uses a TARGET_EXPR to indicate that we want to make a
961 new object from the argument. If we are passing by
962 invisible reference, the callee will do that for us, so we
963 can strip off the TARGET_EXPR. This is not always safe,
964 but it is safe in the only case where this is a useful
965 optimization; namely, when the argument is a plain object.
966 In that case, the frontend is just asking the backend to
967 make a bitwise copy of the argument. */
969 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
970 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
971 (args[i].tree_value, 1)))
973 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
974 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
/* Pass the argument's address; the argument becomes a pointer.  */
976 args[i].tree_value = build1 (ADDR_EXPR,
977 build_pointer_type (type),
979 type = build_pointer_type (type);
983 /* We make a copy of the object and pass the address to the
984 function being called. */
987 if (TYPE_SIZE (type) == 0
988 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
989 || (flag_stack_check && ! STACK_CHECK_BUILTIN
990 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
991 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
992 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
994 /* This is a variable-sized object. Make space on the stack
996 rtx size_rtx = expr_size (TREE_VALUE (p));
/* First variable-sized copy: save the stack level and any pending
   stack adjustment so the caller can restore them afterward.  */
998 if (*old_stack_level == 0)
1000 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1001 *old_pending_adj = pending_stack_adjust;
1002 pending_stack_adjust = 0;
1005 copy = gen_rtx_MEM (BLKmode,
1006 allocate_dynamic_stack_space (size_rtx,
1008 TYPE_ALIGN (type)));
1012 int size = int_size_in_bytes (type);
1013 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1016 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1018 store_expr (args[i].tree_value, copy, 0);
1021 args[i].tree_value = build1 (ADDR_EXPR,
1022 build_pointer_type (type),
1023 make_tree (type, copy));
1024 type = build_pointer_type (type);
1028 mode = TYPE_MODE (type);
1029 unsignedp = TREE_UNSIGNED (type);
1031 #ifdef PROMOTE_FUNCTION_ARGS
1032 mode = promote_mode (type, mode, &unsignedp, 1);
1035 args[i].unsignedp = unsignedp;
1036 args[i].mode = mode;
1037 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1038 argpos < n_named_args);
1039 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1042 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1043 argpos < n_named_args);
1046 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1048 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1049 it means that we are to pass this arg in the register(s) designated
1050 by the PARALLEL, but also to pass it in the stack. */
1051 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1052 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1053 args[i].pass_on_stack = 1;
1055 /* If this is an addressable type, we must preallocate the stack
1056 since we must evaluate the object into its final location.
1058 If this is to be passed in both registers and the stack, it is simpler
1060 if (TREE_ADDRESSABLE (type)
1061 || (args[i].pass_on_stack && args[i].reg != 0))
1062 *must_preallocate = 1;
1064 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1065 we cannot consider this function call constant. */
1066 if (TREE_ADDRESSABLE (type))
1069 /* Compute the stack-size of this argument. */
1070 if (args[i].reg == 0 || args[i].partial != 0
1071 || reg_parm_stack_space > 0
1072 || args[i].pass_on_stack)
1073 locate_and_pad_parm (mode, type,
1074 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1079 fndecl, args_size, &args[i].offset,
1082 #ifndef ARGS_GROW_DOWNWARD
1083 args[i].slot_offset = *args_size;
1086 /* If a part of the arg was put into registers,
1087 don't include that part in the amount pushed. */
1088 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1089 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1090 / (PARM_BOUNDARY / BITS_PER_UNIT)
1091 * (PARM_BOUNDARY / BITS_PER_UNIT));
1093 /* Update ARGS_SIZE, the total stack space for args so far. */
1095 args_size->constant += args[i].size.constant;
1096 if (args[i].size.var)
1098 ADD_PARM_SIZE (*args_size, args[i].size.var);
1101 /* Since the slot offset points to the bottom of the slot,
1102 we must record it after incrementing if the args grow down. */
1103 #ifdef ARGS_GROW_DOWNWARD
1104 args[i].slot_offset = *args_size;
1106 args[i].slot_offset.constant = -args_size->constant;
1109 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1113 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1114 have been used, etc. */
1116 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1117 argpos < n_named_args);
1121 /* Update ARGS_SIZE to contain the total size for the argument block.
1122 Return the original constant component of the argument block's size.
1124 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1125 for arguments passed in registers. */
/* See the comment above: finalize ARGS_SIZE for the whole argument
   block -- combine the variable and constant parts, round to the
   preferred stack boundary, and apply the register-parameter stack
   space minimum/adjustment.  Returns the pre-adjustment constant size.
   NOTE(review): this listing appears truncated; the return type and
   several braces/#else/#endif lines are not visible.  */
1128 compute_argument_block_size (reg_parm_stack_space, args_size)
1129 int reg_parm_stack_space;
1130 struct args_size *args_size;
1132 int unadjusted_args_size = args_size->constant;
1134 /* Compute the actual size of the argument block required. The variable
1135 and constant sizes must be combined, the size may have to be rounded,
1136 and there may be a minimum required size. */
1140 args_size->var = ARGS_SIZE_TREE (*args_size);
1141 args_size->constant = 0;
1143 #ifdef PREFERRED_STACK_BOUNDARY
1144 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1145 args_size->var = round_up (args_size->var, STACK_BYTES);
/* Variable-size path: the block must be at least the register-parm
   area, then that area is deducted unless the target defines
   OUTGOING_REG_PARM_STACK_SPACE.  */
1148 if (reg_parm_stack_space > 0)
1151 = size_binop (MAX_EXPR, args_size->var,
1152 size_int (reg_parm_stack_space));
1154 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1155 /* The area corresponding to register parameters is not to count in
1156 the size of the block we need. So make the adjustment. */
1158 = size_binop (MINUS_EXPR, args_size->var,
1159 size_int (reg_parm_stack_space));
/* Constant-size path: round up to STACK_BYTES, then apply the same
   register-parm minimum and adjustment as above.  */
1165 #ifdef PREFERRED_STACK_BOUNDARY
1166 args_size->constant = (((args_size->constant + (STACK_BYTES - 1))
1167 / STACK_BYTES) * STACK_BYTES);
1170 args_size->constant = MAX (args_size->constant,
1171 reg_parm_stack_space);
1173 #ifdef MAYBE_REG_PARM_STACK_SPACE
1174 if (reg_parm_stack_space == 0)
1175 args_size->constant = 0;
1178 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1179 args_size->constant -= reg_parm_stack_space;
1182 return unadjusted_args_size;
1185 /* Precompute parameters as needed for a function call.
1187 IS_CONST indicates the target function is a pure function.
1189 MUST_PREALLOCATE indicates that we must preallocate stack space for
1190 any stack arguments.
1192 NUM_ACTUALS is the number of arguments.
1194 ARGS is an array containing information for each argument; this routine
1195 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1197 ARGS_SIZE contains information about the size of the arg list. */
/* NOTE(review): this listing elides interior lines of the function (the
   body's braces and part of the loop's controlling condition), so the
   comments below annotate only the statements that are visible here.  */
1200 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1202 int must_preallocate;
1204 struct arg_data *args;
1205 struct args_size *args_size;
1209 /* If this function call is cse'able, precompute all the parameters.
1210 Note that if the parameter is constructed into a temporary, this will
1211 cause an additional copy because the parameter will be constructed
1212 into a temporary location and then copied into the outgoing arguments.
1213 If a parameter contains a call to alloca and this function uses the
1214 stack, precompute the parameter. */
1216 /* If we preallocated the stack space, and some arguments must be passed
1217 on the stack, then we must precompute any parameter which contains a
1218 function call which will store arguments on the stack.
1219 Otherwise, evaluating the parameter may clobber previous parameters
1220 which have already been stored into the stack. */
1222 for (i = 0; i < num_actuals; i++)
/* The first arm of this condition is elided (presumably testing IS_CONST);
   the visible arms precompute any argument whose evaluation itself makes
   calls that could disturb the partially-built argument block.  */
1224 || ((args_size->var != 0 || args_size->constant != 0)
1225 && calls_function (args[i].tree_value, 1))
1226 || (must_preallocate
1227 && (args_size->var != 0 || args_size->constant != 0)
1228 && calls_function (args[i].tree_value, 0)))
1230 /* If this is an addressable type, we cannot pre-evaluate it. */
1231 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
/* Evaluate the argument now and record the RTL in both VALUE and
   INITIAL_VALUE.  */
1236 args[i].initial_value = args[i].value
1237 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
/* Keep temporaries backing the value alive until the call is emitted.  */
1239 preserve_temp_slots (args[i].value);
1242 /* ANSI doesn't require a sequence point here,
1243 but PCC has one, so this will avoid some problems. */
1246 args[i].initial_value = args[i].value
1247 = protect_from_queue (args[i].initial_value, 0);
/* If the argument is passed in a promoted mode, convert the precomputed
   value to that mode now (see the MODE field of struct arg_data).  */
1249 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1251 = convert_modes (args[i].mode,
1252 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1253 args[i].value, args[i].unsignedp);
1257 /* Given the current state of MUST_PREALLOCATE and information about
1258 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1259 compute and return the final value for MUST_PREALLOCATE. */
/* NOTE(review): this listing elides interior lines of the function (braces
   and a few statements), so the comments below annotate only the statements
   that are visible here.  */
1262 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1263 int must_preallocate;
1265 struct arg_data *args;
1266 struct args_size *args_size;
1268 /* See if we have or want to preallocate stack space.
1270 If we would have to push a partially-in-regs parm
1271 before other stack parms, preallocate stack space instead.
1273 If the size of some parm is not a multiple of the required stack
1274 alignment, we must preallocate.
1276 If the total size of arguments that would otherwise create a copy in
1277 a temporary (such as a CALL) is more than half the total argument list
1278 size, preallocation is faster.
1280 Another reason to preallocate is if we have a machine (like the m88k)
1281 where stack alignment is required to be maintained between every
1282 pair of insns, not just when the call is made. However, we assume here
1283 that such machines either do not have push insns (and hence preallocation
1284 would occur anyway) or the problem is taken care of with
/* Only scan the arguments if preallocation is not already decided.  */
1287 if (! must_preallocate)
1289 int partial_seen = 0;
1290 int copy_to_evaluate_size = 0;
/* Stop scanning as soon as preallocation becomes mandatory.  */
1293 for (i = 0; i < num_actuals && ! must_preallocate; i++)
/* A partially-in-registers parm followed later by a pure stack parm forces
   preallocation.  NOTE(review): the statement setting partial_seen on the
   first branch is elided from this listing.  */
1295 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1297 else if (partial_seen && args[i].reg == 0)
1298 must_preallocate = 1;
/* Accumulate the total size of BLKmode arguments whose evaluation would
   create an extra temporary copy (calls, target/cond expressions, or
   addressable types).  */
1300 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1301 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1302 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1303 || TREE_CODE (args[i].tree_value) == COND_EXPR
1304 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1305 copy_to_evaluate_size
1306 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
/* Preallocate when such copies would cover at least half the (non-empty)
   constant-sized argument block.  */
1309 if (copy_to_evaluate_size * 2 >= args_size->constant
1310 && args_size->constant > 0)
1311 must_preallocate = 1;
1313 return must_preallocate;
1316 /* Generate all the code for a function call
1317 and return an rtx for its value.
1318 Store the value in TARGET (specified as an rtx) if convenient.
1319 If the value is stored in TARGET then TARGET is returned.
1320 If IGNORE is nonzero, then we ignore the value of the function call. */
1323 expand_call (exp, target, ignore)
1328 /* List of actual parameters. */
1329 tree actparms = TREE_OPERAND (exp, 1);
1330 /* RTX for the function to be called. */
1332 /* Data type of the function. */
1334 /* Declaration of the function being called,
1335 or 0 if the function is computed (not known by name). */
1339 /* Register in which non-BLKmode value will be returned,
1340 or 0 if no value or if value is BLKmode. */
1342 /* Address where we should return a BLKmode value;
1343 0 if value not BLKmode. */
1344 rtx structure_value_addr = 0;
1345 /* Nonzero if that address is being passed by treating it as
1346 an extra, implicit first parameter. Otherwise,
1347 it is passed by being copied directly into struct_value_rtx. */
1348 int structure_value_addr_parm = 0;
1349 /* Size of aggregate value wanted, or zero if none wanted
1350 or if we are using the non-reentrant PCC calling convention
1351 or expecting the value in registers. */
1352 HOST_WIDE_INT struct_value_size = 0;
1353 /* Nonzero if called function returns an aggregate in memory PCC style,
1354 by returning the address of where to find it. */
1355 int pcc_struct_value = 0;
1357 /* Number of actual parameters in this call, including struct value addr. */
1359 /* Number of named args. Args after this are anonymous ones
1360 and they must all go on the stack. */
1363 /* Vector of information about each argument.
1364 Arguments are numbered in the order they will be pushed,
1365 not the order they are written. */
1366 struct arg_data *args;
1368 /* Total size in bytes of all the stack-parms scanned so far. */
1369 struct args_size args_size;
1370 /* Size of arguments before any adjustments (such as rounding). */
1371 int unadjusted_args_size;
1372 /* Data on reg parms scanned so far. */
1373 CUMULATIVE_ARGS args_so_far;
1374 /* Nonzero if a reg parm has been scanned. */
1376 /* Nonzero if this is an indirect function call. */
1378 /* Nonzero if we must avoid push-insns in the args for this call.
1379 If stack space is allocated for register parameters, but not by the
1380 caller, then it is preallocated in the fixed part of the stack frame.
1381 So the entire argument block must then be preallocated (i.e., we
1382 ignore PUSH_ROUNDING in that case). */
1384 #ifdef PUSH_ROUNDING
1385 int must_preallocate = 0;
1387 int must_preallocate = 1;
1390 /* Size of the stack reserved for parameter registers. */
1391 int reg_parm_stack_space = 0;
1393 /* Address of space preallocated for stack parms
1394 (on machines that lack push insns), or 0 if space not preallocated. */
1397 /* Nonzero if it is plausible that this is a call to alloca. */
1399 /* Nonzero if this is a call to malloc or a related function. */
1401 /* Nonzero if this is a call to setjmp or a related function. */
1403 /* Nonzero if this is a call to `longjmp'. */
1405 /* Nonzero if this is a call to an inline function. */
1406 int is_integrable = 0;
1407 /* Nonzero if this is a call to a `const' function.
1408 Note that only explicitly named functions are handled as `const' here. */
1410 /* Nonzero if this is a call to a `volatile' function. */
1411 int is_volatile = 0;
1412 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1413 /* Define the boundary of the register parm stack space that needs to be
1415 int low_to_save = -1, high_to_save;
1416 rtx save_area = 0; /* Place that it is saved */
1419 #ifdef ACCUMULATE_OUTGOING_ARGS
1420 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1421 char *initial_stack_usage_map = stack_usage_map;
1422 int old_stack_arg_under_construction;
1425 rtx old_stack_level = 0;
1426 int old_pending_adj = 0;
1427 int old_inhibit_defer_pop = inhibit_defer_pop;
1428 rtx call_fusage = 0;
1432 /* The value of the function call can be put in a hard register. But
1433 if -fcheck-memory-usage, code which invokes functions (and thus
1434 damages some hard registers) can be inserted before using the value.
1435 So, target is always a pseudo-register in that case. */
1436 if (current_function_check_memory_usage)
1439 /* See if we can find a DECL-node for the actual function.
1440 As a result, decide whether this is a call to an integrable function. */
1442 p = TREE_OPERAND (exp, 0);
1443 if (TREE_CODE (p) == ADDR_EXPR)
1445 fndecl = TREE_OPERAND (p, 0);
1446 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1451 && fndecl != current_function_decl
1452 && DECL_INLINE (fndecl)
1453 && DECL_SAVED_INSNS (fndecl)
1454 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
1456 else if (! TREE_ADDRESSABLE (fndecl))
1458 /* In case this function later becomes inlinable,
1459 record that there was already a non-inline call to it.
1461 Use abstraction instead of setting TREE_ADDRESSABLE
1463 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1466 warning_with_decl (fndecl, "can't inline call to `%s'");
1467 warning ("called from here");
1469 mark_addressable (fndecl);
1472 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1473 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1476 if (TREE_THIS_VOLATILE (fndecl))
1481 /* If we don't have specific function to call, see if we have a
1482 constant or `noreturn' function from the type. */
1485 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1486 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1489 #ifdef REG_PARM_STACK_SPACE
1490 #ifdef MAYBE_REG_PARM_STACK_SPACE
1491 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1493 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1497 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1498 if (reg_parm_stack_space > 0)
1499 must_preallocate = 1;
1502 /* Warn if this value is an aggregate type,
1503 regardless of which calling convention we are using for it. */
1504 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1505 warning ("function call has aggregate value");
1507 /* Set up a place to return a structure. */
1509 /* Cater to broken compilers. */
1510 if (aggregate_value_p (exp))
1512 /* This call returns a big structure. */
1515 #ifdef PCC_STATIC_STRUCT_RETURN
1517 pcc_struct_value = 1;
1518 /* Easier than making that case work right. */
1521 /* In case this is a static function, note that it has been
1523 if (! TREE_ADDRESSABLE (fndecl))
1524 mark_addressable (fndecl);
1528 #else /* not PCC_STATIC_STRUCT_RETURN */
1530 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1532 if (target && GET_CODE (target) == MEM)
1533 structure_value_addr = XEXP (target, 0);
1536 /* Assign a temporary to hold the value. */
1539 /* For variable-sized objects, we must be called with a target
1540 specified. If we were to allocate space on the stack here,
1541 we would have no way of knowing when to free it. */
1543 if (struct_value_size < 0)
1546 /* This DECL is just something to feed to mark_addressable;
1547 it doesn't get pushed. */
1548 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1549 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1550 mark_addressable (d);
1551 structure_value_addr = XEXP (DECL_RTL (d), 0);
1556 #endif /* not PCC_STATIC_STRUCT_RETURN */
1559 /* If called function is inline, try to integrate it. */
1564 #ifdef ACCUMULATE_OUTGOING_ARGS
1565 rtx before_call = get_last_insn ();
1568 temp = expand_inline_function (fndecl, actparms, target,
1569 ignore, TREE_TYPE (exp),
1570 structure_value_addr);
1572 /* If inlining succeeded, return. */
1573 if (temp != (rtx) (HOST_WIDE_INT) -1)
1575 #ifdef ACCUMULATE_OUTGOING_ARGS
1576 /* If the outgoing argument list must be preserved, push
1577 the stack before executing the inlined function if it
1580 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1581 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1584 if (stack_arg_under_construction || i >= 0)
1587 = before_call ? NEXT_INSN (before_call) : get_insns ();
1590 /* Look for a call in the inline function code.
1591 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1592 nonzero then there is a call and it is not necessary
1593 to scan the insns. */
1595 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1596 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1597 if (GET_CODE (insn) == CALL_INSN)
1602 /* Reserve enough stack space so that the largest
1603 argument list of any function call in the inline
1604 function does not overlap the argument list being
1605 evaluated. This is usually an overestimate because
1606 allocate_dynamic_stack_space reserves space for an
1607 outgoing argument list in addition to the requested
1608 space, but there is no way to ask for stack space such
1609 that an argument list of a certain length can be
1612 Add the stack space reserved for register arguments, if
1613 any, in the inline function. What is really needed is the
1614 largest value of reg_parm_stack_space in the inline
1615 function, but that is not available. Using the current
1616 value of reg_parm_stack_space is wrong, but gives
1617 correct results on all supported machines. */
1619 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1620 + reg_parm_stack_space);
1623 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1624 allocate_dynamic_stack_space (GEN_INT (adjust),
1625 NULL_RTX, BITS_PER_UNIT);
1628 emit_insns_before (seq, first_insn);
1629 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1634 /* If the result is equivalent to TARGET, return TARGET to simplify
1635 checks in store_expr. They can be equivalent but not equal in the
1636 case of a function that returns BLKmode. */
1637 if (temp != target && rtx_equal_p (temp, target))
1642 /* If inlining failed, mark FNDECL as needing to be compiled
1643 separately after all. If function was declared inline,
1645 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1646 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1648 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1649 warning ("called from here");
1651 mark_addressable (fndecl);
1654 /* When calling a const function, we must pop the stack args right away,
1655 so that the pop is deleted or moved with the call. */
1659 function_call_count++;
1661 if (fndecl && DECL_NAME (fndecl))
1662 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1664 /* See if this is a call to a function that can return more than once
1665 or a call to longjmp or malloc. */
1666 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1667 &is_malloc, &may_be_alloca);
1670 current_function_calls_alloca = 1;
1672 /* Don't let pending stack adjusts add up to too much.
1673 Also, do all pending adjustments now
1674 if there is any chance this might be a call to alloca. */
1676 if (pending_stack_adjust >= 32
1677 || (pending_stack_adjust > 0 && may_be_alloca))
1678 do_pending_stack_adjust ();
1680 /* Operand 0 is a pointer-to-function; get the type of the function. */
1681 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1682 if (! POINTER_TYPE_P (funtype))
1685 funtype = TREE_TYPE (funtype);
1687 /* Push the temporary stack slot level so that we can free any temporaries
1691 /* Start updating where the next arg would go.
1693 On some machines (such as the PA) indirect calls have a different
1694 calling convention than normal calls. The last argument in
1695 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1697 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1699 /* If struct_value_rtx is 0, it means pass the address
1700 as if it were an extra parameter. */
1701 if (structure_value_addr && struct_value_rtx == 0)
1703 /* If structure_value_addr is a REG other than
1704 virtual_outgoing_args_rtx, we can use always use it. If it
1705 is not a REG, we must always copy it into a register.
1706 If it is virtual_outgoing_args_rtx, we must copy it to another
1707 register in some cases. */
1708 rtx temp = (GET_CODE (structure_value_addr) != REG
1709 #ifdef ACCUMULATE_OUTGOING_ARGS
1710 || (stack_arg_under_construction
1711 && structure_value_addr == virtual_outgoing_args_rtx)
1713 ? copy_addr_to_reg (structure_value_addr)
1714 : structure_value_addr);
1717 = tree_cons (error_mark_node,
1718 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1721 structure_value_addr_parm = 1;
1724 /* Count the arguments and set NUM_ACTUALS. */
1725 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1728 /* Compute number of named args.
1729 Normally, don't include the last named arg if anonymous args follow.
1730 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1731 (If no anonymous args follow, the result of list_length is actually
1732 one too large. This is harmless.)
1734 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1735 zero, this machine will be able to place unnamed args that were passed in
1736 registers into the stack. So treat all args as named. This allows the
1737 insns emitting for a specific argument list to be independent of the
1738 function declaration.
1740 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1741 way to pass unnamed args in registers, so we must force them into
1744 if ((STRICT_ARGUMENT_NAMING
1745 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1746 && TYPE_ARG_TYPES (funtype) != 0)
1748 = (list_length (TYPE_ARG_TYPES (funtype))
1749 /* Don't include the last named arg. */
1750 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1751 /* Count the struct value address, if it is passed as a parm. */
1752 + structure_value_addr_parm);
1754 /* If we know nothing, treat all args as named. */
1755 n_named_args = num_actuals;
1757 /* Make a vector to hold all the information about each arg. */
1758 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1759 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1761 /* Build up entries inthe ARGS array, compute the size of the arguments
1762 into ARGS_SIZE, etc. */
1763 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1764 actparms, fndecl, args_so_far,
1765 reg_parm_stack_space, &old_stack_level,
1766 &old_pending_adj, &must_preallocate,
1769 #ifdef FINAL_REG_PARM_STACK_SPACE
1770 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1776 /* If this function requires a variable-sized argument list, don't try to
1777 make a cse'able block for this call. We may be able to do this
1778 eventually, but it is too complicated to keep track of what insns go
1779 in the cse'able block and which don't. */
1782 must_preallocate = 1;
1785 /* Compute the actual size of the argument block required. The variable
1786 and constant sizes must be combined, the size may have to be rounded,
1787 and there may be a minimum required size. */
1788 unadjusted_args_size
1789 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1791 /* Now make final decision about preallocating stack space. */
1792 must_preallocate = finalize_must_preallocate (must_preallocate,
1793 num_actuals, args, &args_size);
1795 /* If the structure value address will reference the stack pointer, we must
1796 stabilize it. We don't need to do this if we know that we are not going
1797 to adjust the stack pointer in processing this call. */
1799 if (structure_value_addr
1800 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1801 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1803 #ifndef ACCUMULATE_OUTGOING_ARGS
1804 || args_size.constant
1807 structure_value_addr = copy_to_reg (structure_value_addr);
1809 /* Precompute any arguments as needed. */
1810 precompute_arguments (is_const, must_preallocate, num_actuals,
1813 /* Now we are about to start emitting insns that can be deleted
1814 if a libcall is deleted. */
1815 if (is_const || is_malloc)
1818 /* If we have no actual push instructions, or shouldn't use them,
1819 make space for all args right now. */
1821 if (args_size.var != 0)
1823 if (old_stack_level == 0)
1825 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1826 old_pending_adj = pending_stack_adjust;
1827 pending_stack_adjust = 0;
1828 #ifdef ACCUMULATE_OUTGOING_ARGS
1829 /* stack_arg_under_construction says whether a stack arg is
1830 being constructed at the old stack level. Pushing the stack
1831 gets a clean outgoing argument block. */
1832 old_stack_arg_under_construction = stack_arg_under_construction;
1833 stack_arg_under_construction = 0;
1836 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1840 /* Note that we must go through the motions of allocating an argument
1841 block even if the size is zero because we may be storing args
1842 in the area reserved for register arguments, which may be part of
1845 int needed = args_size.constant;
1847 /* Store the maximum argument space used. It will be pushed by
1848 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1851 if (needed > current_function_outgoing_args_size)
1852 current_function_outgoing_args_size = needed;
1854 if (must_preallocate)
1856 #ifdef ACCUMULATE_OUTGOING_ARGS
1857 /* Since the stack pointer will never be pushed, it is possible for
1858 the evaluation of a parm to clobber something we have already
1859 written to the stack. Since most function calls on RISC machines
1860 do not use the stack, this is uncommon, but must work correctly.
1862 Therefore, we save any area of the stack that was already written
1863 and that we are using. Here we set up to do this by making a new
1864 stack usage map from the old one. The actual save will be done
1867 Another approach might be to try to reorder the argument
1868 evaluations to avoid this conflicting stack usage. */
1870 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1871 /* Since we will be writing into the entire argument area, the
1872 map must be allocated for its entire size, not just the part that
1873 is the responsibility of the caller. */
1874 needed += reg_parm_stack_space;
1877 #ifdef ARGS_GROW_DOWNWARD
1878 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1881 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1884 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1886 if (initial_highest_arg_in_use)
1887 bcopy (initial_stack_usage_map, stack_usage_map,
1888 initial_highest_arg_in_use);
1890 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1891 bzero (&stack_usage_map[initial_highest_arg_in_use],
1892 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1895 /* The address of the outgoing argument list must not be copied to a
1896 register here, because argblock would be left pointing to the
1897 wrong place after the call to allocate_dynamic_stack_space below.
1900 argblock = virtual_outgoing_args_rtx;
1902 #else /* not ACCUMULATE_OUTGOING_ARGS */
1903 if (inhibit_defer_pop == 0)
1905 /* Try to reuse some or all of the pending_stack_adjust
1906 to get this space. Maybe we can avoid any pushing. */
1907 if (needed > pending_stack_adjust)
1909 needed -= pending_stack_adjust;
1910 pending_stack_adjust = 0;
1914 pending_stack_adjust -= needed;
1918 /* Special case this because overhead of `push_block' in this
1919 case is non-trivial. */
1921 argblock = virtual_outgoing_args_rtx;
1923 argblock = push_block (GEN_INT (needed), 0, 0);
1925 /* We only really need to call `copy_to_reg' in the case where push
1926 insns are going to be used to pass ARGBLOCK to a function
1927 call in ARGS. In that case, the stack pointer changes value
1928 from the allocation point to the call point, and hence
1929 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1930 But might as well always do it. */
1931 argblock = copy_to_reg (argblock);
1932 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1936 #ifdef ACCUMULATE_OUTGOING_ARGS
1937 /* The save/restore code in store_one_arg handles all cases except one:
1938 a constructor call (including a C function returning a BLKmode struct)
1939 to initialize an argument. */
1940 if (stack_arg_under_construction)
1942 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1943 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1945 rtx push_size = GEN_INT (args_size.constant);
1947 if (old_stack_level == 0)
1949 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1950 old_pending_adj = pending_stack_adjust;
1951 pending_stack_adjust = 0;
1952 /* stack_arg_under_construction says whether a stack arg is
1953 being constructed at the old stack level. Pushing the stack
1954 gets a clean outgoing argument block. */
1955 old_stack_arg_under_construction = stack_arg_under_construction;
1956 stack_arg_under_construction = 0;
1957 /* Make a new map for the new argument list. */
1958 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1959 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1960 highest_outgoing_arg_in_use = 0;
1962 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1964 /* If argument evaluation might modify the stack pointer, copy the
1965 address of the argument list to a register. */
1966 for (i = 0; i < num_actuals; i++)
1967 if (args[i].pass_on_stack)
1969 argblock = copy_addr_to_reg (argblock);
1975 /* If we preallocated stack space, compute the address of each argument.
1976 We need not ensure it is a valid memory address here; it will be
1977 validized when it is used. */
1980 rtx arg_reg = argblock;
1983 if (GET_CODE (argblock) == PLUS)
1984 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1986 for (i = 0; i < num_actuals; i++)
1988 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1989 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1992 /* Skip this parm if it will not be passed on the stack. */
1993 if (! args[i].pass_on_stack && args[i].reg != 0)
1996 if (GET_CODE (offset) == CONST_INT)
1997 addr = plus_constant (arg_reg, INTVAL (offset));
1999 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
2001 addr = plus_constant (addr, arg_offset);
2002 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2005 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
2007 if (GET_CODE (slot_offset) == CONST_INT)
2008 addr = plus_constant (arg_reg, INTVAL (slot_offset));
2010 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
2012 addr = plus_constant (addr, arg_offset);
2013 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2017 #ifdef PUSH_ARGS_REVERSED
2018 #ifdef PREFERRED_STACK_BOUNDARY
2019 /* If we push args individually in reverse order, perform stack alignment
2020 before the first push (the last arg). */
2022 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2026 /* Don't try to defer pops if preallocating, not even from the first arg,
2027 since ARGBLOCK probably refers to the SP. */
2031 /* Get the function to call, in the form of RTL. */
2034 /* If this is the first use of the function, see if we need to
2035 make an external definition for it. */
2036 if (! TREE_USED (fndecl))
2038 assemble_external (fndecl);
2039 TREE_USED (fndecl) = 1;
2042 /* Get a SYMBOL_REF rtx for the function address. */
2043 funexp = XEXP (DECL_RTL (fndecl), 0);
2046 /* Generate an rtx (probably a pseudo-register) for the address. */
2049 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
2050 pop_temp_slots (); /* FUNEXP can't be BLKmode */
2052 /* Check the function is executable. */
2053 if (current_function_check_memory_usage)
2054 emit_library_call (chkr_check_exec_libfunc, 1,
2060 /* Figure out the register where the value, if any, will come back. */
2062 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2063 && ! structure_value_addr)
2065 if (pcc_struct_value)
2066 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2069 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2072 /* Precompute all register parameters. It isn't safe to compute anything
2073 once we have started filling any specific hard regs. */
2074 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2076 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2078 /* Save the fixed argument area if it's part of the caller's frame and
2079 is clobbered by argument setup for this call. */
2080 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2081 &low_to_save, &high_to_save);
2085 /* Now store (and compute if necessary) all non-register parms.
2086 These come before register parms, since they can require block-moves,
2087 which could clobber the registers used for register parms.
2088 Parms which have partial registers are not stored here,
2089 but we do preallocate space here if they want that. */
2091 for (i = 0; i < num_actuals; i++)
2092 if (args[i].reg == 0 || args[i].pass_on_stack)
2093 store_one_arg (&args[i], argblock, may_be_alloca,
2094 args_size.var != 0, reg_parm_stack_space);
2096 /* If we have a parm that is passed in registers but not in memory
2097 and whose alignment does not permit a direct copy into registers,
2098 make a group of pseudos that correspond to each register that we
2100 if (STRICT_ALIGNMENT)
2101 store_unaligned_arguments_into_pseudos (args, num_actuals);
2103 /* Now store any partially-in-registers parm.
2104 This is the last place a block-move can happen. */
2106 for (i = 0; i < num_actuals; i++)
2107 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2108 store_one_arg (&args[i], argblock, may_be_alloca,
2109 args_size.var != 0, reg_parm_stack_space);
2111 #ifndef PUSH_ARGS_REVERSED
2112 #ifdef PREFERRED_STACK_BOUNDARY
2113 /* If we pushed args in forward order, perform stack alignment
2114 after pushing the last arg. */
2116 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2120 /* If register arguments require space on the stack and stack space
2121 was not preallocated, allocate stack space here for arguments
2122 passed in registers. */
2123 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2124 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2125 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2128 /* Pass the function the address in which to return a structure value. */
2129 if (structure_value_addr && ! structure_value_addr_parm)
2131 emit_move_insn (struct_value_rtx,
2133 force_operand (structure_value_addr,
2136 /* Mark the memory for the aggregate as write-only. */
2137 if (current_function_check_memory_usage)
2138 emit_library_call (chkr_set_right_libfunc, 1,
2140 structure_value_addr, ptr_mode,
2141 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2142 GEN_INT (MEMORY_USE_WO),
2143 TYPE_MODE (integer_type_node));
2145 if (GET_CODE (struct_value_rtx) == REG)
2146 use_reg (&call_fusage, struct_value_rtx);
2149 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2151 /* Now do the register loads required for any wholly-register parms or any
2152 parms which are passed both on the stack and in a register. Their
2153 expressions were already evaluated.
2155 Mark all register-parms as living through the call, putting these USE
2156 insns in the CALL_INSN_FUNCTION_USAGE field. */
2158 #ifdef LOAD_ARGS_REVERSED
2159 for (i = num_actuals - 1; i >= 0; i--)
2161 for (i = 0; i < num_actuals; i++)
2164 rtx reg = args[i].reg;
2165 int partial = args[i].partial;
2170 /* Set to non-negative if must move a word at a time, even if just
2171 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
2172 we just use a normal move insn. This value can be zero if the
2173 argument is a zero size structure with no fields. */
2174 nregs = (partial ? partial
2175 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2176 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
2177 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2180 /* Handle calls that pass values in multiple non-contiguous
2181 locations. The Irix 6 ABI has examples of this. */
2183 if (GET_CODE (reg) == PARALLEL)
2185 emit_group_load (reg, args[i].value,
2186 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
2187 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
2191 /* If simple case, just do move. If normal partial, store_one_arg
2192 has already loaded the register for us. In all other cases,
2193 load the register(s) from memory. */
2195 else if (nregs == -1)
2196 emit_move_insn (reg, args[i].value);
2198 /* If we have pre-computed the values to put in the registers in
2199 the case of non-aligned structures, copy them in now. */
2201 else if (args[i].n_aligned_regs != 0)
2202 for (j = 0; j < args[i].n_aligned_regs; j++)
2203 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2204 args[i].aligned_regs[j]);
2206 else if (partial == 0 || args[i].pass_on_stack)
2207 move_block_to_reg (REGNO (reg),
2208 validize_mem (args[i].value), nregs,
2211 /* Handle calls that pass values in multiple non-contiguous
2212 locations. The Irix 6 ABI has examples of this. */
2213 if (GET_CODE (reg) == PARALLEL)
2214 use_group_regs (&call_fusage, reg);
2215 else if (nregs == -1)
2216 use_reg (&call_fusage, reg);
2218 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
2222 /* Perform postincrements before actually calling the function. */
2225 /* All arguments and registers used for the call must be set up by now! */
2227 /* Generate the actual call instruction. */
2228 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
2229 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2230 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2232 /* If call is cse'able, make appropriate pair of reg-notes around it.
2233 Test valreg so we don't crash; may safely ignore `const'
2234 if return type is void. Disable for PARALLEL return values, because
2235 we have no way to move such values into a pseudo register. */
2236 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2239 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2242 /* Mark the return value as a pointer if needed. */
2243 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2245 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2246 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2249 /* Construct an "equal form" for the value which mentions all the
2250 arguments in order as well as the function name. */
2251 #ifdef PUSH_ARGS_REVERSED
2252 for (i = 0; i < num_actuals; i++)
2253 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2255 for (i = num_actuals - 1; i >= 0; i--)
2256 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2258 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2260 insns = get_insns ();
2263 emit_libcall_block (insns, temp, valreg, note);
2269 /* Otherwise, just write out the sequence without a note. */
2270 rtx insns = get_insns ();
2277 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2280 /* The return value from a malloc-like function is a pointer. */
2281 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2282 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2284 emit_move_insn (temp, valreg);
2286 /* The return value from a malloc-like function can not alias
2288 last = get_last_insn ();
2290 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2292 /* Write out the sequence. */
2293 insns = get_insns ();
2299 /* For calls to `setjmp', etc., inform flow.c it should complain
2300 if nonvolatile values are live. */
2304 emit_note (name, NOTE_INSN_SETJMP);
2305 current_function_calls_setjmp = 1;
2309 current_function_calls_longjmp = 1;
2311 /* Notice functions that cannot return.
2312 If optimizing, insns emitted below will be dead.
2313 If not optimizing, they will exist, which is useful
2314 if the user uses the `return' command in the debugger. */
2316 if (is_volatile || is_longjmp)
2319 /* If value type not void, return an rtx for the value. */
2321 /* If there are cleanups to be called, don't use a hard reg as target.
2322 We need to double check this and see if it matters anymore. */
2323 if (any_pending_cleanups (1)
2324 && target && REG_P (target)
2325 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2328 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2331 target = const0_rtx;
2333 else if (structure_value_addr)
2335 if (target == 0 || GET_CODE (target) != MEM)
2337 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2338 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2339 structure_value_addr));
2340 MEM_SET_IN_STRUCT_P (target,
2341 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2344 else if (pcc_struct_value)
2346 /* This is the special C++ case where we need to
2347 know what the true target was. We take care to
2348 never use this value more than once in one expression. */
2349 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2350 copy_to_reg (valreg));
2351 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2353 /* Handle calls that return values in multiple non-contiguous locations.
2354 The Irix 6 ABI has examples of this. */
2355 else if (GET_CODE (valreg) == PARALLEL)
2357 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2361 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2362 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2363 preserve_temp_slots (target);
2366 emit_group_store (target, valreg, bytes,
2367 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2369 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2370 && GET_MODE (target) == GET_MODE (valreg))
2371 /* TARGET and VALREG cannot be equal at this point because the latter
2372 would not have REG_FUNCTION_VALUE_P true, while the former would if
2373 it were referring to the same register.
2375 If they refer to the same register, this move will be a no-op, except
2376 when function inlining is being done. */
2377 emit_move_insn (target, valreg);
2378 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2379 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2381 target = copy_to_reg (valreg);
2383 #ifdef PROMOTE_FUNCTION_RETURN
2384 /* If we promoted this return value, make the proper SUBREG. TARGET
2385 might be const0_rtx here, so be careful. */
2386 if (GET_CODE (target) == REG
2387 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2388 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2390 tree type = TREE_TYPE (exp);
2391 int unsignedp = TREE_UNSIGNED (type);
2393 /* If we don't promote as expected, something is wrong. */
2394 if (GET_MODE (target)
2395 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2398 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2399 SUBREG_PROMOTED_VAR_P (target) = 1;
2400 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2404 /* If size of args is variable or this was a constructor call for a stack
2405 argument, restore saved stack-pointer value. */
2407 if (old_stack_level)
2409 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2410 pending_stack_adjust = old_pending_adj;
2411 #ifdef ACCUMULATE_OUTGOING_ARGS
2412 stack_arg_under_construction = old_stack_arg_under_construction;
2413 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2414 stack_usage_map = initial_stack_usage_map;
2417 #ifdef ACCUMULATE_OUTGOING_ARGS
2420 #ifdef REG_PARM_STACK_SPACE
2422 restore_fixed_argument_area (save_area, argblock,
2423 high_to_save, low_to_save);
2426 /* If we saved any argument areas, restore them. */
2427 for (i = 0; i < num_actuals; i++)
2428 if (args[i].save_area)
2430 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2432 = gen_rtx_MEM (save_mode,
2433 memory_address (save_mode,
2434 XEXP (args[i].stack_slot, 0)));
2436 if (save_mode != BLKmode)
2437 emit_move_insn (stack_area, args[i].save_area);
2439 emit_block_move (stack_area, validize_mem (args[i].save_area),
2440 GEN_INT (args[i].size.constant),
2441 PARM_BOUNDARY / BITS_PER_UNIT);
2444 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2445 stack_usage_map = initial_stack_usage_map;
2449 /* If this was alloca, record the new stack level for nonlocal gotos.
2450 Check for the handler slots since we might not have a save area
2451 for non-local gotos. */
2453 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2454 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2458 /* Free up storage we no longer need. */
2459 for (i = 0; i < num_actuals; ++i)
2460 if (args[i].aligned_regs)
2461 free (args[i].aligned_regs);
2466 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2467 (emitting the queue unless NO_QUEUE is nonzero),
2468 for a value of mode OUTMODE,
2469 with NARGS different arguments, passed as alternating rtx values
2470 and machine_modes to convert them to.
2471 The rtx values should have been passed through protect_from_queue already.
2473 NO_QUEUE will be true if and only if the library call is a `const' call
2474 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2475 to the variable is_const in expand_call.
2477 NO_QUEUE must be true for const calls, because if it isn't, then
2478 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2479 and will be lost if the libcall sequence is optimized away.
2481 NO_QUEUE must be false for non-const calls, because if it isn't, the
2482 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2483 optimized. For instance, the instruction scheduler may incorrectly
2484 move memory references across the non-const call. */
2487 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
/* NOTE(review): this listing elides a number of the original source lines;
   the comments added below describe only the code that is visible here.  */
2490 #ifndef ANSI_PROTOTYPES
2493 enum machine_mode outmode;
2497 /* Total size in bytes of all the stack-parms scanned so far. */
2498 struct args_size args_size;
2499 /* Size of arguments before any adjustments (such as rounding). */
2500 struct args_size original_args_size;
/* Index into ARGVEC while pushing/loading args; stepped by INC, whose
   sign depends on PUSH_ARGS_REVERSED (that setup is elided in this
   listing).  */
2501 register int argnum;
2506 CUMULATIVE_ARGS args_so_far;
/* Per-argument record: the value, its mode, the register it is passed in
   (0 if passed on the stack), the number of words passed partially in
   registers, its stack offset and size, and any save area used to
   preserve pre-call stack contents.  */
2507 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2508 struct args_size offset; struct args_size size; rtx save_area; };
2510 int old_inhibit_defer_pop = inhibit_defer_pop;
/* Chain of USE expressions naming registers the call reads; built up by
   prepare_call_address/use_reg below and handed to emit_call_1.  */
2511 rtx call_fusage = 0;
2512 int reg_parm_stack_space = 0;
2513 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2514 /* Define the boundary of the register parm stack space that needs to be
2516 int low_to_save = -1, high_to_save;
2517 rtx save_area = 0; /* Place that it is saved */
2520 #ifdef ACCUMULATE_OUTGOING_ARGS
/* Remember the incoming stack-usage bookkeeping so it can be restored
   after the call.  */
2521 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2522 char *initial_stack_usage_map = stack_usage_map;
2526 #ifdef REG_PARM_STACK_SPACE
2527 /* Size of the stack reserved for parameter registers. */
2528 #ifdef MAYBE_REG_PARM_STACK_SPACE
2529 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2531 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
/* Begin scanning the variable arguments.  Under a non-ANSI compiler the
   named parameters themselves are re-read from the va_list here.  */
2535 VA_START (p, nargs);
2537 #ifndef ANSI_PROTOTYPES
2538 orgfun = va_arg (p, rtx);
2539 no_queue = va_arg (p, int);
2540 outmode = va_arg (p, enum machine_mode);
2541 nargs = va_arg (p, int);
2546 /* Copy all the libcall-arguments out of the varargs data
2547 and into a vector ARGVEC.
2549 Compute how to pass each argument. We only support a very small subset
2550 of the full argument passing conventions to limit complexity here since
2551 library functions shouldn't have many args. */
2553 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2554 bzero ((char *) argvec, nargs * sizeof (struct arg));
2557 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2559 args_size.constant = 0;
/* First pass over the arguments: legitimize each value, decide register
   vs. stack placement, and accumulate the total stack space required.  */
2564 for (count = 0; count < nargs; count++)
2566 rtx val = va_arg (p, rtx);
2567 enum machine_mode mode = va_arg (p, enum machine_mode);
2569 /* We cannot convert the arg value to the mode the library wants here;
2570 must do it earlier where we know the signedness of the arg. */
2572 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2575 /* On some machines, there's no way to pass a float to a library fcn.
2576 Pass it as a double instead. */
2577 #ifdef LIBGCC_NEEDS_DOUBLE
2578 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2579 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2582 /* There's no need to call protect_from_queue, because
2583 either emit_move_insn or emit_push_insn will do that. */
2585 /* Make sure it is a reasonable operand for a move or push insn. */
2586 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2587 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2588 val = force_operand (val, NULL_RTX);
2590 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2591 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2593 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2594 be viewed as just an efficiency improvement. */
/* Pass-by-reference: spill the value to a stack temp and pass its
   address instead.  */
2595 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2596 emit_move_insn (slot, val);
2597 val = force_operand (XEXP (slot, 0), NULL_RTX);
2602 argvec[count].value = val;
2603 argvec[count].mode = mode;
2605 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
/* Multiple-register PARALLEL placement is apparently not handled for
   libcall args; the branch body is elided in this listing -- confirm.  */
2606 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2608 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2609 argvec[count].partial
2610 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2612 argvec[count].partial = 0;
2615 locate_and_pad_parm (mode, NULL_TREE,
2616 argvec[count].reg && argvec[count].partial == 0,
2617 NULL_TREE, &args_size, &argvec[count].offset,
2618 &argvec[count].size);
/* Branch for variable-sized arguments (body elided in this listing).  */
2620 if (argvec[count].size.var)
2623 if (reg_parm_stack_space == 0 && argvec[count].partial)
2624 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
/* Only args passed at least partly on the stack contribute to the total
   stack size, unless stack space is reserved for register parms.  */
2626 if (argvec[count].reg == 0 || argvec[count].partial != 0
2627 || reg_parm_stack_space > 0)
2628 args_size.constant += argvec[count].size.constant;
2630 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2634 #ifdef FINAL_REG_PARM_STACK_SPACE
2635 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2639 /* If this machine requires an external definition for library
2640 functions, write one out. */
2641 assemble_external_libcall (fun);
/* Round the argument-block size up to the preferred stack boundary,
   keeping the pre-rounding size for the alignment adjustments below.  */
2643 original_args_size = args_size;
2644 #ifdef PREFERRED_STACK_BOUNDARY
2645 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2646 / STACK_BYTES) * STACK_BYTES);
2649 args_size.constant = MAX (args_size.constant,
2650 reg_parm_stack_space);
2652 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2653 args_size.constant -= reg_parm_stack_space;
2656 if (args_size.constant > current_function_outgoing_args_size)
2657 current_function_outgoing_args_size = args_size.constant;
2659 #ifdef ACCUMULATE_OUTGOING_ARGS
2660 /* Since the stack pointer will never be pushed, it is possible for
2661 the evaluation of a parm to clobber something we have already
2662 written to the stack. Since most function calls on RISC machines
2663 do not use the stack, this is uncommon, but must work correctly.
2665 Therefore, we save any area of the stack that was already written
2666 and that we are using. Here we set up to do this by making a new
2667 stack usage map from the old one.
2669 Another approach might be to try to reorder the argument
2670 evaluations to avoid this conflicting stack usage. */
2672 needed = args_size.constant;
2674 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2675 /* Since we will be writing into the entire argument area, the
2676 map must be allocated for its entire size, not just the part that
2677 is the responsibility of the caller. */
2678 needed += reg_parm_stack_space;
2681 #ifdef ARGS_GROW_DOWNWARD
2682 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2685 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
/* Build the new stack-usage map: copy the part already in use and zero
   the newly covered tail.  */
2688 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2690 if (initial_highest_arg_in_use)
2691 bcopy (initial_stack_usage_map, stack_usage_map,
2692 initial_highest_arg_in_use);
2694 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2695 bzero (&stack_usage_map[initial_highest_arg_in_use],
2696 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2699 /* The address of the outgoing argument list must not be copied to a
2700 register here, because argblock would be left pointing to the
2701 wrong place after the call to allocate_dynamic_stack_space below.
2704 argblock = virtual_outgoing_args_rtx;
2705 #else /* not ACCUMULATE_OUTGOING_ARGS */
2706 #ifndef PUSH_ROUNDING
2707 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2711 #ifdef PUSH_ARGS_REVERSED
2712 #ifdef PREFERRED_STACK_BOUNDARY
2713 /* If we push args individually in reverse order, perform stack alignment
2714 before the first push (the last arg). */
2716 anti_adjust_stack (GEN_INT (args_size.constant
2717 - original_args_size.constant));
2721 #ifdef PUSH_ARGS_REVERSED
2729 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2730 /* The argument list is the property of the called routine and it
2731 may clobber it. If the fixed area has been used for previous
2732 parameters, we must save and restore it.
2734 Here we compute the boundary of the area that needs to be saved, if any. */
2736 #ifdef ARGS_GROW_DOWNWARD
2737 for (count = 0; count < reg_parm_stack_space + 1; count++)
2739 for (count = 0; count < reg_parm_stack_space; count++)
2742 if (count >= highest_outgoing_arg_in_use
2743 || stack_usage_map[count] == 0)
2746 if (low_to_save == -1)
2747 low_to_save = count;
2749 high_to_save = count;
/* If any of the fixed register-parm area is live, save it: in a single
   register if the alignment permits an integer mode, otherwise in a
   BLKmode stack temporary.  */
2752 if (low_to_save >= 0)
2754 int num_to_save = high_to_save - low_to_save + 1;
2755 enum machine_mode save_mode
2756 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2759 /* If we don't have the required alignment, must do this in BLKmode. */
2760 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2761 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2762 save_mode = BLKmode;
2764 #ifdef ARGS_GROW_DOWNWARD
2765 stack_area = gen_rtx_MEM (save_mode,
2766 memory_address (save_mode,
2767 plus_constant (argblock,
2770 stack_area = gen_rtx_MEM (save_mode,
2771 memory_address (save_mode,
2772 plus_constant (argblock,
2775 if (save_mode == BLKmode)
2777 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2778 emit_block_move (validize_mem (save_area), stack_area,
2779 GEN_INT (num_to_save),
2780 PARM_BOUNDARY / BITS_PER_UNIT);
2784 save_area = gen_reg_rtx (save_mode);
2785 emit_move_insn (save_area, stack_area);
2790 /* Push the args that need to be pushed. */
2792 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2793 are to be pushed. */
/* ARGNUM and INC were initialized above; the direction depends on
   PUSH_ARGS_REVERSED (that setup is elided in this listing).  */
2794 for (count = 0; count < nargs; count++, argnum += inc)
2796 register enum machine_mode mode = argvec[argnum].mode;
2797 register rtx val = argvec[argnum].value;
2798 rtx reg = argvec[argnum].reg;
2799 int partial = argvec[argnum].partial;
2800 #ifdef ACCUMULATE_OUTGOING_ARGS
2801 int lower_bound, upper_bound, i;
/* Only args that live at least partly on the stack get pushed here;
   wholly-register args are loaded just before the call, below.  */
2804 if (! (reg != 0 && partial == 0))
2806 #ifdef ACCUMULATE_OUTGOING_ARGS
2807 /* If this is being stored into a pre-allocated, fixed-size, stack
2808 area, save any previous data at that location. */
2810 #ifdef ARGS_GROW_DOWNWARD
2811 /* stack_slot is negative, but we want to index stack_usage_map
2812 with positive values. */
2813 upper_bound = -argvec[argnum].offset.constant + 1;
2814 lower_bound = upper_bound - argvec[argnum].size.constant;
2816 lower_bound = argvec[argnum].offset.constant;
2817 upper_bound = lower_bound + argvec[argnum].size.constant;
/* Scan this arg's slot for a byte that is already in use.  */
2820 for (i = lower_bound; i < upper_bound; i++)
2821 if (stack_usage_map[i]
2822 /* Don't store things in the fixed argument area at this point;
2823 it has already been saved. */
2824 && i > reg_parm_stack_space)
2827 if (i != upper_bound)
2829 /* We need to make a save area. See what mode we can make it. */
2830 enum machine_mode save_mode
2831 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2834 = gen_rtx_MEM (save_mode,
2835 memory_address (save_mode,
2836 plus_constant (argblock, argvec[argnum].offset.constant)));
2837 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2838 emit_move_insn (argvec[argnum].save_area, stack_area);
2841 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2842 argblock, GEN_INT (argvec[argnum].offset.constant),
2843 reg_parm_stack_space);
2845 #ifdef ACCUMULATE_OUTGOING_ARGS
2846 /* Now mark the segment we just used. */
2847 for (i = lower_bound; i < upper_bound; i++)
2848 stack_usage_map[i] = 1;
2855 #ifndef PUSH_ARGS_REVERSED
2856 #ifdef PREFERRED_STACK_BOUNDARY
2857 /* If we pushed args in forward order, perform stack alignment
2858 after pushing the last arg. */
2860 anti_adjust_stack (GEN_INT (args_size.constant
2861 - original_args_size.constant))
2865 #ifdef PUSH_ARGS_REVERSED
2871 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2873 /* Now load any reg parms into their regs. */
2875 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2876 are to be pushed. */
2877 for (count = 0; count < nargs; count++, argnum += inc)
2879 register rtx val = argvec[argnum].value;
2880 rtx reg = argvec[argnum].reg;
2881 int partial = argvec[argnum].partial;
2883 if (reg != 0 && partial == 0)
2884 emit_move_insn (reg, val);
2888 /* For version 1.37, try deleting this entirely. */
2892 /* Any regs containing parms remain in use through the call. */
2893 for (count = 0; count < nargs; count++)
2894 if (argvec[count].reg != 0)
2895 use_reg (&call_fusage, argvec[count].reg);
2897 /* Don't allow popping to be deferred, since then
2898 cse'ing of library calls could delete a call and leave the pop. */
2901 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2902 will set inhibit_defer_pop to that value. */
2904 /* The return type is needed to decide how many bytes the function pops.
2905 Signedness plays no role in that, so for simplicity, we pretend it's
2906 always signed. We also assume that the list of arguments passed has
2907 no impact, so we pretend it is unknown. */
/* Emit the call itself.  The opening of the emit_call_1 call is elided
   in this listing; the lines below are its remaining arguments.  */
2910 get_identifier (XSTR (orgfun, 0)),
2911 build_function_type (outmode == VOIDmode ? void_type_node
2912 : type_for_mode (outmode, 0), NULL_TREE),
2913 args_size.constant, 0,
2914 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2915 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2916 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2920 /* Now restore inhibit_defer_pop to its actual original value. */
2923 #ifdef ACCUMULATE_OUTGOING_ARGS
2924 #ifdef REG_PARM_STACK_SPACE
/* Restore the fixed register-parm stack area saved before the pushes.  */
2927 enum machine_mode save_mode = GET_MODE (save_area);
2928 #ifdef ARGS_GROW_DOWNWARD
2930 = gen_rtx_MEM (save_mode,
2931 memory_address (save_mode,
2932 plus_constant (argblock,
2936 = gen_rtx_MEM (save_mode,
2937 memory_address (save_mode,
2938 plus_constant (argblock, low_to_save)));
2941 if (save_mode != BLKmode)
2942 emit_move_insn (stack_area, save_area);
2944 emit_block_move (stack_area, validize_mem (save_area),
2945 GEN_INT (high_to_save - low_to_save + 1),
2946 PARM_BOUNDARY / BITS_PER_UNIT);
2950 /* If we saved any argument areas, restore them. */
2951 for (count = 0; count < nargs; count++)
2952 if (argvec[count].save_area)
2954 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2956 = gen_rtx_MEM (save_mode,
2957 memory_address (save_mode,
2958 plus_constant (argblock, argvec[count].offset.constant)));
2960 emit_move_insn (stack_area, argvec[count].save_area);
/* Finally restore the caller's stack-usage bookkeeping.  */
2963 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2964 stack_usage_map = initial_stack_usage_map;
2968 /* Like emit_library_call except that an extra argument, VALUE,
2969 comes second and says where to store the result.
2970 (If VALUE is zero, this function chooses a convenient way
2971 to return the value.)
2973 This function returns an rtx for where the value is to be found.
2974 If VALUE is nonzero, VALUE is returned. */
2977 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2978 enum machine_mode outmode, int nargs, ...))
2980 #ifndef ANSI_PROTOTYPES
2984 enum machine_mode outmode;
2988 /* Total size in bytes of all the stack-parms scanned so far. */
2989 struct args_size args_size;
2990 /* Size of arguments before any adjustments (such as rounding). */
2991 struct args_size original_args_size;
2992 register int argnum;
2997 CUMULATIVE_ARGS args_so_far;
2998 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2999 struct args_size offset; struct args_size size; rtx save_area; };
3001 int old_inhibit_defer_pop = inhibit_defer_pop;
3002 rtx call_fusage = 0;
3004 int pcc_struct_value = 0;
3005 int struct_value_size = 0;
3007 int reg_parm_stack_space = 0;
3008 #ifdef ACCUMULATE_OUTGOING_ARGS
3012 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3013 /* Define the boundary of the register parm stack space that needs to be
3015 int low_to_save = -1, high_to_save;
3016 rtx save_area = 0; /* Place that it is saved */
3019 #ifdef ACCUMULATE_OUTGOING_ARGS
3020 /* Size of the stack reserved for parameter registers. */
3021 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3022 char *initial_stack_usage_map = stack_usage_map;
3025 #ifdef REG_PARM_STACK_SPACE
3026 #ifdef MAYBE_REG_PARM_STACK_SPACE
3027 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3029 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3033 VA_START (p, nargs);
3035 #ifndef ANSI_PROTOTYPES
3036 orgfun = va_arg (p, rtx);
3037 value = va_arg (p, rtx);
3038 no_queue = va_arg (p, int);
3039 outmode = va_arg (p, enum machine_mode);
3040 nargs = va_arg (p, int);
3043 is_const = no_queue;
3046 /* If this kind of value comes back in memory,
3047 decide where in memory it should come back. */
3048 if (aggregate_value_p (type_for_mode (outmode, 0)))
3050 #ifdef PCC_STATIC_STRUCT_RETURN
3052 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3054 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3055 pcc_struct_value = 1;
3057 value = gen_reg_rtx (outmode);
3058 #else /* not PCC_STATIC_STRUCT_RETURN */
3059 struct_value_size = GET_MODE_SIZE (outmode);
3060 if (value != 0 && GET_CODE (value) == MEM)
3063 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3066 /* This call returns a big structure. */
3070 /* ??? Unfinished: must pass the memory address as an argument. */
3072 /* Copy all the libcall-arguments out of the varargs data
3073 and into a vector ARGVEC.
3075 Compute how to pass each argument. We only support a very small subset
3076 of the full argument passing conventions to limit complexity here since
3077 library functions shouldn't have many args. */
3079 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3080 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3082 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3084 args_size.constant = 0;
3091 /* If there's a structure value address to be passed,
3092 either pass it in the special place, or pass it as an extra argument. */
3093 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3095 rtx addr = XEXP (mem_value, 0);
3098 /* Make sure it is a reasonable operand for a move or push insn. */
3099 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3100 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3101 addr = force_operand (addr, NULL_RTX);
3103 argvec[count].value = addr;
3104 argvec[count].mode = Pmode;
3105 argvec[count].partial = 0;
3107 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3108 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3109 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3113 locate_and_pad_parm (Pmode, NULL_TREE,
3114 argvec[count].reg && argvec[count].partial == 0,
3115 NULL_TREE, &args_size, &argvec[count].offset,
3116 &argvec[count].size);
3119 if (argvec[count].reg == 0 || argvec[count].partial != 0
3120 || reg_parm_stack_space > 0)
3121 args_size.constant += argvec[count].size.constant;
3123 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3128 for (; count < nargs; count++)
3130 rtx val = va_arg (p, rtx);
3131 enum machine_mode mode = va_arg (p, enum machine_mode);
3133 /* We cannot convert the arg value to the mode the library wants here;
3134 must do it earlier where we know the signedness of the arg. */
3136 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3139 /* On some machines, there's no way to pass a float to a library fcn.
3140 Pass it as a double instead. */
3141 #ifdef LIBGCC_NEEDS_DOUBLE
3142 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3143 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3146 /* There's no need to call protect_from_queue, because
/* NOTE(review): interior fragment of the library-call emission path
   (emit_library_call / emit_library_call_value area); the enclosing
   function begins before this excerpt and some lines are elided.  */
3147	 either emit_move_insn or emit_push_insn will do that.  */
3149 /* Make sure it is a reasonable operand for a move or push insn. */
3150 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3151 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3152 val = force_operand (val, NULL_RTX);
3154 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3155 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3157 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3158 be viewed as just an efficiency improvement. */
/* Copy VAL into a stack temp and pass the temp's address instead.  */
3159 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3160 emit_move_insn (slot, val);
3161 val = XEXP (slot, 0);
3166 argvec[count].value = val;
3167 argvec[count].mode = mode;
3169 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3170 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3172 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3173 argvec[count].partial
3174 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3176 argvec[count].partial = 0;
/* Compute this argument's stack offset and size within the arg block.  */
3179 locate_and_pad_parm (mode, NULL_TREE,
3180 argvec[count].reg && argvec[count].partial == 0,
3181 NULL_TREE, &args_size, &argvec[count].offset,
3182 &argvec[count].size);
3184 if (argvec[count].size.var)
3187 if (reg_parm_stack_space == 0 && argvec[count].partial)
3188 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3190 if (argvec[count].reg == 0 || argvec[count].partial != 0
3191 || reg_parm_stack_space > 0)
3192 args_size.constant += argvec[count].size.constant;
3194 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3198 #ifdef FINAL_REG_PARM_STACK_SPACE
3199 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3202 /* If this machine requires an external definition for library
3203 functions, write one out. */
3204 assemble_external_libcall (fun);
3206 original_args_size = args_size;
/* Round the total argument-block size up to the preferred stack boundary.  */
3207 #ifdef PREFERRED_STACK_BOUNDARY
3208 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3209 / STACK_BYTES) * STACK_BYTES);
3212 args_size.constant = MAX (args_size.constant,
3213 reg_parm_stack_space);
3215 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3216 args_size.constant -= reg_parm_stack_space;
3219 if (args_size.constant > current_function_outgoing_args_size)
3220 current_function_outgoing_args_size = args_size.constant;
3222 #ifdef ACCUMULATE_OUTGOING_ARGS
3223 /* Since the stack pointer will never be pushed, it is possible for
3224 the evaluation of a parm to clobber something we have already
3225 written to the stack. Since most function calls on RISC machines
3226 do not use the stack, this is uncommon, but must work correctly.
3228 Therefore, we save any area of the stack that was already written
3229 and that we are using. Here we set up to do this by making a new
3230 stack usage map from the old one.
3232 Another approach might be to try to reorder the argument
3233 evaluations to avoid this conflicting stack usage. */
3235 needed = args_size.constant;
3237 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3238 /* Since we will be writing into the entire argument area, the
3239 map must be allocated for its entire size, not just the part that
3240 is the responsibility of the caller. */
3241 needed += reg_parm_stack_space;
3244 #ifdef ARGS_GROW_DOWNWARD
3245 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3248 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3251 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3253 if (initial_highest_arg_in_use)
3254 bcopy (initial_stack_usage_map, stack_usage_map,
3255 initial_highest_arg_in_use);
3257 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3258 bzero (&stack_usage_map[initial_highest_arg_in_use],
3259 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3262 /* The address of the outgoing argument list must not be copied to a
3263 register here, because argblock would be left pointing to the
3264 wrong place after the call to allocate_dynamic_stack_space below.
3267 argblock = virtual_outgoing_args_rtx;
3268 #else /* not ACCUMULATE_OUTGOING_ARGS */
3269 #ifndef PUSH_ROUNDING
3270 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3274 #ifdef PUSH_ARGS_REVERSED
3275 #ifdef PREFERRED_STACK_BOUNDARY
3276 /* If we push args individually in reverse order, perform stack alignment
3277 before the first push (the last arg). */
3279 anti_adjust_stack (GEN_INT (args_size.constant
3280 - original_args_size.constant));
3284 #ifdef PUSH_ARGS_REVERSED
3292 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3293 /* The argument list is the property of the called routine and it
3294 may clobber it. If the fixed area has been used for previous
3295 parameters, we must save and restore it.
3297 Here we compute the boundary of the area that needs to be saved, if any. */
3299 #ifdef ARGS_GROW_DOWNWARD
3300 for (count = 0; count < reg_parm_stack_space + 1; count++)
3302 for (count = 0; count < reg_parm_stack_space; count++)
3305 if (count >= highest_outgoing_arg_in_use
3306 || stack_usage_map[count] == 0)
/* low_to_save is a -1 sentinel until the first in-use byte is found.  */
3309 if (low_to_save == -1)
3310 low_to_save = count;
3312 high_to_save = count;
3315 if (low_to_save >= 0)
3317 int num_to_save = high_to_save - low_to_save + 1;
3318 enum machine_mode save_mode
3319 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3322 /* If we don't have the required alignment, must do this in BLKmode. */
3323 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3324 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3325 save_mode = BLKmode;
3327 #ifdef ARGS_GROW_DOWNWARD
3328 stack_area = gen_rtx_MEM (save_mode,
3329 memory_address (save_mode,
3330 plus_constant (argblock,
3333 stack_area = gen_rtx_MEM (save_mode,
3334 memory_address (save_mode,
3335 plus_constant (argblock,
3338 if (save_mode == BLKmode)
3340 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3341 emit_block_move (validize_mem (save_area), stack_area,
3342 GEN_INT (num_to_save),
3343 PARM_BOUNDARY / BITS_PER_UNIT);
3347 save_area = gen_reg_rtx (save_mode);
3348 emit_move_insn (save_area, stack_area);
3353 /* Push the args that need to be pushed. */
3355 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3356 are to be pushed. */
/* NOTE(review): ARGNUM and INC appear to be initialized outside this
   excerpt according to PUSH_ARGS_REVERSED, so this loop walks ARGVEC
   in push order -- confirm against the full source.  */
3357 for (count = 0; count < nargs; count++, argnum += inc)
3359 register enum machine_mode mode = argvec[argnum].mode;
3360 register rtx val = argvec[argnum].value;
3361 rtx reg = argvec[argnum].reg;
3362 int partial = argvec[argnum].partial;
3363 #ifdef ACCUMULATE_OUTGOING_ARGS
3364 int lower_bound, upper_bound, i;
3367 if (! (reg != 0 && partial == 0))
3369 #ifdef ACCUMULATE_OUTGOING_ARGS
3370 /* If this is being stored into a pre-allocated, fixed-size, stack
3371 area, save any previous data at that location. */
3373 #ifdef ARGS_GROW_DOWNWARD
3374 /* stack_slot is negative, but we want to index stack_usage_map
3375 with positive values. */
3376 upper_bound = -argvec[argnum].offset.constant + 1;
3377 lower_bound = upper_bound - argvec[argnum].size.constant;
3379 lower_bound = argvec[argnum].offset.constant;
3380 upper_bound = lower_bound + argvec[argnum].size.constant;
3383 for (i = lower_bound; i < upper_bound; i++)
3384 if (stack_usage_map[i]
3385 /* Don't store things in the fixed argument area at this point;
3386 it has already been saved. */
3387 && i > reg_parm_stack_space)
/* I stopped early only if some in-use byte overlaps this slot.  */
3390 if (i != upper_bound)
3392 /* We need to make a save area. See what mode we can make it. */
3393 enum machine_mode save_mode
3394 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3397 = gen_rtx_MEM (save_mode,
3398 memory_address (save_mode,
3399 plus_constant (argblock,
3400 argvec[argnum].offset.constant)));
3401 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3402 emit_move_insn (argvec[argnum].save_area, stack_area);
3405 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3406 argblock, GEN_INT (argvec[argnum].offset.constant),
3407 reg_parm_stack_space);
3409 #ifdef ACCUMULATE_OUTGOING_ARGS
3410 /* Now mark the segment we just used. */
3411 for (i = lower_bound; i < upper_bound; i++)
3412 stack_usage_map[i] = 1;
3419 #ifndef PUSH_ARGS_REVERSED
3420 #ifdef PREFERRED_STACK_BOUNDARY
3421 /* If we pushed args in forward order, perform stack alignment
3422 after pushing the last arg. */
3424 anti_adjust_stack (GEN_INT (args_size.constant
3425 - original_args_size.constant))
3429 #ifdef PUSH_ARGS_REVERSED
3435 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3437 /* Now load any reg parms into their regs. */
3439 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3440 are to be pushed. */
3441 for (count = 0; count < nargs; count++, argnum += inc)
3443 register rtx val = argvec[argnum].value;
3444 rtx reg = argvec[argnum].reg;
3445 int partial = argvec[argnum].partial;
3447 if (reg != 0 && partial == 0)
3448 emit_move_insn (reg, val);
3453 /* For version 1.37, try deleting this entirely. */
3458 /* Any regs containing parms remain in use through the call. */
3459 for (count = 0; count < nargs; count++)
3460 if (argvec[count].reg != 0)
3461 use_reg (&call_fusage, argvec[count].reg);
3463 /* Pass the function the address in which to return a structure value. */
3464 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3466 emit_move_insn (struct_value_rtx,
3468 force_operand (XEXP (mem_value, 0),
3470 if (GET_CODE (struct_value_rtx) == REG)
3471 use_reg (&call_fusage, struct_value_rtx);
3474 /* Don't allow popping to be deferred, since then
3475 cse'ing of library calls could delete a call and leave the pop. */
3478 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3479 will set inhibit_defer_pop to that value. */
3480 /* See the comment in emit_library_call about the function type we build
/* NOTE(review): the following lines are trailing arguments of the
   emit_call_1 invocation referred to in the comment above.  */
3484 get_identifier (XSTR (orgfun, 0)),
3485 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3486 args_size.constant, struct_value_size,
3487 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3488 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3489 old_inhibit_defer_pop + 1, call_fusage, is_const);
3491 /* Now restore inhibit_defer_pop to its actual original value. */
3496 /* Copy the value to the right place. */
3497 if (outmode != VOIDmode)
3503 if (value != mem_value)
3504 emit_move_insn (value, mem_value);
3506 else if (value != 0)
3507 emit_move_insn (value, hard_libcall_value (outmode));
3509 value = hard_libcall_value (outmode);
/* Restore any stack areas that were saved before pushing the args.  */
3512 #ifdef ACCUMULATE_OUTGOING_ARGS
3513 #ifdef REG_PARM_STACK_SPACE
3516 enum machine_mode save_mode = GET_MODE (save_area);
3517 #ifdef ARGS_GROW_DOWNWARD
3519 = gen_rtx_MEM (save_mode,
3520 memory_address (save_mode,
3521 plus_constant (argblock,
3525 = gen_rtx_MEM (save_mode,
3526 memory_address (save_mode,
3527 plus_constant (argblock, low_to_save)));
3529 if (save_mode != BLKmode)
3530 emit_move_insn (stack_area, save_area);
3532 emit_block_move (stack_area, validize_mem (save_area),
3533 GEN_INT (high_to_save - low_to_save + 1),
3534 PARM_BOUNDARY / BITS_PER_UNIT);
3538 /* If we saved any argument areas, restore them. */
3539 for (count = 0; count < nargs; count++)
3540 if (argvec[count].save_area)
3542 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3544 = gen_rtx_MEM (save_mode,
3545 memory_address (save_mode, plus_constant (argblock,
3546 argvec[count].offset.constant)));
3548 emit_move_insn (stack_area, argvec[count].save_area);
3551 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3552 stack_usage_map = initial_stack_usage_map;
3559 /* Return an rtx which represents a suitable home on the stack
3560 given TYPE, the type of the argument looking for a home.
3561 This is called only for BLKmode arguments.
3563 SIZE is the size needed for this target.
3564 ARGS_ADDR is the address of the bottom of the argument block for this call.
3565 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3566 if this machine uses push insns. */
3569 target_for_arg (type, size, args_addr, offset)
/* NOTE(review): the remaining K&R parameter declarations (TYPE, SIZE,
   ARGS_ADDR) are elided from this excerpt -- see the full source.  */
3573 struct args_size offset;
3576 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3578 /* We do not call memory_address if possible,
3579 because we want to address as close to the stack
3580 as possible. For non-variable sized arguments,
3581 this will be stack-pointer relative addressing. */
/* Constant offset: form the address directly so it stays
   stack-pointer relative.  */
3582 if (GET_CODE (offset_rtx) == CONST_INT)
3583 target = plus_constant (args_addr, INTVAL (offset_rtx));
3586 /* I have no idea how to guarantee that this
3587 will work in the presence of register parameters. */
3588 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3589 target = memory_address (QImode, target);
/* Wrap the computed address in a BLKmode memory reference.  */
3592 return gen_rtx_MEM (BLKmode, target);
3596 /* Store a single argument for a function call
3597 into the register or memory area where it must be passed.
3598 *ARG describes the argument value and where to pass it.
3600 ARGBLOCK is the address of the stack-block for all the arguments,
3601 or 0 on a machine where arguments are pushed individually.
3603 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3604 so must be careful about how the stack is used.
3606 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3607 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3608 that we need not worry about saving and restoring the stack.
3610 FNDECL is the declaration of the function we are calling. */
3613 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3614 reg_parm_stack_space)
3615 struct arg_data *arg;
3618 int variable_size ATTRIBUTE_UNUSED;
3619 int reg_parm_stack_space;
3621 register tree pval = arg->tree_value;
3625 #ifdef ACCUMULATE_OUTGOING_ARGS
3626 int i, lower_bound, upper_bound;
/* An ERROR_MARK means the front end already reported a problem with
   this argument.  */
3629 if (TREE_CODE (pval) == ERROR_MARK)
3632 /* Push a new temporary level for any temporaries we make for
3636 #ifdef ACCUMULATE_OUTGOING_ARGS
3637 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3638 save any previous data at that location. */
3639 if (argblock && ! variable_size && arg->stack)
3641 #ifdef ARGS_GROW_DOWNWARD
3642 /* stack_slot is negative, but we want to index stack_usage_map
3643 with positive values. */
3644 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3645 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3649 lower_bound = upper_bound - arg->size.constant;
3651 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3652 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3656 upper_bound = lower_bound + arg->size.constant;
/* Scan the usage map; if any in-use byte overlaps this slot, the
   overlapped area must be saved before we store into it.  */
3659 for (i = lower_bound; i < upper_bound; i++)
3660 if (stack_usage_map[i]
3661 /* Don't store things in the fixed argument area at this point;
3662 it has already been saved. */
3663 && i > reg_parm_stack_space)
3666 if (i != upper_bound)
3668 /* We need to make a save area. See what mode we can make it. */
3669 enum machine_mode save_mode
3670 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3672 = gen_rtx_MEM (save_mode,
3673 memory_address (save_mode,
3674 XEXP (arg->stack_slot, 0)));
3676 if (save_mode == BLKmode)
3678 arg->save_area = assign_stack_temp (BLKmode,
3679 arg->size.constant, 0);
3680 MEM_SET_IN_STRUCT_P (arg->save_area,
3681 AGGREGATE_TYPE_P (TREE_TYPE
3682 (arg->tree_value)));
3683 preserve_temp_slots (arg->save_area);
3684 emit_block_move (validize_mem (arg->save_area), stack_area,
3685 GEN_INT (arg->size.constant),
3686 PARM_BOUNDARY / BITS_PER_UNIT);
3690 arg->save_area = gen_reg_rtx (save_mode);
3691 emit_move_insn (arg->save_area, stack_area);
3696 /* Now that we have saved any slots that will be overwritten by this
3697 store, mark all slots this store will use. We must do this before
3698 we actually expand the argument since the expansion itself may
3699 trigger library calls which might need to use the same stack slot. */
3700 if (argblock && ! variable_size && arg->stack)
3701 for (i = lower_bound; i < upper_bound; i++)
3702 stack_usage_map[i] = 1;
3705 /* If this isn't going to be placed on both the stack and in registers,
3706 set up the register and number of words. */
3707 if (! arg->pass_on_stack)
3708 reg = arg->reg, partial = arg->partial;
3710 if (reg != 0 && partial == 0)
3711 /* Being passed entirely in a register. We shouldn't be called in
3715 /* If this arg needs special alignment, don't load the registers
3717 if (arg->n_aligned_regs != 0)
3720 /* If this is being passed partially in a register, we can't evaluate
3721 it directly into its stack slot. Otherwise, we can. */
/* arg->value == 0 means the argument has not been computed yet
   (see the arg_data comments); expand it now.  */
3722 if (arg->value == 0)
3724 #ifdef ACCUMULATE_OUTGOING_ARGS
3725 /* stack_arg_under_construction is nonzero if a function argument is
3726 being evaluated directly into the outgoing argument list and
3727 expand_call must take special action to preserve the argument list
3728 if it is called recursively.
3730 For scalar function arguments stack_usage_map is sufficient to
3731 determine which stack slots must be saved and restored. Scalar
3732 arguments in general have pass_on_stack == 0.
3734 If this argument is initialized by a function which takes the
3735 address of the argument (a C++ constructor or a C function
3736 returning a BLKmode structure), then stack_usage_map is
3737 insufficient and expand_call must push the stack around the
3738 function call. Such arguments have pass_on_stack == 1.
3740 Note that it is always safe to set stack_arg_under_construction,
3741 but this generates suboptimal code if set when not needed. */
3743 if (arg->pass_on_stack)
3744 stack_arg_under_construction++;
/* Evaluate the argument, expanding directly into its stack slot
   when the (partially elided) condition allows it.  */
3746 arg->value = expand_expr (pval,
3748 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3749 ? NULL_RTX : arg->stack,
3752 /* If we are promoting object (or for any other reason) the mode
3753 doesn't agree, convert the mode. */
3755 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3756 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3757 arg->value, arg->unsignedp);
3759 #ifdef ACCUMULATE_OUTGOING_ARGS
3760 if (arg->pass_on_stack)
3761 stack_arg_under_construction--;
3765 /* Don't allow anything left on stack from computation
3766 of argument to alloca. */
3768 do_pending_stack_adjust ();
3770 if (arg->value == arg->stack)
3772 /* If the value is already in the stack slot, we are done moving
3774 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3776 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3777 XEXP (arg->stack, 0), ptr_mode,
3778 ARGS_SIZE_RTX (arg->size),
3779 TYPE_MODE (sizetype),
3780 GEN_INT (MEMORY_USE_RW),
3781 TYPE_MODE (integer_type_node));
3784 else if (arg->mode != BLKmode)
3788 /* Argument is a scalar, not entirely passed in registers.
3789 (If part is passed in registers, arg->partial says how much
3790 and emit_push_insn will take care of putting it there.)
3792 Push it, and if its size is less than the
3793 amount of space allocated to it,
3794 also bump stack pointer by the additional space.
3795 Note that in C the default argument promotions
3796 will prevent such mismatches. */
3798 size = GET_MODE_SIZE (arg->mode);
3799 /* Compute how much space the push instruction will push.
3800 On many machines, pushing a byte will advance the stack
3801 pointer by a halfword. */
3802 #ifdef PUSH_ROUNDING
3803 size = PUSH_ROUNDING (size);
3807 /* Compute how much space the argument should get:
3808 round up to a multiple of the alignment for arguments. */
3809 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3810 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3811 / (PARM_BOUNDARY / BITS_PER_UNIT))
3812 * (PARM_BOUNDARY / BITS_PER_UNIT));
3814 /* This isn't already where we want it on the stack, so put it there.
3815 This can either be done with push or copy insns. */
3816 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3817 partial, reg, used - size, argblock,
3818 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3822 /* BLKmode, at least partly to be pushed. */
3824 register int excess;
3827 /* Pushing a nonscalar.
3828 If part is passed in registers, PARTIAL says how much
3829 and emit_push_insn will take care of putting it there. */
3831 /* Round its size up to a multiple
3832 of the allocation unit for arguments. */
3834 if (arg->size.var != 0)
3837 size_rtx = ARGS_SIZE_RTX (arg->size);
3841 /* PUSH_ROUNDING has no effect on us, because
3842 emit_push_insn for BLKmode is careful to avoid it. */
/* EXCESS is padding beyond the value's true size (plus any part
   passed in registers).  */
3843 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3844 + partial * UNITS_PER_WORD);
3845 size_rtx = expr_size (pval);
3848 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3849 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3850 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3851 reg_parm_stack_space);
3855 /* Unless this is a partially-in-register argument, the argument is now
3858 ??? Note that this can change arg->value from arg->stack to
3859 arg->stack_slot and it matters when they are not the same.
3860 It isn't totally clear that this is correct in all cases. */
3862 arg->value = arg->stack_slot;
3864 /* Once we have pushed something, pops can't safely
3865 be deferred during the rest of the arguments. */
3868 /* ANSI doesn't require a sequence point here,
3869 but PCC has one, so this will avoid some problems. */
3872 /* Free any temporary slots made in processing this argument. Show
3873 that we might have taken the address of something and pushed that
3875 preserve_temp_slots (NULL_RTX);