1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
/* NOTE(review): this excerpt is a sampled listing — interior lines are
   missing throughout (e.g. the #endif closing the #if blocks below is not
   visible here).  Code left byte-identical; comments only.  */
30 #include "insn-flags.h"
35 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
/* Fall back to STACK_BOUNDARY when the target does not define a preferred
   stack boundary of its own.  */
36 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
39 /* Decide whether a function's arguments should be processed
40 from first to last or from last to first.
42 They should if the stack and args grow in opposite directions, but
43 only if we have push insns. */
47 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
48 #define PUSH_ARGS_REVERSED /* If it's last to first */
53 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
54 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
56 /* Data structure and subroutines used within expand_call. */
/* NOTE(review): the declarations below are the fields of a per-argument
   record (struct arg_data, per the prototypes later in this file).  The
   struct's opening line and several field declarations (tree_value, value,
   reg, unsignedp, partial, pass_on_stack, stack, stack_slot, save_area,
   aligned_regs/n_aligned_regs) fall in gaps of this excerpt — only their
   descriptive comments are visible.  */
60 /* Tree node for this argument. */
62 /* Mode for value; TYPE_MODE unless promoted. */
63 enum machine_mode mode;
64 /* Current RTL value for argument, or 0 if it isn't precomputed. */
66 /* Initially-compute RTL value for argument; only for const functions. */
68 /* Register to pass this argument in, 0 if passed on stack, or an
69 PARALLEL if the arg is to be copied into multiple non-contiguous
72 /* If REG was promoted from the actual mode of the argument expression,
73 indicates whether the promotion is sign- or zero-extended. */
75 /* Number of registers to use. 0 means put the whole arg in registers.
76 Also 0 if not passed in registers. */
78 /* Non-zero if argument must be passed on stack.
79 Note that some arguments may be passed on the stack
80 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
81 pass_on_stack identifies arguments that *cannot* go in registers. */
83 /* Offset of this argument from beginning of stack-args. */
84 struct args_size offset;
85 /* Similar, but offset to the start of the stack slot. Different from
86 OFFSET if this arg pads downward. */
87 struct args_size slot_offset;
88 /* Size of this argument on the stack, rounded up for any padding it gets,
89 parts of the argument passed in registers do not count.
90 If REG_PARM_STACK_SPACE is defined, then register parms
91 are counted here as well. */
92 struct args_size size;
93 /* Location on the stack at which parameter should be stored. The store
94 has already been done if STACK == VALUE. */
96 /* Location on the stack of the start of this argument slot. This can
97 differ from STACK if this arg pads downward. This location is known
98 to be aligned to FUNCTION_ARG_BOUNDARY. */
100 #ifdef ACCUMULATE_OUTGOING_ARGS
101 /* Place that this stack area has been saved, if needed. */
104 /* If an argument's alignment does not permit direct copying into registers,
105 copy in smaller-sized pieces into pseudos. These are stored in a
106 block pointed to by this field. The next field says how many
107 word-sized pseudos we made. */
110 /* The amount that the stack pointer needs to be adjusted to
111 force alignment for the next argument. */
112 struct args_size alignment_pad;
/* File-scope state used to track outgoing-argument stack usage while a
   call is being expanded.  */
115 #ifdef ACCUMULATE_OUTGOING_ARGS
116 /* A vector of one char per byte of stack space. A byte if non-zero if
117 the corresponding stack location has been used.
118 This vector is used to prevent a function call within an argument from
119 clobbering any stack already set up. */
120 static char *stack_usage_map;
122 /* Size of STACK_USAGE_MAP. */
123 static int highest_outgoing_arg_in_use;
125 /* stack_arg_under_construction is nonzero when an argument may be
126 initialized with a constructor call (including a C function that
127 returns a BLKmode struct) and expand_call must take special action
128 to make sure the object being constructed does not overlap the
129 argument list for the constructor call. */
130 int stack_arg_under_construction;
/* Forward declarations for the static helpers of expand_call.
   NOTE(review): several parameter lists are truncated by gaps in this
   excerpt (e.g. the tails of precompute_register_parameters and
   store_one_arg).  */
133 static int calls_function PARAMS ((tree, int));
134 static int calls_function_1 PARAMS ((tree, int));
135 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
136 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
137 rtx, int, rtx, int));
138 static void precompute_register_parameters PARAMS ((int,
141 static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
143 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
145 static int finalize_must_preallocate PARAMS ((int, int,
147 struct args_size *));
148 static void precompute_arguments PARAMS ((int, int, int,
150 struct args_size *));
151 static int compute_argument_block_size PARAMS ((int,
154 static void initialize_argument_information PARAMS ((int,
161 static void compute_argument_addresses PARAMS ((struct arg_data *,
163 static rtx rtx_for_function_call PARAMS ((tree, tree));
164 static void load_register_parameters PARAMS ((struct arg_data *,
167 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
168 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
169 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
172 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
175 If WHICH is 0, return 1 if EXP contains a call to any function.
176 Actually, we only need return 1 if evaluating EXP would require pushing
177 arguments on the stack, but that is too difficult to compute, so we just
178 assume any function call might require the stack. */
/* List of SAVE_EXPRs already visited, used by calls_function_1 to avoid
   re-scanning (and infinitely recursing into) shared SAVE_EXPR nodes.  */
180 static tree calls_function_save_exprs;
/* NOTE(review): the return type, parameter declarations and braces of
   calls_function fall in gaps of this excerpt.  The visible body clears
   the SAVE_EXPR memo list before and after delegating to
   calls_function_1, so no stale state leaks between top-level calls.  */
183 calls_function (exp, which)
188 calls_function_save_exprs = 0;
189 val = calls_function_1 (exp, which);
190 calls_function_save_exprs = 0;
/* Recursive worker for calls_function; WHICH selects whether any call
   (0) or only alloca-like calls (1) count.  NOTE(review): many lines of
   this function (parameter decls, the CALL_EXPR case head, several case
   labels and returns) are missing from this excerpt.  */
195 calls_function_1 (exp, which)
200 enum tree_code code = TREE_CODE (exp);
201 int type = TREE_CODE_CLASS (code);
202 int length = tree_code_length[(int) code];
204 /* If this code is language-specific, we don't know what it will do. */
205 if ((int) code >= NUM_TREE_CODES)
208 /* Only expressions and references can contain calls. */
209 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
/* Direct call through an ADDR_EXPR of a FUNCTION_DECL: check whether the
   callee is alloca (built-in, or a function already known to call
   alloca via its saved insns).  */
218 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
219 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
222 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
224 if ((DECL_BUILT_IN (fndecl)
225 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
226 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
227 || (DECL_SAVED_INSNS (fndecl)
228 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
232 /* Third operand is RTL. */
/* SAVE_EXPR: if already expanded to RTL, or already on the visited list,
   it cannot introduce a new call; otherwise remember it and recurse into
   its operand once.  */
237 if (SAVE_EXPR_RTL (exp) != 0)
239 if (value_member (exp, calls_function_save_exprs))
241 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
242 calls_function_save_exprs)
243 return (TREE_OPERAND (exp, 0) != 0
244 && calls_function_1 (TREE_OPERAND (exp, 0), which));
/* BLOCK: scan the initializers of local declarations and then each
   subblock.  */
250 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
251 if (DECL_INITIAL (local) != 0
252 && calls_function_1 (DECL_INITIAL (local), which))
256 register tree subblock;
258 for (subblock = BLOCK_SUBBLOCKS (exp);
260 subblock = TREE_CHAIN (subblock))
261 if (calls_function_1 (subblock, which))
266 case METHOD_CALL_EXPR:
270 case WITH_CLEANUP_EXPR:
/* Default: recurse into every operand of the expression.  */
281 for (i = 0; i < length; i++)
282 if (TREE_OPERAND (exp, i) != 0
283 && calls_function_1 (TREE_OPERAND (exp, i), which))
289 /* Force FUNEXP into a form suitable for the address of a CALL,
290 and return that as an rtx. Also load the static chain register
291 if FNDECL is a nested function.
293 CALL_FUSAGE points to a variable holding the prospective
294 CALL_INSN_FUNCTION_USAGE information. */
/* NOTE(review): parameter declarations, the return statement and some
   braces of this function are missing from this excerpt.  */
297 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
303 rtx static_chain_value = 0;
305 funexp = protect_from_queue (funexp, 0);
308 /* Get possible static chain value for nested function in C. */
309 static_chain_value = lookup_static_chain (fndecl);
311 /* Make a valid memory address and copy constants thru pseudo-regs,
312 but not for a constant address if -fno-function-cse. */
313 if (GET_CODE (funexp) != SYMBOL_REF)
314 /* If we are using registers for parameters, force the
315 function address into a register now. */
316 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
317 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
318 : memory_address (FUNCTION_MODE, funexp));
/* With function CSE enabled, SYMBOL_REF addresses are copied into a
   register so repeated calls to the same function can share it (skipped
   for self-recursive calls when NO_RECURSIVE_FUNCTION_CSE is defined).  */
321 #ifndef NO_FUNCTION_CSE
322 if (optimize && ! flag_no_function_cse)
323 #ifdef NO_RECURSIVE_FUNCTION_CSE
324 if (fndecl != current_function_decl)
326 funexp = force_reg (Pmode, funexp);
/* Load the static chain for nested functions, and record a hard-register
   chain in CALL_FUSAGE so the call insn is known to use it.  */
330 if (static_chain_value != 0)
332 emit_move_insn (static_chain_rtx, static_chain_value);
334 if (GET_CODE (static_chain_rtx) == REG)
335 use_reg (call_fusage, static_chain_rtx);
341 /* Generate instructions to call function FUNEXP,
342 and optionally pop the results.
343 The CALL_INSN is the first insn generated.
345 FNDECL is the declaration node of the function. This is given to the
346 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
348 FUNTYPE is the data type of the function. This is given to the macro
349 RETURN_POPS_ARGS to determine whether this function pops its own args.
350 We used to allow an identifier for library functions, but that doesn't
351 work when the return type is an aggregate type and the calling convention
352 says that the pointer to this aggregate is to be popped by the callee.
354 STACK_SIZE is the number of bytes of arguments on the stack,
355 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
356 This is both to put into the call insn and
357 to generate explicit popping code if necessary.
359 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
360 It is zero if this call doesn't want a structure value.
362 NEXT_ARG_REG is the rtx that results from executing
363 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
364 just after all the args have had their registers assigned.
365 This could be whatever you like, but normally it is the first
366 arg-register beyond those used for args in this call,
367 or 0 if all the arg-registers are used in this call.
368 It is passed on to `gen_call' so you can put this info in the call insn.
370 VALREG is a hard register in which a value is returned,
371 or 0 if the call does not return a value.
373 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
374 the args to this call were processed.
375 We restore `inhibit_defer_pop' to that value.
377 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
378 denote registers used by the called function.
380 IS_CONST is true if this is a `const' call. */
/* NOTE(review): several declarations, braces, #endif lines and control-flow
   connectives of this function are missing from this excerpt.  */
383 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
384 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
385 call_fusage, is_const)
387 tree fndecl ATTRIBUTE_UNUSED;
388 tree funtype ATTRIBUTE_UNUSED;
389 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
390 HOST_WIDE_INT rounded_stack_size;
391 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
394 int old_inhibit_defer_pop;
398 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
399 #if defined (HAVE_call) && defined (HAVE_call_value)
400 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
403 #ifndef ACCUMULATE_OUTGOING_ARGS
404 int already_popped = 0;
405 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
408 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
409 and we don't want to load it into a register as an optimization,
410 because prepare_call_address already did it if it should be done. */
411 if (GET_CODE (funexp) != SYMBOL_REF)
412 funexp = memory_address (FUNCTION_MODE, funexp);
414 #ifndef ACCUMULATE_OUTGOING_ARGS
415 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
416 /* If the target has "call" or "call_value" insns, then prefer them
417 if no arguments are actually popped. If the target does not have
418 "call" or "call_value" insns, then we must use the popping versions
419 even if the call has no arguments to pop. */
420 #if defined (HAVE_call) && defined (HAVE_call_value)
421 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
424 if (HAVE_call_pop && HAVE_call_value_pop)
427 rtx n_pop = GEN_INT (n_popped);
430 /* If this subroutine pops its own args, record that in the call insn
431 if possible, for the sake of frame pointer elimination. */
/* call_value_pop when a return value register exists, plain call_pop
   otherwise.  */
434 pat = gen_call_value_pop (valreg,
435 gen_rtx_MEM (FUNCTION_MODE, funexp),
436 rounded_stack_size_rtx, next_arg_reg, n_pop);
438 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
439 rounded_stack_size_rtx, next_arg_reg, n_pop);
441 emit_call_insn (pat);
448 #if defined (HAVE_call) && defined (HAVE_call_value)
449 if (HAVE_call && HAVE_call_value)
452 emit_call_insn (gen_call_value (valreg,
453 gen_rtx_MEM (FUNCTION_MODE, funexp),
454 rounded_stack_size_rtx, next_arg_reg,
457 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
458 rounded_stack_size_rtx, next_arg_reg,
459 struct_value_size_rtx));
465 /* Find the CALL insn we just emitted. */
466 for (call_insn = get_last_insn ();
467 call_insn && GET_CODE (call_insn) != CALL_INSN;
468 call_insn = PREV_INSN (call_insn))
474 /* Put the register usage information on the CALL. If there is already
475 some usage information, put ours at the end. */
476 if (CALL_INSN_FUNCTION_USAGE (call_insn))
480 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
481 link = XEXP (link, 1))
484 XEXP (link, 1) = call_fusage;
487 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
489 /* If this is a const call, then set the insn's unchanging bit. */
491 CONST_CALL_P (call_insn) = 1;
493 /* Restore this now, so that we do defer pops for this call's args
494 if the context of the call as a whole permits. */
495 inhibit_defer_pop = old_inhibit_defer_pop;
497 #ifndef ACCUMULATE_OUTGOING_ARGS
498 /* If returning from the subroutine does not automatically pop the args,
499 we need an instruction to pop them sooner or later.
500 Perhaps do it now; perhaps just record how much space to pop later.
502 If returning from the subroutine does pop the args, indicate that the
503 stack pointer will be changed. */
/* A callee-pops call clobbers the stack pointer; record that in the
   call's usage list and shrink the amount left for us to pop.  */
508 CALL_INSN_FUNCTION_USAGE (call_insn)
509 = gen_rtx_EXPR_LIST (VOIDmode,
510 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
511 CALL_INSN_FUNCTION_USAGE (call_insn));
512 rounded_stack_size -= n_popped;
513 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
/* Pop now, or defer by accumulating into pending_stack_adjust when
   -fdefer-pop applies and the call is not const.  */
516 if (rounded_stack_size != 0)
518 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
519 pending_stack_adjust += rounded_stack_size;
521 adjust_stack (rounded_stack_size_rtx);
526 /* Determine if the function identified by NAME and FNDECL is one with
527 special properties we wish to know about.
529 For example, if the function might return more than one time (setjmp), then
530 set RETURNS_TWICE to a nonzero value.
532 Similarly set IS_LONGJMP for if the function is in the longjmp family.
534 Set IS_MALLOC for any of the standard memory allocation functions which
535 allocate from the heap.
537 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
538 space from the stack such as alloca. */
/* NOTE(review): parameter declarations, several assignments to the output
   flags and some condition heads are missing from this excerpt; the
   leading-character tests below are cheap prefilters before the strcmp
   calls.  */
541 special_function_p (fndecl, returns_twice, is_longjmp, fork_or_exec,
542 is_malloc, may_be_alloca)
555 /* The function decl may have the `malloc' attribute. */
556 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
/* Only consider named, file-scope, extern functions with short names —
   anything else cannot be one of the magic library entry points.  */
559 && fndecl && DECL_NAME (fndecl)
560 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
561 /* Exclude functions not at the file scope, or not `extern',
562 since they are not the magic functions we would otherwise
564 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
566 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
569 /* We assume that alloca will always be called by name. It
570 makes no sense to pass it as a pointer-to-function to
571 anything that does not understand its behavior. */
573 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
575 && ! strcmp (name, "alloca"))
576 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
578 && ! strcmp (name, "__builtin_alloca"))));
580 /* Disregard prefix _, __ or __x. */
583 if (name[1] == '_' && name[2] == 'x')
585 else if (name[1] == '_')
/* setjmp family (returns twice) and longjmp family.  */
595 && (! strcmp (tname, "setjmp")
596 || ! strcmp (tname, "setjmp_syscall")))
598 && ! strcmp (tname, "sigsetjmp"))
600 && ! strcmp (tname, "savectx")));
602 && ! strcmp (tname, "siglongjmp"))
605 else if ((tname[0] == 'q' && tname[1] == 's'
606 && ! strcmp (tname, "qsetjmp"))
607 || (tname[0] == 'v' && tname[1] == 'f'
608 && ! strcmp (tname, "vfork")))
611 else if (tname[0] == 'l' && tname[1] == 'o'
612 && ! strcmp (tname, "longjmp"))
615 else if ((tname[0] == 'f' && tname[1] == 'o'
616 && ! strcmp (tname, "fork"))
617 /* Linux specific: __clone. check NAME to insist on the
618 leading underscores, to avoid polluting the ISO / POSIX
620 || (name[0] == '_' && name[1] == '_'
621 && ! strcmp (tname, "clone"))
622 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
623 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
625 || ((tname[5] == 'p' || tname[5] == 'e')
626 && tname[6] == '\0'))))
629 /* Do not add any more malloc-like functions to this list,
630 instead mark them as malloc functions using the malloc attribute.
631 Note, realloc is not suitable for attribute malloc since
632 it may return the same address across multiple calls. */
633 else if (! strcmp (tname, "malloc")
634 || ! strcmp (tname, "calloc")
635 || ! strcmp (tname, "strdup")
636 /* Note use of NAME rather than TNAME here. These functions
637 are only reserved when preceded with __. */
638 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
639 || ! strcmp (name, "__nw") /* mangled __builtin_new */
640 || ! strcmp (name, "__builtin_new")
641 || ! strcmp (name, "__builtin_vec_new"))
646 /* Precompute all register parameters as described by ARGS, storing values
647 into fields within the ARGS array.
649 NUM_ACTUALS indicates the total number elements in the ARGS array.
651 Set REG_PARM_SEEN if we encounter a register parameter. */
/* NOTE(review): parameter declarations, braces, and the statement that
   sets *reg_parm_seen are missing from this excerpt.  */
654 precompute_register_parameters (num_actuals, args, reg_parm_seen)
656 struct arg_data *args;
663 for (i = 0; i < num_actuals; i++)
664 if (args[i].reg != 0 && ! args[i].pass_on_stack)
/* Evaluate the argument expression now if it has not been computed
   yet, and keep any temporaries alive for the duration of the call.  */
668 if (args[i].value == 0)
671 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
673 preserve_temp_slots (args[i].value);
676 /* ANSI doesn't require a sequence point here,
677 but PCC has one, so this will avoid some problems. */
681 /* If we are to promote the function arg to a wider mode,
684 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
686 = convert_modes (args[i].mode,
687 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
688 args[i].value, args[i].unsignedp);
690 /* If the value is expensive, and we are inside an appropriately
691 short loop, put the value into a pseudo and then put the pseudo
694 For small register classes, also do this if this call uses
695 register parameters. This is to avoid reload conflicts while
696 loading the parameters registers. */
698 if ((! (GET_CODE (args[i].value) == REG
699 || (GET_CODE (args[i].value) == SUBREG
700 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
701 && args[i].mode != BLKmode
702 && rtx_cost (args[i].value, SET) > 2
703 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
704 || preserve_subexpressions_p ()))
705 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
709 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
711 /* The argument list is the property of the called routine and it
712 may clobber it. If the fixed area has been used for previous
713 parameters, we must save and restore it. */
/* NOTE(review): the return type, some declarations, loop bodies that set
   *low_to_save/*high_to_save, and the final return are missing from this
   excerpt.  The function scans stack_usage_map for used bytes in the
   register-parameter area and copies that region into SAVE_AREA (a pseudo,
   or a BLKmode stack temp when alignment forces it).  */
715 save_fixed_argument_area (reg_parm_stack_space, argblock,
716 low_to_save, high_to_save)
717 int reg_parm_stack_space;
723 rtx save_area = NULL_RTX;
725 /* Compute the boundary of the that needs to be saved, if any. */
726 #ifdef ARGS_GROW_DOWNWARD
727 for (i = 0; i < reg_parm_stack_space + 1; i++)
729 for (i = 0; i < reg_parm_stack_space; i++)
732 if (i >= highest_outgoing_arg_in_use
733 || stack_usage_map[i] == 0)
736 if (*low_to_save == -1)
742 if (*low_to_save >= 0)
744 int num_to_save = *high_to_save - *low_to_save + 1;
745 enum machine_mode save_mode
746 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
749 /* If we don't have the required alignment, must do this in BLKmode. */
750 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
751 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
754 #ifdef ARGS_GROW_DOWNWARD
755 stack_area = gen_rtx_MEM (save_mode,
756 memory_address (save_mode,
757 plus_constant (argblock,
760 stack_area = gen_rtx_MEM (save_mode,
761 memory_address (save_mode,
762 plus_constant (argblock,
765 if (save_mode == BLKmode)
767 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
768 /* Cannot use emit_block_move here because it can be done by a library
769 call which in turn gets into this place again and deadly infinite
770 recursion happens. */
771 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
772 PARM_BOUNDARY / BITS_PER_UNIT);
776 save_area = gen_reg_rtx (save_mode);
777 emit_move_insn (save_area, stack_area);
/* Inverse of save_fixed_argument_area: copy SAVE_AREA back into the
   fixed register-parameter region of the stack after the call.
   NOTE(review): parameter declarations and some braces are missing from
   this excerpt.  */
784 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
790 enum machine_mode save_mode = GET_MODE (save_area);
791 #ifdef ARGS_GROW_DOWNWARD
793 = gen_rtx_MEM (save_mode,
794 memory_address (save_mode,
795 plus_constant (argblock,
799 = gen_rtx_MEM (save_mode,
800 memory_address (save_mode,
801 plus_constant (argblock,
805 if (save_mode != BLKmode)
806 emit_move_insn (stack_area, save_area);
808 /* Cannot use emit_block_move here because it can be done by a library
809 call which in turn gets into this place again and deadly infinite
810 recursion happens. */
811 move_by_pieces (stack_area, validize_mem (save_area),
812 high_to_save - low_to_save + 1,
813 PARM_BOUNDARY / BITS_PER_UNIT);
817 /* If any elements in ARGS refer to parameters that are to be passed in
818 registers, but not in memory, and whose alignment does not permit a
819 direct copy into registers. Copy the values into a group of pseudos
820 which we will later copy into the appropriate hard registers.
822 Pseudos for each unaligned argument will be stored into the array
823 args[argnum].aligned_regs. The caller is responsible for deallocating
824 the aligned_regs array if it is nonzero. */
/* NOTE(review): the return type, some declarations and closing braces of
   this function are missing from this excerpt.  */
827 store_unaligned_arguments_into_pseudos (args, num_actuals)
828 struct arg_data *args;
833 for (i = 0; i < num_actuals; i++)
834 if (args[i].reg != 0 && ! args[i].pass_on_stack
835 && args[i].mode == BLKmode
836 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
837 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
839 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
840 int big_endian_correction = 0;
/* One word-sized pseudo per register the argument occupies (or per
   partial register count, if only part goes in registers).  */
842 args[i].n_aligned_regs
843 = args[i].partial ? args[i].partial
844 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
846 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
847 * args[i].n_aligned_regs);
849 /* Structures smaller than a word are aligned to the least
850 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
851 this means we must skip the empty high order bytes when
852 calculating the bit offset. */
853 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
854 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
856 for (j = 0; j < args[i].n_aligned_regs; j++)
858 rtx reg = gen_reg_rtx (word_mode);
859 rtx word = operand_subword_force (args[i].value, j, BLKmode);
860 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
861 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
863 args[i].aligned_regs[j] = reg;
865 /* There is no need to restrict this code to loading items
866 in TYPE_ALIGN sized hunks. The bitfield instructions can
867 load up entire word sized registers efficiently.
869 ??? This may not be needed anymore.
870 We use to emit a clobber here but that doesn't let later
871 passes optimize the instructions we emit. By storing 0 into
872 the register later passes know the first AND to zero out the
873 bitfield being set in the register is unnecessary. The store
874 of 0 will be deleted as will at least the first AND. */
876 emit_move_insn (reg, const0_rtx);
878 bytes -= bitsize / BITS_PER_UNIT;
879 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
880 extract_bit_field (word, bitsize, 0, 1,
883 bitalign / BITS_PER_UNIT,
885 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
890 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
893 NUM_ACTUALS is the total number of parameters.
895 N_NAMED_ARGS is the total number of named arguments.
897 FNDECL is the tree code for the target of this call (if known)
899 ARGS_SO_FAR holds state needed by the target to know where to place
902 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
903 for arguments which are passed in registers.
905 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
906 and may be modified by this routine.
908 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
909 flags which may may be modified by this routine. */
/* NOTE(review): numerous lines of this large function (declarations,
   braces, else arms, #else/#endif lines) are missing from this excerpt;
   comments below annotate only the visible code.  */
912 initialize_argument_information (num_actuals, args, args_size, n_named_args,
913 actparms, fndecl, args_so_far,
914 reg_parm_stack_space, old_stack_level,
915 old_pending_adj, must_preallocate, is_const)
916 int num_actuals ATTRIBUTE_UNUSED;
917 struct arg_data *args;
918 struct args_size *args_size;
919 int n_named_args ATTRIBUTE_UNUSED;
922 CUMULATIVE_ARGS *args_so_far;
923 int reg_parm_stack_space;
924 rtx *old_stack_level;
925 int *old_pending_adj;
926 int *must_preallocate;
929 /* 1 if scanning parms front to back, -1 if scanning back to front. */
932 /* Count arg position in order args appear. */
935 struct args_size alignment_pad;
939 args_size->constant = 0;
942 /* In this loop, we consider args in the order they are written.
943 We fill up ARGS from the front or from the back if necessary
944 so that in any case the first arg to be pushed ends up at the front. */
946 #ifdef PUSH_ARGS_REVERSED
947 i = num_actuals - 1, inc = -1;
948 /* In this case, must reverse order of args
949 so that we compute and push the last arg first. */
954 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
955 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
957 tree type = TREE_TYPE (TREE_VALUE (p));
959 enum machine_mode mode;
961 args[i].tree_value = TREE_VALUE (p);
963 /* Replace erroneous argument with constant zero. */
964 if (type == error_mark_node || TYPE_SIZE (type) == 0)
965 args[i].tree_value = integer_zero_node, type = integer_type_node;
967 /* If TYPE is a transparent union, pass things the way we would
968 pass the first field of the union. We have already verified that
969 the modes are the same. */
970 if (TYPE_TRANSPARENT_UNION (type))
971 type = TREE_TYPE (TYPE_FIELDS (type));
973 /* Decide where to pass this arg.
975 args[i].reg is nonzero if all or part is passed in registers.
977 args[i].partial is nonzero if part but not all is passed in registers,
978 and the exact value says how many words are passed in registers.
980 args[i].pass_on_stack is nonzero if the argument must at least be
981 computed on the stack. It may then be loaded back into registers
982 if args[i].reg is nonzero.
984 These decisions are driven by the FUNCTION_... macros and must agree
985 with those made by function.c. */
987 /* See if this argument should be passed by invisible reference. */
988 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
989 && contains_placeholder_p (TYPE_SIZE (type)))
990 || TREE_ADDRESSABLE (type)
991 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
992 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
993 type, argpos < n_named_args)
997 /* If we're compiling a thunk, pass through invisible
998 references instead of making a copy. */
999 if (current_function_is_thunk
1000 #ifdef FUNCTION_ARG_CALLEE_COPIES
1001 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1002 type, argpos < n_named_args)
1003 /* If it's in a register, we must make a copy of it too. */
1004 /* ??? Is this a sufficient test? Is there a better one? */
1005 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1006 && REG_P (DECL_RTL (args[i].tree_value)))
1007 && ! TREE_ADDRESSABLE (type))
1011 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1012 new object from the argument. If we are passing by
1013 invisible reference, the callee will do that for us, so we
1014 can strip off the TARGET_EXPR. This is not always safe,
1015 but it is safe in the only case where this is a useful
1016 optimization; namely, when the argument is a plain object.
1017 In that case, the frontend is just asking the backend to
1018 make a bitwise copy of the argument. */
1020 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1021 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1022 (args[i].tree_value, 1)))
1024 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1025 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1027 args[i].tree_value = build1 (ADDR_EXPR,
1028 build_pointer_type (type),
1029 args[i].tree_value);
1030 type = build_pointer_type (type);
1034 /* We make a copy of the object and pass the address to the
1035 function being called. */
/* Variable-sized (or stack-check-exceeding) objects get space carved
   dynamically from the stack, after saving the stack level so it can
   be restored after the call; fixed-size objects use a stack temp.  */
1038 if (TYPE_SIZE (type) == 0
1039 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1040 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1041 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1042 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1043 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1045 /* This is a variable-sized object. Make space on the stack
1047 rtx size_rtx = expr_size (TREE_VALUE (p));
1049 if (*old_stack_level == 0)
1051 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1052 *old_pending_adj = pending_stack_adjust;
1053 pending_stack_adjust = 0;
1056 copy = gen_rtx_MEM (BLKmode,
1057 allocate_dynamic_stack_space (size_rtx,
1059 TYPE_ALIGN (type)));
1063 int size = int_size_in_bytes (type);
1064 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1067 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1069 store_expr (args[i].tree_value, copy, 0);
/* From here on, pass the address of the copy instead of the value.  */
1072 args[i].tree_value = build1 (ADDR_EXPR,
1073 build_pointer_type (type),
1074 make_tree (type, copy));
1075 type = build_pointer_type (type);
1079 mode = TYPE_MODE (type);
1080 unsignedp = TREE_UNSIGNED (type);
1082 #ifdef PROMOTE_FUNCTION_ARGS
1083 mode = promote_mode (type, mode, &unsignedp, 1);
1086 args[i].unsignedp = unsignedp;
1087 args[i].mode = mode;
1088 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1089 argpos < n_named_args);
1090 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1093 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1094 argpos < n_named_args);
1097 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1099 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1100 it means that we are to pass this arg in the register(s) designated
1101 by the PARALLEL, but also to pass it in the stack. */
1102 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1103 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1104 args[i].pass_on_stack = 1;
1106 /* If this is an addressable type, we must preallocate the stack
1107 since we must evaluate the object into its final location.
1109 If this is to be passed in both registers and the stack, it is simpler
1111 if (TREE_ADDRESSABLE (type)
1112 || (args[i].pass_on_stack && args[i].reg != 0))
1113 *must_preallocate = 1;
1115 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1116 we cannot consider this function call constant. */
1117 if (TREE_ADDRESSABLE (type))
1120 /* Compute the stack-size of this argument. */
1121 if (args[i].reg == 0 || args[i].partial != 0
1122 || reg_parm_stack_space > 0
1123 || args[i].pass_on_stack)
1124 locate_and_pad_parm (mode, type,
1125 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1130 fndecl, args_size, &args[i].offset,
1131 &args[i].size, &alignment_pad);
1133 #ifndef ARGS_GROW_DOWNWARD
1134 args[i].slot_offset = *args_size;
1137 args[i].alignment_pad = alignment_pad;
1139 /* If a part of the arg was put into registers,
1140 don't include that part in the amount pushed. */
1141 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1142 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1143 / (PARM_BOUNDARY / BITS_PER_UNIT)
1144 * (PARM_BOUNDARY / BITS_PER_UNIT));
1146 /* Update ARGS_SIZE, the total stack space for args so far. */
1148 args_size->constant += args[i].size.constant;
1149 if (args[i].size.var)
1151 ADD_PARM_SIZE (*args_size, args[i].size.var);
1154 /* Since the slot offset points to the bottom of the slot,
1155 we must record it after incrementing if the args grow down. */
1156 #ifdef ARGS_GROW_DOWNWARD
1157 args[i].slot_offset = *args_size;
1159 args[i].slot_offset.constant = -args_size->constant;
1161 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1164 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1165 have been used, etc. */
1167 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1168 argpos < n_named_args);
1172 /* Update ARGS_SIZE to contain the total size for the argument block.
1173 Return the original constant component of the argument block's size.
1175 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1176 for arguments passed in registers. */
1179 compute_argument_block_size (reg_parm_stack_space, args_size,
1180 preferred_stack_boundary)
1181 int reg_parm_stack_space;
1182 struct args_size *args_size;
1183 int preferred_stack_boundary ATTRIBUTE_UNUSED;
/* Remember the constant component before any rounding or adjustment;
   this pre-adjustment value is what we return to the caller.  */
1185 int unadjusted_args_size = args_size->constant;
1187 /* Compute the actual size of the argument block required. The variable
1188 and constant sizes must be combined, the size may have to be rounded,
1189 and there may be a minimum required size. */
/* Variable-sized case: fold the constant part into the size tree and do
   all further accounting on args_size->var.  */
1193 args_size->var = ARGS_SIZE_TREE (*args_size);
1194 args_size->constant = 0;
/* Round the variable size up to the preferred stack boundary,
   converted from bits to bytes.  */
1196 #ifdef PREFERRED_STACK_BOUNDARY
1197 preferred_stack_boundary /= BITS_PER_UNIT;
1198 if (preferred_stack_boundary > 1)
1199 args_size->var = round_up (args_size->var, preferred_stack_boundary);
/* Guarantee at least the reserved register-parameter area.  */
1202 if (reg_parm_stack_space > 0)
1205 = size_binop (MAX_EXPR, args_size->var,
1206 ssize_int (reg_parm_stack_space));
1208 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1209 /* The area corresponding to register parameters is not to count in
1210 the size of the block we need. So make the adjustment. */
1212 = size_binop (MINUS_EXPR, args_size->var,
1213 ssize_int (reg_parm_stack_space));
/* Constant-sized case: round so that the eventual total stack
   adjustment (including any pending adjustment) is a multiple of the
   preferred boundary.  */
1219 #ifdef PREFERRED_STACK_BOUNDARY
1220 preferred_stack_boundary /= BITS_PER_UNIT;
1221 args_size->constant = (((args_size->constant
1222 + pending_stack_adjust
1223 + preferred_stack_boundary - 1)
1224 / preferred_stack_boundary
1225 * preferred_stack_boundary)
1226 - pending_stack_adjust);
/* Never ask for less than the reserved register-parameter area.  */
1229 args_size->constant = MAX (args_size->constant,
1230 reg_parm_stack_space);
/* With MAYBE_REG_PARM_STACK_SPACE, no block space is needed at all
   when no register-parm area is in use.  */
1232 #ifdef MAYBE_REG_PARM_STACK_SPACE
1233 if (reg_parm_stack_space == 0)
1234 args_size->constant = 0;
1237 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1238 args_size->constant -= reg_parm_stack_space;
/* Return the size as it stood before rounding/adjustments.  */
1241 return unadjusted_args_size;
1244 /* Precompute parameters as needed for a function call.
1246 IS_CONST indicates the target function is a pure function.
1248 MUST_PREALLOCATE indicates that we must preallocate stack space for
1249 any stack arguments.
1251 NUM_ACTUALS is the number of arguments.
1253 ARGS is an array containing information for each argument; this routine
1254 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1256 ARGS_SIZE contains information about the size of the arg list. */
1259 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1261 int must_preallocate;
1263 struct arg_data *args;
1264 struct args_size *args_size;
1268 /* If this function call is cse'able, precompute all the parameters.
1269 Note that if the parameter is constructed into a temporary, this will
1270 cause an additional copy because the parameter will be constructed
1271 into a temporary location and then copied into the outgoing arguments.
1272 If a parameter contains a call to alloca and this function uses the
1273 stack, precompute the parameter. */
1275 /* If we preallocated the stack space, and some arguments must be passed
1276 on the stack, then we must precompute any parameter which contains a
1277 function call which will store arguments on the stack.
1278 Otherwise, evaluating the parameter may clobber previous parameters
1279 which have already been stored into the stack. */
1281 for (i = 0; i < num_actuals; i++)
1283 || ((args_size->var != 0 || args_size->constant != 0)
1284 && calls_function (args[i].tree_value, 1))
1285 || (must_preallocate
1286 && (args_size->var != 0 || args_size->constant != 0)
1287 && calls_function (args[i].tree_value, 0)))
1289 /* If this is an addressable type, we cannot pre-evaluate it. */
1290 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
/* Evaluate the argument now; the result becomes both VALUE and
   INITIAL_VALUE below.  */
1296 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1298 preserve_temp_slots (args[i].value);
1301 /* ANSI doesn't require a sequence point here,
1302 but PCC has one, so this will avoid some problems. */
1305 args[i].initial_value = args[i].value
1306 = protect_from_queue (args[i].value, 0);
/* The arg is passed in a promoted mode that differs from the type's
   natural mode; widen the computed value to the promoted mode.  */
1308 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1311 = convert_modes (args[i].mode,
1312 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1313 args[i].value, args[i].unsignedp)<
1314 #ifdef PROMOTE_FOR_CALL_ONLY
1315 /* CSE will replace this only if it contains args[i].value
1316 pseudo, so convert it down to the declared mode using
1318 if (GET_CODE (args[i].value) == REG
1319 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
/* Record INITIAL_VALUE as a promoted SUBREG of the widened
   register, in the type's declared mode.  */
1321 args[i].initial_value
1322 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1324 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1325 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1326 = args[i].unsignedp;
1333 /* Given the current state of MUST_PREALLOCATE and information about
1334 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1335 compute and return the final value for MUST_PREALLOCATE. */
1338 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1339 int must_preallocate;
1341 struct arg_data *args;
1342 struct args_size *args_size;
1344 /* See if we have or want to preallocate stack space.
1346 If we would have to push a partially-in-regs parm
1347 before other stack parms, preallocate stack space instead.
1349 If the size of some parm is not a multiple of the required stack
1350 alignment, we must preallocate.
1352 If the total size of arguments that would otherwise create a copy in
1353 a temporary (such as a CALL) is more than half the total argument list
1354 size, preallocation is faster.
1356 Another reason to preallocate is if we have a machine (like the m88k)
1357 where stack alignment is required to be maintained between every
1358 pair of insns, not just when the call is made. However, we assume here
1359 that such machines either do not have push insns (and hence preallocation
1360 would occur anyway) or the problem is taken care of with
1363 if (! must_preallocate)
1365 int partial_seen = 0;
1366 int copy_to_evaluate_size = 0;
1369 for (i = 0; i < num_actuals && ! must_preallocate; i++)
/* After a partially-in-registers parm has been seen, any later
   pure-stack parm forces preallocation.  */
1371 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1373 else if (partial_seen && args[i].reg == 0)
1374 must_preallocate = 1;
/* BLKmode values produced by a call, temporary, or conditional (or of
   addressable type) would need an extra copy; tally their sizes.  */
1376 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1377 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1378 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1379 || TREE_CODE (args[i].tree_value) == COND_EXPR
1380 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1381 copy_to_evaluate_size
1382 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
/* If those copies would cover at least half the argument block,
   preallocation is the cheaper strategy.  */
1385 if (copy_to_evaluate_size * 2 >= args_size->constant
1386 && args_size->constant > 0)
1387 must_preallocate = 1;
1389 return must_preallocate;
1392 /* If we preallocated stack space, compute the address of each argument
1393 and store it into the ARGS array.
1395 We need not ensure it is a valid memory address here; it will be
1396 validized when it is used.
1398 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1401 compute_argument_addresses (args, argblock, num_actuals)
1402 struct arg_data *args;
/* Split ARGBLOCK into a base register plus constant displacement so
   per-argument offsets can be folded into constants below.  */
1408 rtx arg_reg = argblock;
1409 int i, arg_offset = 0;
1411 if (GET_CODE (argblock) == PLUS)
1412 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1414 for (i = 0; i < num_actuals; i++)
1416 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1417 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1420 /* Skip this parm if it will not be passed on the stack. */
1421 if (! args[i].pass_on_stack && args[i].reg != 0)
/* args[i].stack: a MEM at the argument's offset within the block;
   fold the offset into a constant address when possible.  */
1424 if (GET_CODE (offset) == CONST_INT)
1425 addr = plus_constant (arg_reg, INTVAL (offset))<
1427 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1429 addr = plus_constant (addr, arg_offset);
1430 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1433 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
/* args[i].stack_slot: a MEM at the slot offset (recorded separately
   from OFFSET; see the slot_offset bookkeeping in the arg scan).  */
1435 if (GET_CODE (slot_offset) == CONST_INT)
1436 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1438 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1440 addr = plus_constant (addr, arg_offset);
1441 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1446 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1447 in a call instruction.
1449 FNDECL is the tree node for the target function. For an indirect call
1450 FNDECL will be NULL_TREE.
1452 EXP is the CALL_EXPR for this call. */
1455 rtx_for_function_call (fndecl, exp)
1461 /* Get the function to call, in the form of RTL. */
1464 /* If this is the first use of the function, see if we need to
1465 make an external definition for it. */
1466 if (! TREE_USED (fndecl))
1468 assemble_external (fndecl);
1469 TREE_USED (fndecl) = 1;
1472 /* Get a SYMBOL_REF rtx for the function address. */
1473 funexp = XEXP (DECL_RTL (fndecl), 0);
/* Indirect call: evaluate the called expression itself into an rtx.  */
1476 /* Generate an rtx (probably a pseudo-register) for the address. */
1481 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1482 pop_temp_slots (); /* FUNEXP can't be BLKmode */
/* With -fcheck-memory-usage, emit a runtime library call verifying that
   the target address is executable.  */
1484 /* Check the function is executable. */
1485 if (current_function_check_memory_usage)
1487 #ifdef POINTERS_EXTEND_UNSIGNED
1488 /* It might be OK to convert funexp in place, but there's
1489 a lot going on between here and when it happens naturally
1490 that this seems safer. */
1491 funaddr = convert_memory_address (Pmode, funexp);
1493 emit_library_call (chkr_check_exec_libfunc, 1,
1502 /* Do the register loads required for any wholly-register parms or any
1503 parms which are passed both on the stack and in a register. Their
1504 expressions were already evaluated.
1506 Mark all register-parms as living through the call, putting these USE
1507 insns in the CALL_INSN_FUNCTION_USAGE field. */
1510 load_register_parameters (args, num_actuals, call_fusage)
1511 struct arg_data *args;
/* The target may require register args to be loaded last-to-first.  */
1517 #ifdef LOAD_ARGS_REVERSED
1518 for (i = num_actuals - 1; i >= 0; i--)
1520 for (i = 0; i < num_actuals; i++)
1523 rtx reg = args[i].reg;
1524 int partial = args[i].partial;
1529 /* Set to non-negative if must move a word at a time, even if just
1530 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1531 we just use a normal move insn. This value can be zero if the
1532 argument is a zero size structure with no fields. */
1533 nregs = (partial ? partial
1534 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1535 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1536 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1539 /* Handle calls that pass values in multiple non-contiguous
1540 locations. The Irix 6 ABI has examples of this. */
1542 if (GET_CODE (reg) == PARALLEL)
1544 emit_group_load (reg, args[i].value,
1545 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1546 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1550 /* If simple case, just do move. If normal partial, store_one_arg
1551 has already loaded the register for us. In all other cases,
1552 load the register(s) from memory. */
1554 else if (nregs == -1)
1555 emit_move_insn (reg, args[i].value);
1557 /* If we have pre-computed the values to put in the registers in
1558 the case of non-aligned structures, copy them in now. */
1560 else if (args[i].n_aligned_regs != 0)
1561 for (j = 0; j < args[i].n_aligned_regs; j++)
1562 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1563 args[i].aligned_regs[j]);
/* Word-at-a-time copy from (validized) memory into consecutive regs.  */
1565 else if (partial == 0 || args[i].pass_on_stack)
1566 move_block_to_reg (REGNO (reg),
1567 validize_mem (args[i].value), nregs,
/* Record the loaded registers in CALL_FUSAGE so later passes know
   they are live across the call.  */
1570 /* Handle calls that pass values in multiple non-contiguous
1571 locations. The Irix 6 ABI has examples of this. */
1572 if (GET_CODE (reg) == PARALLEL)
1573 use_group_regs (call_fusage, reg);
1574 else if (nregs == -1)
1575 use_reg (call_fusage, reg);
1577 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1582 /* Generate all the code for a function call
1583 and return an rtx for its value.
1584 Store the value in TARGET (specified as an rtx) if convenient.
1585 If the value is stored in TARGET then TARGET is returned.
1586 If IGNORE is nonzero, then we ignore the value of the function call. */
1589 expand_call (exp, target, ignore)
1594 /* List of actual parameters. */
1595 tree actparms = TREE_OPERAND (exp, 1);
1596 /* RTX for the function to be called. */
1598 /* Data type of the function. */
1600 /* Declaration of the function being called,
1601 or 0 if the function is computed (not known by name). */
1606 /* Register in which non-BLKmode value will be returned,
1607 or 0 if no value or if value is BLKmode. */
1609 /* Address where we should return a BLKmode value;
1610 0 if value not BLKmode. */
1611 rtx structure_value_addr = 0;
1612 /* Nonzero if that address is being passed by treating it as
1613 an extra, implicit first parameter. Otherwise,
1614 it is passed by being copied directly into struct_value_rtx. */
1615 int structure_value_addr_parm = 0;
1616 /* Size of aggregate value wanted, or zero if none wanted
1617 or if we are using the non-reentrant PCC calling convention
1618 or expecting the value in registers. */
1619 HOST_WIDE_INT struct_value_size = 0;
1620 /* Nonzero if called function returns an aggregate in memory PCC style,
1621 by returning the address of where to find it. */
1622 int pcc_struct_value = 0;
1624 /* Number of actual parameters in this call, including struct value addr. */
1626 /* Number of named args. Args after this are anonymous ones
1627 and they must all go on the stack. */
1630 /* Vector of information about each argument.
1631 Arguments are numbered in the order they will be pushed,
1632 not the order they are written. */
1633 struct arg_data *args;
1635 /* Total size in bytes of all the stack-parms scanned so far. */
1636 struct args_size args_size;
1637 /* Size of arguments before any adjustments (such as rounding). */
1638 int unadjusted_args_size;
1639 /* Data on reg parms scanned so far. */
1640 CUMULATIVE_ARGS args_so_far;
1641 /* Nonzero if a reg parm has been scanned. */
1643 /* Nonzero if this is an indirect function call. */
1645 /* Nonzero if we must avoid push-insns in the args for this call.
1646 If stack space is allocated for register parameters, but not by the
1647 caller, then it is preallocated in the fixed part of the stack frame.
1648 So the entire argument block must then be preallocated (i.e., we
1649 ignore PUSH_ROUNDING in that case). */
1651 #ifdef PUSH_ROUNDING
1652 int must_preallocate = 0;
1654 int must_preallocate = 1;
1657 /* Size of the stack reserved for parameter registers. */
1658 int reg_parm_stack_space = 0;
1660 /* Address of space preallocated for stack parms
1661 (on machines that lack push insns), or 0 if space not preallocated. */
1664 /* Nonzero if it is plausible that this is a call to alloca. */
1666 /* Nonzero if this is a call to malloc or a related function. */
1668 /* Nonzero if this is a call to setjmp or a related function. */
1670 /* Nonzero if this is a call to `longjmp'. */
1672 /* Nonzero if this is a syscall that makes a new process in the image of
1675 /* Nonzero if this is a call to an inline function. */
1676 int is_integrable = 0;
1677 /* Nonzero if this is a call to a `const' function.
1678 Note that only explicitly named functions are handled as `const' here. */
1680 /* Nonzero if this is a call to a `volatile' function. */
1681 int is_volatile = 0;
1682 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1683 /* Define the boundary of the register parm stack space that needs to be
1685 int low_to_save = -1, high_to_save;
1686 rtx save_area = 0; /* Place that it is saved */
1689 #ifdef ACCUMULATE_OUTGOING_ARGS
1690 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1691 char *initial_stack_usage_map = stack_usage_map;
1692 int old_stack_arg_under_construction = 0;
1695 rtx old_stack_level = 0;
1696 int old_pending_adj = 0;
1697 int old_inhibit_defer_pop = inhibit_defer_pop;
1698 rtx call_fusage = 0;
1701 #ifdef PREFERRED_STACK_BOUNDARY
1702 int preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1704 /* In this case preferred_stack_boundary variable is meaningless.
1705 It is used only in order to keep ifdef noise down when calling
1706 compute_argument_block_size. */
1707 int preferred_stack_boundary = 0;
1710 /* The value of the function call can be put in a hard register. But
1711 if -fcheck-memory-usage, code which invokes functions (and thus
1712 damages some hard registers) can be inserted before using the value.
1713 So, target is always a pseudo-register in that case. */
1714 if (current_function_check_memory_usage)
1717 /* See if we can find a DECL-node for the actual function.
1718 As a result, decide whether this is a call to an integrable function. */
1720 p = TREE_OPERAND (exp, 0);
1721 if (TREE_CODE (p) == ADDR_EXPR)
1723 fndecl = TREE_OPERAND (p, 0);
1724 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1729 && fndecl != current_function_decl
1730 && DECL_INLINE (fndecl)
1731 && DECL_SAVED_INSNS (fndecl)
1732 && DECL_SAVED_INSNS (fndecl)->inlinable)
1734 else if (! TREE_ADDRESSABLE (fndecl))
1736 /* In case this function later becomes inlinable,
1737 record that there was already a non-inline call to it.
1739 Use abstraction instead of setting TREE_ADDRESSABLE
1741 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1744 warning_with_decl (fndecl, "can't inline call to `%s'");
1745 warning ("called from here");
1747 mark_addressable (fndecl);
1750 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1751 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1754 if (TREE_THIS_VOLATILE (fndecl))
1759 /* If we don't have specific function to call, see if we have a
1760 constant or `noreturn' function from the type. */
1763 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1764 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1767 #ifdef REG_PARM_STACK_SPACE
1768 #ifdef MAYBE_REG_PARM_STACK_SPACE
1769 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1771 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1775 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1776 if (reg_parm_stack_space > 0)
1777 must_preallocate = 1;
1780 /* Warn if this value is an aggregate type,
1781 regardless of which calling convention we are using for it. */
1782 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1783 warning ("function call has aggregate value");
1785 /* Set up a place to return a structure. */
1787 /* Cater to broken compilers. */
1788 if (aggregate_value_p (exp))
1790 /* This call returns a big structure. */
1793 #ifdef PCC_STATIC_STRUCT_RETURN
1795 pcc_struct_value = 1;
1796 /* Easier than making that case work right. */
1799 /* In case this is a static function, note that it has been
1801 if (! TREE_ADDRESSABLE (fndecl))
1802 mark_addressable (fndecl);
1806 #else /* not PCC_STATIC_STRUCT_RETURN */
1808 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1810 if (target && GET_CODE (target) == MEM)
1811 structure_value_addr = XEXP (target, 0);
1814 /* Assign a temporary to hold the value. */
1817 /* For variable-sized objects, we must be called with a target
1818 specified. If we were to allocate space on the stack here,
1819 we would have no way of knowing when to free it. */
1821 if (struct_value_size < 0)
1824 /* This DECL is just something to feed to mark_addressable;
1825 it doesn't get pushed. */
1826 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1827 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1828 mark_addressable (d);
1829 mark_temp_addr_taken (DECL_RTL (d));
1830 structure_value_addr = XEXP (DECL_RTL (d), 0);
1835 #endif /* not PCC_STATIC_STRUCT_RETURN */
1838 /* If called function is inline, try to integrate it. */
1844 #ifdef ACCUMULATE_OUTGOING_ARGS
1845 before_call = get_last_insn ();
1848 temp = expand_inline_function (fndecl, actparms, target,
1849 ignore, TREE_TYPE (exp),
1850 structure_value_addr);
1852 /* If inlining succeeded, return. */
1853 if (temp != (rtx) (HOST_WIDE_INT) -1)
1855 #ifdef ACCUMULATE_OUTGOING_ARGS
1856 /* If the outgoing argument list must be preserved, push
1857 the stack before executing the inlined function if it
1860 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1861 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1864 if (stack_arg_under_construction || i >= 0)
1867 = before_call ? NEXT_INSN (before_call) : get_insns ();
1868 rtx insn = NULL_RTX, seq;
1870 /* Look for a call in the inline function code.
1871 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1872 nonzero then there is a call and it is not necessary
1873 to scan the insns. */
1875 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1876 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1877 if (GET_CODE (insn) == CALL_INSN)
1882 /* Reserve enough stack space so that the largest
1883 argument list of any function call in the inline
1884 function does not overlap the argument list being
1885 evaluated. This is usually an overestimate because
1886 allocate_dynamic_stack_space reserves space for an
1887 outgoing argument list in addition to the requested
1888 space, but there is no way to ask for stack space such
1889 that an argument list of a certain length can be
1892 Add the stack space reserved for register arguments, if
1893 any, in the inline function. What is really needed is the
1894 largest value of reg_parm_stack_space in the inline
1895 function, but that is not available. Using the current
1896 value of reg_parm_stack_space is wrong, but gives
1897 correct results on all supported machines. */
1899 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1900 + reg_parm_stack_space);
1903 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1904 allocate_dynamic_stack_space (GEN_INT (adjust),
1905 NULL_RTX, BITS_PER_UNIT);
1908 emit_insns_before (seq, first_insn);
1909 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1914 /* If the result is equivalent to TARGET, return TARGET to simplify
1915 checks in store_expr. They can be equivalent but not equal in the
1916 case of a function that returns BLKmode. */
1917 if (temp != target && rtx_equal_p (temp, target))
1922 /* If inlining failed, mark FNDECL as needing to be compiled
1923 separately after all. If function was declared inline,
1925 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1926 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1928 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1929 warning ("called from here");
1931 mark_addressable (fndecl);
1934 function_call_count++;
1936 if (fndecl && DECL_NAME (fndecl))
1937 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1939 /* Ensure current function's preferred stack boundary is at least
1940 what we need. We don't have to increase alignment for recursive
1942 if (cfun->preferred_stack_boundary < preferred_stack_boundary
1943 && fndecl != current_function_decl)
1944 cfun->preferred_stack_boundary = preferred_stack_boundary;
1946 /* See if this is a call to a function that can return more than once
1947 or a call to longjmp or malloc. */
1948 special_function_p (fndecl, &returns_twice, &is_longjmp, &fork_or_exec,
1949 &is_malloc, &may_be_alloca);
1952 current_function_calls_alloca = 1;
1954 /* Operand 0 is a pointer-to-function; get the type of the function. */
1955 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1956 if (! POINTER_TYPE_P (funtype))
1958 funtype = TREE_TYPE (funtype);
1960 /* When calling a const function, we must pop the stack args right away,
1961 so that the pop is deleted or moved with the call. */
1965 /* Don't let pending stack adjusts add up to too much.
1966 Also, do all pending adjustments now
1967 if there is any chance this might be a call to alloca. */
1969 if (pending_stack_adjust >= 32
1970 || (pending_stack_adjust > 0 && may_be_alloca))
1971 do_pending_stack_adjust ();
1973 if (profile_arc_flag && fork_or_exec)
1975 /* A fork duplicates the profile information, and an exec discards
1976 it. We can't rely on fork/exec to be paired. So write out the
1977 profile information we have gathered so far, and clear it. */
1978 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
1981 /* ??? When __clone is called with CLONE_VM set, profiling is
1982 subject to race conditions, just as with multithreaded programs. */
1985 /* Push the temporary stack slot level so that we can free any temporaries
1989 /* Start updating where the next arg would go.
1991 On some machines (such as the PA) indirect calls have a different
1992 calling convention than normal calls. The last argument in
1993 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1995 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1997 /* If struct_value_rtx is 0, it means pass the address
1998 as if it were an extra parameter. */
1999 if (structure_value_addr && struct_value_rtx == 0)
2001 /* If structure_value_addr is a REG other than
2002 virtual_outgoing_args_rtx, we can use always use it. If it
2003 is not a REG, we must always copy it into a register.
2004 If it is virtual_outgoing_args_rtx, we must copy it to another
2005 register in some cases. */
2006 rtx temp = (GET_CODE (structure_value_addr) != REG
2007 #ifdef ACCUMULATE_OUTGOING_ARGS
2008 || (stack_arg_under_construction
2009 && structure_value_addr == virtual_outgoing_args_rtx)
2011 ? copy_addr_to_reg (structure_value_addr)
2012 : structure_value_addr);
2015 = tree_cons (error_mark_node,
2016 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2019 structure_value_addr_parm = 1;
2022 /* Count the arguments and set NUM_ACTUALS. */
2023 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
2026 /* Compute number of named args.
2027 Normally, don't include the last named arg if anonymous args follow.
2028 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2029 (If no anonymous args follow, the result of list_length is actually
2030 one too large. This is harmless.)
2032 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2033 zero, this machine will be able to place unnamed args that were passed in
2034 registers into the stack. So treat all args as named. This allows the
2035 insns emitting for a specific argument list to be independent of the
2036 function declaration.
2038 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
2039 way to pass unnamed args in registers, so we must force them into
2042 if ((STRICT_ARGUMENT_NAMING
2043 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2044 && TYPE_ARG_TYPES (funtype) != 0)
2046 = (list_length (TYPE_ARG_TYPES (funtype))
2047 /* Don't include the last named arg. */
2048 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2049 /* Count the struct value address, if it is passed as a parm. */
2050 + structure_value_addr_parm);
2052 /* If we know nothing, treat all args as named. */
2053 n_named_args = num_actuals;
2055 /* Make a vector to hold all the information about each arg. */
2056 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2057 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2059 /* Build up entries inthe ARGS array, compute the size of the arguments
2060 into ARGS_SIZE, etc. */
2061 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
2062 actparms, fndecl, &args_so_far,
2063 reg_parm_stack_space, &old_stack_level,
2064 &old_pending_adj, &must_preallocate,
2067 #ifdef FINAL_REG_PARM_STACK_SPACE
2068 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2074 /* If this function requires a variable-sized argument list, don't try to
2075 make a cse'able block for this call. We may be able to do this
2076 eventually, but it is too complicated to keep track of what insns go
2077 in the cse'able block and which don't. */
2080 must_preallocate = 1;
2083 /* Compute the actual size of the argument block required. The variable
2084 and constant sizes must be combined, the size may have to be rounded,
2085 and there may be a minimum required size. */
2086 unadjusted_args_size
2087 = compute_argument_block_size (reg_parm_stack_space, &args_size,
2088 preferred_stack_boundary);
2090 /* Now make final decision about preallocating stack space. */
2091 must_preallocate = finalize_must_preallocate (must_preallocate,
2092 num_actuals, args, &args_size);
2094 /* If the structure value address will reference the stack pointer, we must
2095 stabilize it. We don't need to do this if we know that we are not going
2096 to adjust the stack pointer in processing this call. */
2098 if (structure_value_addr
2099 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2100 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2102 #ifndef ACCUMULATE_OUTGOING_ARGS
2103 || args_size.constant
2106 structure_value_addr = copy_to_reg (structure_value_addr);
2108 /* Precompute any arguments as needed. */
2109 precompute_arguments (is_const, must_preallocate, num_actuals,
2112 /* Now we are about to start emitting insns that can be deleted
2113 if a libcall is deleted. */
2114 if (is_const || is_malloc)
2117 /* If we have no actual push instructions, or shouldn't use them,
2118 make space for all args right now. */
2120 if (args_size.var != 0)
2122 if (old_stack_level == 0)
2124 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2125 old_pending_adj = pending_stack_adjust;
2126 pending_stack_adjust = 0;
2127 #ifdef ACCUMULATE_OUTGOING_ARGS
2128 /* stack_arg_under_construction says whether a stack arg is
2129 being constructed at the old stack level. Pushing the stack
2130 gets a clean outgoing argument block. */
2131 old_stack_arg_under_construction = stack_arg_under_construction;
2132 stack_arg_under_construction = 0;
2135 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2139 /* Note that we must go through the motions of allocating an argument
2140 block even if the size is zero because we may be storing args
2141 in the area reserved for register arguments, which may be part of
2144 int needed = args_size.constant;
2146 /* Store the maximum argument space used. It will be pushed by
2147 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2150 if (needed > current_function_outgoing_args_size)
2151 current_function_outgoing_args_size = needed;
2153 if (must_preallocate)
2155 #ifdef ACCUMULATE_OUTGOING_ARGS
2156 /* Since the stack pointer will never be pushed, it is possible for
2157 the evaluation of a parm to clobber something we have already
2158 written to the stack. Since most function calls on RISC machines
2159 do not use the stack, this is uncommon, but must work correctly.
2161 Therefore, we save any area of the stack that was already written
2162 and that we are using. Here we set up to do this by making a new
2163 stack usage map from the old one. The actual save will be done
2166 Another approach might be to try to reorder the argument
2167 evaluations to avoid this conflicting stack usage. */
2169 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2170 /* Since we will be writing into the entire argument area, the
2171 map must be allocated for its entire size, not just the part that
2172 is the responsibility of the caller. */
2173 needed += reg_parm_stack_space;
2176 #ifdef ARGS_GROW_DOWNWARD
2177 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2180 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2183 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2185 if (initial_highest_arg_in_use)
2186 bcopy (initial_stack_usage_map, stack_usage_map,
2187 initial_highest_arg_in_use);
2189 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2190 bzero (&stack_usage_map[initial_highest_arg_in_use],
2191 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2194 /* The address of the outgoing argument list must not be copied to a
2195 register here, because argblock would be left pointing to the
2196 wrong place after the call to allocate_dynamic_stack_space below.
2199 argblock = virtual_outgoing_args_rtx;
2201 #else /* not ACCUMULATE_OUTGOING_ARGS */
2202 if (inhibit_defer_pop == 0)
2204 /* Try to reuse some or all of the pending_stack_adjust
2205 to get this space. Maybe we can avoid any pushing. */
2206 if (needed > pending_stack_adjust)
2208 needed -= pending_stack_adjust;
2209 pending_stack_adjust = 0;
2213 pending_stack_adjust -= needed;
2217 /* Special case this because overhead of `push_block' in this
2218 case is non-trivial. */
2220 argblock = virtual_outgoing_args_rtx;
2222 argblock = push_block (GEN_INT (needed), 0, 0);
2224 /* We only really need to call `copy_to_reg' in the case where push
2225 insns are going to be used to pass ARGBLOCK to a function
2226 call in ARGS. In that case, the stack pointer changes value
2227 from the allocation point to the call point, and hence
2228 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2229 But might as well always do it. */
2230 argblock = copy_to_reg (argblock);
2231 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2235 #ifdef ACCUMULATE_OUTGOING_ARGS
2236 /* The save/restore code in store_one_arg handles all cases except one:
2237 a constructor call (including a C function returning a BLKmode struct)
2238 to initialize an argument. */
2239 if (stack_arg_under_construction)
2241 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2242 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2244 rtx push_size = GEN_INT (args_size.constant);
2246 if (old_stack_level == 0)
2248 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2249 old_pending_adj = pending_stack_adjust;
2250 pending_stack_adjust = 0;
2251 /* stack_arg_under_construction says whether a stack arg is
2252 being constructed at the old stack level. Pushing the stack
2253 gets a clean outgoing argument block. */
2254 old_stack_arg_under_construction = stack_arg_under_construction;
2255 stack_arg_under_construction = 0;
2256 /* Make a new map for the new argument list. */
2257 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2258 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2259 highest_outgoing_arg_in_use = 0;
2261 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2263 /* If argument evaluation might modify the stack pointer, copy the
2264 address of the argument list to a register. */
2265 for (i = 0; i < num_actuals; i++)
2266 if (args[i].pass_on_stack)
2268 argblock = copy_addr_to_reg (argblock);
2273 compute_argument_addresses (args, argblock, num_actuals);
2275 #ifdef PUSH_ARGS_REVERSED
2276 #ifdef PREFERRED_STACK_BOUNDARY
2277 /* If we push args individually in reverse order, perform stack alignment
2278 before the first push (the last arg). */
2279 if (args_size.constant != unadjusted_args_size)
2281 /* When the stack adjustment is pending,
2282 we get better code by combining the adjustments. */
2283 if (pending_stack_adjust && !is_const
2284 && !inhibit_defer_pop)
2286 args_size.constant = (unadjusted_args_size
2287 + ((pending_stack_adjust + args_size.constant
2288 - unadjusted_args_size)
2289 % (preferred_stack_boundary / BITS_PER_UNIT)));
2290 pending_stack_adjust -= args_size.constant - unadjusted_args_size;
2291 do_pending_stack_adjust ();
2293 else if (argblock == 0)
2294 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2299 /* Don't try to defer pops if preallocating, not even from the first arg,
2300 since ARGBLOCK probably refers to the SP. */
2304 funexp = rtx_for_function_call (fndecl, exp);
2306 /* Figure out the register where the value, if any, will come back. */
2308 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2309 && ! structure_value_addr)
2311 if (pcc_struct_value)
2312 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2315 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
2318 /* Precompute all register parameters. It isn't safe to compute anything
2319 once we have started filling any specific hard regs. */
2320 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2322 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2324 /* Save the fixed argument area if it's part of the caller's frame and
2325 is clobbered by argument setup for this call. */
2326 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2327 &low_to_save, &high_to_save);
2331 /* Now store (and compute if necessary) all non-register parms.
2332 These come before register parms, since they can require block-moves,
2333 which could clobber the registers used for register parms.
2334 Parms which have partial registers are not stored here,
2335 but we do preallocate space here if they want that. */
2337 for (i = 0; i < num_actuals; i++)
2338 if (args[i].reg == 0 || args[i].pass_on_stack)
2339 store_one_arg (&args[i], argblock, may_be_alloca,
2340 args_size.var != 0, reg_parm_stack_space);
2342 /* If we have a parm that is passed in registers but not in memory
2343 and whose alignment does not permit a direct copy into registers,
2344 make a group of pseudos that correspond to each register that we
2346 if (STRICT_ALIGNMENT)
2347 store_unaligned_arguments_into_pseudos (args, num_actuals);
2349 /* Now store any partially-in-registers parm.
2350 This is the last place a block-move can happen. */
2352 for (i = 0; i < num_actuals; i++)
2353 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2354 store_one_arg (&args[i], argblock, may_be_alloca,
2355 args_size.var != 0, reg_parm_stack_space);
2357 #ifndef PUSH_ARGS_REVERSED
2358 #ifdef PREFERRED_STACK_BOUNDARY
2359 /* If we pushed args in forward order, perform stack alignment
2360 after pushing the last arg. */
2362 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2366 /* If register arguments require space on the stack and stack space
2367 was not preallocated, allocate stack space here for arguments
2368 passed in registers. */
2369 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2370 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2371 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2374 /* Pass the function the address in which to return a structure value. */
2375 if (structure_value_addr && ! structure_value_addr_parm)
2377 emit_move_insn (struct_value_rtx,
2379 force_operand (structure_value_addr,
2382 /* Mark the memory for the aggregate as write-only. */
2383 if (current_function_check_memory_usage)
2384 emit_library_call (chkr_set_right_libfunc, 1,
2386 structure_value_addr, Pmode,
2387 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2388 GEN_INT (MEMORY_USE_WO),
2389 TYPE_MODE (integer_type_node));
2391 if (GET_CODE (struct_value_rtx) == REG)
2392 use_reg (&call_fusage, struct_value_rtx);
2395 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2397 load_register_parameters (args, num_actuals, &call_fusage);
2399 /* Perform postincrements before actually calling the function. */
2402 /* Save a pointer to the last insn before the call, so that we can
2403 later safely search backwards to find the CALL_INSN. */
2404 before_call = get_last_insn ();
2406 /* All arguments and registers used for the call must be set up by now! */
2408 /* Generate the actual call instruction. */
2409 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2410 args_size.constant, struct_value_size,
2411 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2412 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2414 /* If call is cse'able, make appropriate pair of reg-notes around it.
2415 Test valreg so we don't crash; may safely ignore `const'
2416 if return type is void. Disable for PARALLEL return values, because
2417 we have no way to move such values into a pseudo register. */
2418 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2421 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2424 /* Mark the return value as a pointer if needed. */
2425 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2427 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2428 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2431 /* Construct an "equal form" for the value which mentions all the
2432 arguments in order as well as the function name. */
2433 #ifdef PUSH_ARGS_REVERSED
2434 for (i = 0; i < num_actuals; i++)
2435 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2437 for (i = num_actuals - 1; i >= 0; i--)
2438 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2440 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2442 insns = get_insns ();
2445 emit_libcall_block (insns, temp, valreg, note);
2451 /* Otherwise, just write out the sequence without a note. */
2452 rtx insns = get_insns ();
2459 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2462 /* The return value from a malloc-like function is a pointer. */
2463 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2464 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2466 emit_move_insn (temp, valreg);
2468 /* The return value from a malloc-like function can not alias
2470 last = get_last_insn ();
2472 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2474 /* Write out the sequence. */
2475 insns = get_insns ();
2481 /* For calls to `setjmp', etc., inform flow.c it should complain
2482 if nonvolatile values are live. */
2486 /* The NOTE_INSN_SETJMP note must be emitted immediately after the
2487 CALL_INSN. Some ports emit more than just a CALL_INSN above, so
2488 we must search for it here. */
2489 rtx last = get_last_insn ();
2490 while (GET_CODE (last) != CALL_INSN)
2492 last = PREV_INSN (last);
2493 /* There was no CALL_INSN? */
2494 if (last == before_call)
2497 emit_note_after (NOTE_INSN_SETJMP, last);
2498 current_function_calls_setjmp = 1;
2502 current_function_calls_longjmp = 1;
2504 /* Notice functions that cannot return.
2505 If optimizing, insns emitted below will be dead.
2506 If not optimizing, they will exist, which is useful
2507 if the user uses the `return' command in the debugger. */
2509 if (is_volatile || is_longjmp)
2512 /* If value type not void, return an rtx for the value. */
2514 /* If there are cleanups to be called, don't use a hard reg as target.
2515 We need to double check this and see if it matters anymore. */
2516 if (any_pending_cleanups (1)
2517 && target && REG_P (target)
2518 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2521 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2524 target = const0_rtx;
2526 else if (structure_value_addr)
2528 if (target == 0 || GET_CODE (target) != MEM)
2530 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2531 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2532 structure_value_addr));
2533 MEM_SET_IN_STRUCT_P (target,
2534 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2537 else if (pcc_struct_value)
2539 /* This is the special C++ case where we need to
2540 know what the true target was. We take care to
2541 never use this value more than once in one expression. */
2542 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2543 copy_to_reg (valreg));
2544 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2546 /* Handle calls that return values in multiple non-contiguous locations.
2547 The Irix 6 ABI has examples of this. */
2548 else if (GET_CODE (valreg) == PARALLEL)
2550 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2554 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2555 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2556 preserve_temp_slots (target);
2559 if (! rtx_equal_p (target, valreg))
2560 emit_group_store (target, valreg, bytes,
2561 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2563 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2564 && GET_MODE (target) == GET_MODE (valreg))
2565 /* TARGET and VALREG cannot be equal at this point because the latter
2566 would not have REG_FUNCTION_VALUE_P true, while the former would if
2567 it were referring to the same register.
2569 If they refer to the same register, this move will be a no-op, except
2570 when function inlining is being done. */
2571 emit_move_insn (target, valreg);
2572 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2573 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2575 target = copy_to_reg (valreg);
2577 #ifdef PROMOTE_FUNCTION_RETURN
2578 /* If we promoted this return value, make the proper SUBREG. TARGET
2579 might be const0_rtx here, so be careful. */
2580 if (GET_CODE (target) == REG
2581 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2582 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2584 tree type = TREE_TYPE (exp);
2585 int unsignedp = TREE_UNSIGNED (type);
2587 /* If we don't promote as expected, something is wrong. */
2588 if (GET_MODE (target)
2589 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2592 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2593 SUBREG_PROMOTED_VAR_P (target) = 1;
2594 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2598 /* If size of args is variable or this was a constructor call for a stack
2599 argument, restore saved stack-pointer value. */
2601 if (old_stack_level)
2603 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2604 pending_stack_adjust = old_pending_adj;
2605 #ifdef ACCUMULATE_OUTGOING_ARGS
2606 stack_arg_under_construction = old_stack_arg_under_construction;
2607 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2608 stack_usage_map = initial_stack_usage_map;
2611 #ifdef ACCUMULATE_OUTGOING_ARGS
2614 #ifdef REG_PARM_STACK_SPACE
2616 restore_fixed_argument_area (save_area, argblock,
2617 high_to_save, low_to_save);
2620 /* If we saved any argument areas, restore them. */
2621 for (i = 0; i < num_actuals; i++)
2622 if (args[i].save_area)
2624 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2626 = gen_rtx_MEM (save_mode,
2627 memory_address (save_mode,
2628 XEXP (args[i].stack_slot, 0)));
2630 if (save_mode != BLKmode)
2631 emit_move_insn (stack_area, args[i].save_area);
2633 emit_block_move (stack_area, validize_mem (args[i].save_area),
2634 GEN_INT (args[i].size.constant),
2635 PARM_BOUNDARY / BITS_PER_UNIT);
2638 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2639 stack_usage_map = initial_stack_usage_map;
2643 /* If this was alloca, record the new stack level for nonlocal gotos.
2644 Check for the handler slots since we might not have a save area
2645 for non-local gotos. */
2647 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2648 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2652 /* Free up storage we no longer need. */
2653 for (i = 0; i < num_actuals; ++i)
2654 if (args[i].aligned_regs)
2655 free (args[i].aligned_regs);
2660 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2661 (emitting the queue unless NO_QUEUE is nonzero),
2662 for a value of mode OUTMODE,
2663 with NARGS different arguments, passed as alternating rtx values
2664 and machine_modes to convert them to.
2665 The rtx values should have been passed through protect_from_queue already.
2667 NO_QUEUE will be true if and only if the library call is a `const' call
2668 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2669 to the variable is_const in expand_call.
2671 NO_QUEUE must be true for const calls, because if it isn't, then
2672 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2673 and will be lost if the libcall sequence is optimized away.
2675 NO_QUEUE must be false for non-const calls, because if it isn't, the
2676 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2677 optimized. For instance, the instruction scheduler may incorrectly
2678 move memory references across the non-const call. */
2681 emit_library_call VPARAMS((rtx orgfun, int no_queue, enum machine_mode outmode,
2684 #ifndef ANSI_PROTOTYPES
2687 enum machine_mode outmode;
2691 /* Total size in bytes of all the stack-parms scanned so far. */
2692 struct args_size args_size;
2693 /* Size of arguments before any adjustments (such as rounding). */
2694 struct args_size original_args_size;
2695 register int argnum;
2699 struct args_size alignment_pad;
2701 CUMULATIVE_ARGS args_so_far;
2702 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2703 struct args_size offset; struct args_size size; rtx save_area; };
2705 int old_inhibit_defer_pop = inhibit_defer_pop;
2706 rtx call_fusage = 0;
2707 int reg_parm_stack_space = 0;
2708 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2709 /* Define the boundary of the register parm stack space that needs to be
2711 int low_to_save = -1, high_to_save = 0;
2712 rtx save_area = 0; /* Place that it is saved */
2715 #ifdef ACCUMULATE_OUTGOING_ARGS
2716 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2717 char *initial_stack_usage_map = stack_usage_map;
2721 #ifdef REG_PARM_STACK_SPACE
2722 /* Size of the stack reserved for parameter registers. */
2723 #ifdef MAYBE_REG_PARM_STACK_SPACE
2724 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2726 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2730 VA_START (p, nargs);
2732 #ifndef ANSI_PROTOTYPES
2733 orgfun = va_arg (p, rtx);
2734 no_queue = va_arg (p, int);
2735 outmode = va_arg (p, enum machine_mode);
2736 nargs = va_arg (p, int);
2741 /* Copy all the libcall-arguments out of the varargs data
2742 and into a vector ARGVEC.
2744 Compute how to pass each argument. We only support a very small subset
2745 of the full argument passing conventions to limit complexity here since
2746 library functions shouldn't have many args. */
2748 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2749 bzero ((char *) argvec, nargs * sizeof (struct arg));
2752 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2754 args_size.constant = 0;
2759 #ifdef PREFERRED_STACK_BOUNDARY
2760 /* Ensure current function's preferred stack boundary is at least
2762 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
2763 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2766 for (count = 0; count < nargs; count++)
2768 rtx val = va_arg (p, rtx);
2769 enum machine_mode mode = va_arg (p, enum machine_mode);
2771 /* We cannot convert the arg value to the mode the library wants here;
2772 must do it earlier where we know the signedness of the arg. */
2774 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2777 /* On some machines, there's no way to pass a float to a library fcn.
2778 Pass it as a double instead. */
2779 #ifdef LIBGCC_NEEDS_DOUBLE
2780 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2781 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2784 /* There's no need to call protect_from_queue, because
2785 either emit_move_insn or emit_push_insn will do that. */
2787 /* Make sure it is a reasonable operand for a move or push insn. */
2788 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2789 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2790 val = force_operand (val, NULL_RTX);
2792 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2793 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2795 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2796 be viewed as just an efficiency improvement. */
2797 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2798 emit_move_insn (slot, val);
2799 val = force_operand (XEXP (slot, 0), NULL_RTX);
2804 argvec[count].value = val;
2805 argvec[count].mode = mode;
2807 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2809 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2810 argvec[count].partial
2811 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2813 argvec[count].partial = 0;
2816 locate_and_pad_parm (mode, NULL_TREE,
2817 argvec[count].reg && argvec[count].partial == 0,
2818 NULL_TREE, &args_size, &argvec[count].offset,
2819 &argvec[count].size, &alignment_pad);
2821 if (argvec[count].size.var)
2824 if (reg_parm_stack_space == 0 && argvec[count].partial)
2825 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2827 if (argvec[count].reg == 0 || argvec[count].partial != 0
2828 || reg_parm_stack_space > 0)
2829 args_size.constant += argvec[count].size.constant;
2831 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2835 #ifdef FINAL_REG_PARM_STACK_SPACE
2836 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2840 /* If this machine requires an external definition for library
2841 functions, write one out. */
2842 assemble_external_libcall (fun);
2844 original_args_size = args_size;
2845 #ifdef PREFERRED_STACK_BOUNDARY
2846 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2847 / STACK_BYTES) * STACK_BYTES);
2850 args_size.constant = MAX (args_size.constant,
2851 reg_parm_stack_space);
2853 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2854 args_size.constant -= reg_parm_stack_space;
2857 if (args_size.constant > current_function_outgoing_args_size)
2858 current_function_outgoing_args_size = args_size.constant;
2860 #ifdef ACCUMULATE_OUTGOING_ARGS
2861 /* Since the stack pointer will never be pushed, it is possible for
2862 the evaluation of a parm to clobber something we have already
2863 written to the stack. Since most function calls on RISC machines
2864 do not use the stack, this is uncommon, but must work correctly.
2866 Therefore, we save any area of the stack that was already written
2867 and that we are using. Here we set up to do this by making a new
2868 stack usage map from the old one.
2870 Another approach might be to try to reorder the argument
2871 evaluations to avoid this conflicting stack usage. */
2873 needed = args_size.constant;
2875 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2876 /* Since we will be writing into the entire argument area, the
2877 map must be allocated for its entire size, not just the part that
2878 is the responsibility of the caller. */
2879 needed += reg_parm_stack_space;
2882 #ifdef ARGS_GROW_DOWNWARD
2883 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2886 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2889 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2891 if (initial_highest_arg_in_use)
2892 bcopy (initial_stack_usage_map, stack_usage_map,
2893 initial_highest_arg_in_use);
2895 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2896 bzero (&stack_usage_map[initial_highest_arg_in_use],
2897 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2900 /* The address of the outgoing argument list must not be copied to a
2901 register here, because argblock would be left pointing to the
2902 wrong place after the call to allocate_dynamic_stack_space below.
2905 argblock = virtual_outgoing_args_rtx;
2906 #else /* not ACCUMULATE_OUTGOING_ARGS */
2907 #ifndef PUSH_ROUNDING
2908 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2912 #ifdef PUSH_ARGS_REVERSED
2913 #ifdef PREFERRED_STACK_BOUNDARY
2914 /* If we push args individually in reverse order, perform stack alignment
2915 before the first push (the last arg). */
2917 anti_adjust_stack (GEN_INT (args_size.constant
2918 - original_args_size.constant));
2922 #ifdef PUSH_ARGS_REVERSED
2930 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2931 /* The argument list is the property of the called routine and it
2932 may clobber it. If the fixed area has been used for previous
2933 parameters, we must save and restore it.
2935 Here we compute the boundary of the area that needs to be saved, if any. */
2937 #ifdef ARGS_GROW_DOWNWARD
2938 for (count = 0; count < reg_parm_stack_space + 1; count++)
2940 for (count = 0; count < reg_parm_stack_space; count++)
2943 if (count >= highest_outgoing_arg_in_use
2944 || stack_usage_map[count] == 0)
2947 if (low_to_save == -1)
2948 low_to_save = count;
2950 high_to_save = count;
2953 if (low_to_save >= 0)
2955 int num_to_save = high_to_save - low_to_save + 1;
2956 enum machine_mode save_mode
2957 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2960 /* If we don't have the required alignment, must do this in BLKmode. */
2961 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2962 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2963 save_mode = BLKmode;
2965 #ifdef ARGS_GROW_DOWNWARD
2966 stack_area = gen_rtx_MEM (save_mode,
2967 memory_address (save_mode,
2968 plus_constant (argblock,
2971 stack_area = gen_rtx_MEM (save_mode,
2972 memory_address (save_mode,
2973 plus_constant (argblock,
2976 if (save_mode == BLKmode)
2978 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2979 emit_block_move (validize_mem (save_area), stack_area,
2980 GEN_INT (num_to_save),
2981 PARM_BOUNDARY / BITS_PER_UNIT);
2985 save_area = gen_reg_rtx (save_mode);
2986 emit_move_insn (save_area, stack_area);
2991 /* Push the args that need to be pushed. */
2993 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2994 are to be pushed. */
2995 for (count = 0; count < nargs; count++, argnum += inc)
2997 register enum machine_mode mode = argvec[argnum].mode;
2998 register rtx val = argvec[argnum].value;
2999 rtx reg = argvec[argnum].reg;
3000 int partial = argvec[argnum].partial;
3001 #ifdef ACCUMULATE_OUTGOING_ARGS
3002 int lower_bound, upper_bound, i;
3005 if (! (reg != 0 && partial == 0))
3007 #ifdef ACCUMULATE_OUTGOING_ARGS
3008 /* If this is being stored into a pre-allocated, fixed-size, stack
3009 area, save any previous data at that location. */
3011 #ifdef ARGS_GROW_DOWNWARD
3012 /* stack_slot is negative, but we want to index stack_usage_map
3013 with positive values. */
3014 upper_bound = -argvec[argnum].offset.constant + 1;
3015 lower_bound = upper_bound - argvec[argnum].size.constant;
3017 lower_bound = argvec[argnum].offset.constant;
3018 upper_bound = lower_bound + argvec[argnum].size.constant;
3021 for (i = lower_bound; i < upper_bound; i++)
3022 if (stack_usage_map[i]
3023 /* Don't store things in the fixed argument area at this point;
3024 it has already been saved. */
3025 && i > reg_parm_stack_space)
3028 if (i != upper_bound)
3030 /* We need to make a save area. See what mode we can make it. */
3031 enum machine_mode save_mode
3032 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3039 plus_constant (argblock,
3040 argvec[argnum].offset.constant)));
3042 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3043 emit_move_insn (argvec[argnum].save_area, stack_area);
3046 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3047 argblock, GEN_INT (argvec[argnum].offset.constant),
3048 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3050 #ifdef ACCUMULATE_OUTGOING_ARGS
3051 /* Now mark the segment we just used. */
3052 for (i = lower_bound; i < upper_bound; i++)
3053 stack_usage_map[i] = 1;
3060 #ifndef PUSH_ARGS_REVERSED
3061 #ifdef PREFERRED_STACK_BOUNDARY
3062 /* If we pushed args in forward order, perform stack alignment
3063 after pushing the last arg. */
3065 anti_adjust_stack (GEN_INT (args_size.constant
3066 - original_args_size.constant));
3070 #ifdef PUSH_ARGS_REVERSED
3076 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3078 /* Now load any reg parms into their regs. */
3080 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3081 are to be pushed. */
3082 for (count = 0; count < nargs; count++, argnum += inc)
3084 register rtx val = argvec[argnum].value;
3085 rtx reg = argvec[argnum].reg;
3086 int partial = argvec[argnum].partial;
3088 /* Handle calls that pass values in multiple non-contiguous
3089 locations. The PA64 has examples of this for library calls. */
3090 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3091 emit_group_load (reg, val,
3092 GET_MODE_SIZE (GET_MODE (val)),
3093 GET_MODE_ALIGNMENT (GET_MODE (val)));
3094 else if (reg != 0 && partial == 0)
3095 emit_move_insn (reg, val);
3100 /* For version 1.37, try deleting this entirely. */
3104 /* Any regs containing parms remain in use through the call. */
3105 for (count = 0; count < nargs; count++)
3107 rtx reg = argvec[count].reg;
3108 if (reg != 0 && GET_CODE (argvec[count].reg) == PARALLEL)
3109 use_group_regs (&call_fusage, reg);
3111 use_reg (&call_fusage, reg);
3114 /* Don't allow popping to be deferred, since then
3115 cse'ing of library calls could delete a call and leave the pop. */
3118 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3119 will set inhibit_defer_pop to that value. */
3121 /* The return type is needed to decide how many bytes the function pops.
3122 Signedness plays no role in that, so for simplicity, we pretend it's
3123 always signed. We also assume that the list of arguments passed has
3124 no impact, so we pretend it is unknown. */
3127 get_identifier (XSTR (orgfun, 0)),
3128 build_function_type (outmode == VOIDmode ? void_type_node
3129 : type_for_mode (outmode, 0), NULL_TREE),
3130 original_args_size.constant, args_size.constant, 0,
3131 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3132 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
3133 old_inhibit_defer_pop + 1, call_fusage, no_queue);
3137 /* Now restore inhibit_defer_pop to its actual original value. */
3140 #ifdef ACCUMULATE_OUTGOING_ARGS
3141 #ifdef REG_PARM_STACK_SPACE
3144 enum machine_mode save_mode = GET_MODE (save_area);
3145 #ifdef ARGS_GROW_DOWNWARD
3147 = gen_rtx_MEM (save_mode,
3148 memory_address (save_mode,
3149 plus_constant (argblock,
3153 = gen_rtx_MEM (save_mode,
3154 memory_address (save_mode,
3155 plus_constant (argblock, low_to_save)));
3158 if (save_mode != BLKmode)
3159 emit_move_insn (stack_area, save_area);
3161 emit_block_move (stack_area, validize_mem (save_area),
3162 GEN_INT (high_to_save - low_to_save + 1),
3163 PARM_BOUNDARY / BITS_PER_UNIT);
3167 /* If we saved any argument areas, restore them. */
3168 for (count = 0; count < nargs; count++)
3169 if (argvec[count].save_area)
3171 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3173 = gen_rtx_MEM (save_mode,
3176 plus_constant (argblock,
3177 argvec[count].offset.constant)));
3179 emit_move_insn (stack_area, argvec[count].save_area);
3182 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3183 stack_usage_map = initial_stack_usage_map;
3187 /* Like emit_library_call except that an extra argument, VALUE,
3188 comes second and says where to store the result.
3189 (If VALUE is zero, this function chooses a convenient way
3190 to return the value.)
3192 This function returns an rtx for where the value is to be found.
3193 If VALUE is nonzero, VALUE is returned. */
3196 emit_library_call_value VPARAMS((rtx orgfun, rtx value, int no_queue,
3197 enum machine_mode outmode, int nargs, ...))
/* Emit a library call whose result is wanted: like emit_library_call but
   VALUE (the second argument) says where to store the result.
   NOTE(review): this excerpt omits some original source lines (several
   declarations, braces, #else/#endif lines); read alongside the full
   calls.c before modifying.  */
3199 #ifndef ANSI_PROTOTYPES
3203 enum machine_mode outmode;
3207 /* Total size in bytes of all the stack-parms scanned so far. */
3208 struct args_size args_size;
3209 /* Size of arguments before any adjustments (such as rounding). */
3210 struct args_size original_args_size;
3211 register int argnum;
3215 struct args_size alignment_pad;
3217 CUMULATIVE_ARGS args_so_far;
3218 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3219 struct args_size offset; struct args_size size; rtx save_area; };
3221 int old_inhibit_defer_pop = inhibit_defer_pop;
3222 rtx call_fusage = 0;
3224 int pcc_struct_value = 0;
3225 int struct_value_size = 0;
3227 int reg_parm_stack_space = 0;
3228 #ifdef ACCUMULATE_OUTGOING_ARGS
3232 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3233 /* Define the boundary of the register parm stack space that needs to be
3235 int low_to_save = -1, high_to_save = 0;
3236 rtx save_area = 0; /* Place that it is saved */
3239 #ifdef ACCUMULATE_OUTGOING_ARGS
3240 /* Size of the stack reserved for parameter registers. */
3241 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3242 char *initial_stack_usage_map = stack_usage_map;
3245 #ifdef REG_PARM_STACK_SPACE
3246 #ifdef MAYBE_REG_PARM_STACK_SPACE
3247 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3249 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
/* Begin scanning the variadic libcall arguments.  */
3253 VA_START (p, nargs);
3255 #ifndef ANSI_PROTOTYPES
3256 orgfun = va_arg (p, rtx);
3257 value = va_arg (p, rtx);
3258 no_queue = va_arg (p, int);
3259 outmode = va_arg (p, enum machine_mode);
3260 nargs = va_arg (p, int);
/* NOTE(review): no_queue doubles as the "const function" flag here.  */
3263 is_const = no_queue;
3266 #ifdef PREFERRED_STACK_BOUNDARY
3267 /* Ensure current function's preferred stack boundary is at least
3269 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3270 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3273 /* If this kind of value comes back in memory,
3274 decide where in memory it should come back. */
3275 if (aggregate_value_p (type_for_mode (outmode, 0)))
3277 #ifdef PCC_STATIC_STRUCT_RETURN
3279 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3281 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3282 pcc_struct_value = 1;
3284 value = gen_reg_rtx (outmode);
3285 #else /* not PCC_STATIC_STRUCT_RETURN */
3286 struct_value_size = GET_MODE_SIZE (outmode);
3287 if (value != 0 && GET_CODE (value) == MEM)
3290 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3293 /* This call returns a big structure. */
3297 /* ??? Unfinished: must pass the memory address as an argument. */
3299 /* Copy all the libcall-arguments out of the varargs data
3300 and into a vector ARGVEC.
3302 Compute how to pass each argument. We only support a very small subset
3303 of the full argument passing conventions to limit complexity here since
3304 library functions shouldn't have many args. */
3306 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3307 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3309 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3311 args_size.constant = 0;
3318 /* If there's a structure value address to be passed,
3319 either pass it in the special place, or pass it as an extra argument. */
3320 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3322 rtx addr = XEXP (mem_value, 0);
3325 /* Make sure it is a reasonable operand for a move or push insn. */
3326 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3327 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3328 addr = force_operand (addr, NULL_RTX);
3330 argvec[count].value = addr;
3331 argvec[count].mode = Pmode;
3332 argvec[count].partial = 0;
3334 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3335 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3336 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3340 locate_and_pad_parm (Pmode, NULL_TREE,
3341 argvec[count].reg && argvec[count].partial == 0,
3342 NULL_TREE, &args_size, &argvec[count].offset,
3343 &argvec[count].size, &alignment_pad);
3346 if (argvec[count].reg == 0 || argvec[count].partial != 0
3347 || reg_parm_stack_space > 0)
3348 args_size.constant += argvec[count].size.constant;
3350 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
/* Scan each caller-supplied (value, mode) pair and decide how it is
   passed: in a register, on the stack, or partially in each.  */
3355 for (; count < nargs; count++)
3357 rtx val = va_arg (p, rtx);
3358 enum machine_mode mode = va_arg (p, enum machine_mode);
3360 /* We cannot convert the arg value to the mode the library wants here;
3361 must do it earlier where we know the signedness of the arg. */
3363 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3366 /* On some machines, there's no way to pass a float to a library fcn.
3367 Pass it as a double instead. */
3368 #ifdef LIBGCC_NEEDS_DOUBLE
3369 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3370 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3373 /* There's no need to call protect_from_queue, because
3374 either emit_move_insn or emit_push_insn will do that. */
3376 /* Make sure it is a reasonable operand for a move or push insn. */
3377 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3378 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3379 val = force_operand (val, NULL_RTX);
3381 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3382 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3384 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3385 be viewed as just an efficiency improvement. */
3386 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3387 emit_move_insn (slot, val);
3388 val = XEXP (slot, 0);
3393 argvec[count].value = val;
3394 argvec[count].mode = mode;
3396 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3398 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3399 argvec[count].partial
3400 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3402 argvec[count].partial = 0;
3405 locate_and_pad_parm (mode, NULL_TREE,
3406 argvec[count].reg && argvec[count].partial == 0,
3407 NULL_TREE, &args_size, &argvec[count].offset,
3408 &argvec[count].size, &alignment_pad);
3410 if (argvec[count].size.var)
3413 if (reg_parm_stack_space == 0 && argvec[count].partial)
3414 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3416 if (argvec[count].reg == 0 || argvec[count].partial != 0
3417 || reg_parm_stack_space > 0)
3418 args_size.constant += argvec[count].size.constant;
3420 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3424 #ifdef FINAL_REG_PARM_STACK_SPACE
3425 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3428 /* If this machine requires an external definition for library
3429 functions, write one out. */
3430 assemble_external_libcall (fun);
/* Round the total argument-block size up to the preferred stack
   boundary, remembering the un-rounded size for the call insn.  */
3432 original_args_size = args_size;
3433 #ifdef PREFERRED_STACK_BOUNDARY
3434 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3435 / STACK_BYTES) * STACK_BYTES);
3438 args_size.constant = MAX (args_size.constant,
3439 reg_parm_stack_space);
3441 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3442 args_size.constant -= reg_parm_stack_space;
3445 if (args_size.constant > current_function_outgoing_args_size)
3446 current_function_outgoing_args_size = args_size.constant;
3448 #ifdef ACCUMULATE_OUTGOING_ARGS
3449 /* Since the stack pointer will never be pushed, it is possible for
3450 the evaluation of a parm to clobber something we have already
3451 written to the stack. Since most function calls on RISC machines
3452 do not use the stack, this is uncommon, but must work correctly.
3454 Therefore, we save any area of the stack that was already written
3455 and that we are using. Here we set up to do this by making a new
3456 stack usage map from the old one.
3458 Another approach might be to try to reorder the argument
3459 evaluations to avoid this conflicting stack usage. */
3461 needed = args_size.constant;
3463 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3464 /* Since we will be writing into the entire argument area, the
3465 map must be allocated for its entire size, not just the part that
3466 is the responsibility of the caller. */
3467 needed += reg_parm_stack_space;
3470 #ifdef ARGS_GROW_DOWNWARD
3471 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3474 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3477 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3479 if (initial_highest_arg_in_use)
3480 bcopy (initial_stack_usage_map, stack_usage_map,
3481 initial_highest_arg_in_use);
3483 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3484 bzero (&stack_usage_map[initial_highest_arg_in_use],
3485 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3488 /* The address of the outgoing argument list must not be copied to a
3489 register here, because argblock would be left pointing to the
3490 wrong place after the call to allocate_dynamic_stack_space below.
3493 argblock = virtual_outgoing_args_rtx;
3494 #else /* not ACCUMULATE_OUTGOING_ARGS */
3495 #ifndef PUSH_ROUNDING
3496 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3500 #ifdef PUSH_ARGS_REVERSED
3501 #ifdef PREFERRED_STACK_BOUNDARY
3502 /* If we push args individually in reverse order, perform stack alignment
3503 before the first push (the last arg). */
3505 anti_adjust_stack (GEN_INT (args_size.constant
3506 - original_args_size.constant))
3510 #ifdef PUSH_ARGS_REVERSED
3518 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3519 /* The argument list is the property of the called routine and it
3520 may clobber it. If the fixed area has been used for previous
3521 parameters, we must save and restore it.
3523 Here we compute the boundary of the area that needs to be saved, if any. */
3525 #ifdef ARGS_GROW_DOWNWARD
3526 for (count = 0; count < reg_parm_stack_space + 1; count++)
3528 for (count = 0; count < reg_parm_stack_space; count++)
3531 if (count >= highest_outgoing_arg_in_use
3532 || stack_usage_map[count] == 0)
3535 if (low_to_save == -1)
3536 low_to_save = count;
3538 high_to_save = count;
3541 if (low_to_save >= 0)
3543 int num_to_save = high_to_save - low_to_save + 1;
3544 enum machine_mode save_mode
3545 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3548 /* If we don't have the required alignment, must do this in BLKmode. */
3549 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3550 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3551 save_mode = BLKmode;
3553 #ifdef ARGS_GROW_DOWNWARD
3554 stack_area = gen_rtx_MEM (save_mode,
3555 memory_address (save_mode,
3556 plus_constant (argblock,
3559 stack_area = gen_rtx_MEM (save_mode,
3560 memory_address (save_mode,
3561 plus_constant (argblock,
3564 if (save_mode == BLKmode)
3566 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3567 emit_block_move (validize_mem (save_area), stack_area,
3568 GEN_INT (num_to_save),
3569 PARM_BOUNDARY / BITS_PER_UNIT);
3573 save_area = gen_reg_rtx (save_mode);
3574 emit_move_insn (save_area, stack_area);
3579 /* Push the args that need to be pushed. */
3581 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3582 are to be pushed. */
3583 for (count = 0; count < nargs; count++, argnum += inc)
3585 register enum machine_mode mode = argvec[argnum].mode;
3586 register rtx val = argvec[argnum].value;
3587 rtx reg = argvec[argnum].reg;
3588 int partial = argvec[argnum].partial;
3589 #ifdef ACCUMULATE_OUTGOING_ARGS
3590 int lower_bound, upper_bound, i;
3593 if (! (reg != 0 && partial == 0))
3595 #ifdef ACCUMULATE_OUTGOING_ARGS
3596 /* If this is being stored into a pre-allocated, fixed-size, stack
3597 area, save any previous data at that location. */
3599 #ifdef ARGS_GROW_DOWNWARD
3600 /* stack_slot is negative, but we want to index stack_usage_map
3601 with positive values. */
3602 upper_bound = -argvec[argnum].offset.constant + 1;
3603 lower_bound = upper_bound - argvec[argnum].size.constant;
3605 lower_bound = argvec[argnum].offset.constant;
3606 upper_bound = lower_bound + argvec[argnum].size.constant;
3609 for (i = lower_bound; i < upper_bound; i++)
3610 if (stack_usage_map[i]
3611 /* Don't store things in the fixed argument area at this point;
3612 it has already been saved. */
3613 && i > reg_parm_stack_space)
3616 if (i != upper_bound)
3618 /* We need to make a save area. See what mode we can make it. */
3619 enum machine_mode save_mode
3620 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3627 plus_constant (argblock,
3628 argvec[argnum].offset.constant)));
3629 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3631 emit_move_insn (argvec[argnum].save_area, stack_area);
3634 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3635 argblock, GEN_INT (argvec[argnum].offset.constant),
3636 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3638 #ifdef ACCUMULATE_OUTGOING_ARGS
3639 /* Now mark the segment we just used. */
3640 for (i = lower_bound; i < upper_bound; i++)
3641 stack_usage_map[i] = 1;
3648 #ifndef PUSH_ARGS_REVERSED
3649 #ifdef PREFERRED_STACK_BOUNDARY
3650 /* If we pushed args in forward order, perform stack alignment
3651 after pushing the last arg. */
3653 anti_adjust_stack (GEN_INT (args_size.constant
3654 - original_args_size.constant));
3658 #ifdef PUSH_ARGS_REVERSED
/* Get the function address into a form the call insn can use.  */
3664 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3666 /* Now load any reg parms into their regs. */
3668 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3669 are to be pushed. */
3670 for (count = 0; count < nargs; count++, argnum += inc)
3672 register rtx val = argvec[argnum].value;
3673 rtx reg = argvec[argnum].reg;
3674 int partial = argvec[argnum].partial;
3676 /* Handle calls that pass values in multiple non-contiguous
3677 locations. The PA64 has examples of this for library calls. */
3678 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3679 emit_group_load (reg, val,
3680 GET_MODE_SIZE (GET_MODE (val)),
3681 GET_MODE_ALIGNMENT (GET_MODE (val)));
3682 else if (reg != 0 && partial == 0)
3683 emit_move_insn (reg, val);
3689 /* For version 1.37, try deleting this entirely. */
3694 /* Any regs containing parms remain in use through the call. */
3695 for (count = 0; count < nargs; count++)
3697 rtx reg = argvec[count].reg;
3698 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3699 use_group_regs (&call_fusage, reg);
3701 use_reg (&call_fusage, reg);
3704 /* Pass the function the address in which to return a structure value. */
3705 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3707 emit_move_insn (struct_value_rtx,
3709 force_operand (XEXP (mem_value, 0),
3711 if (GET_CODE (struct_value_rtx) == REG)
3712 use_reg (&call_fusage, struct_value_rtx);
3715 /* Don't allow popping to be deferred, since then
3716 cse'ing of library calls could delete a call and leave the pop. */
3719 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3720 will set inhibit_defer_pop to that value. */
3721 /* See the comment in emit_library_call about the function type we build
3725 get_identifier (XSTR (orgfun, 0)),
3726 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3727 original_args_size.constant, args_size.constant,
3729 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3730 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3731 old_inhibit_defer_pop + 1, call_fusage, is_const);
3733 /* Now restore inhibit_defer_pop to its actual original value. */
3738 /* Copy the value to the right place. */
3739 if (outmode != VOIDmode)
3745 if (value != mem_value)
3746 emit_move_insn (value, mem_value);
3748 else if (value != 0)
3749 emit_move_insn (value, hard_libcall_value (outmode));
3751 value = hard_libcall_value (outmode);
/* Restore any register-parameter stack area saved before the call,
   mirroring the save sequence above.  */
3754 #ifdef ACCUMULATE_OUTGOING_ARGS
3755 #ifdef REG_PARM_STACK_SPACE
3758 enum machine_mode save_mode = GET_MODE (save_area);
3759 #ifdef ARGS_GROW_DOWNWARD
3761 = gen_rtx_MEM (save_mode,
3762 memory_address (save_mode,
3763 plus_constant (argblock,
3767 = gen_rtx_MEM (save_mode,
3768 memory_address (save_mode,
3769 plus_constant (argblock, low_to_save)));
3771 if (save_mode != BLKmode)
3772 emit_move_insn (stack_area, save_area);
3774 emit_block_move (stack_area, validize_mem (save_area),
3775 GEN_INT (high_to_save - low_to_save + 1),
3776 PARM_BOUNDARY / BITS_PER_UNIT);
3780 /* If we saved any argument areas, restore them. */
3781 for (count = 0; count < nargs; count++)
3782 if (argvec[count].save_area)
3784 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3786 = gen_rtx_MEM (save_mode,
3789 plus_constant (argblock,
3790 argvec[count].offset.constant)));
3792 emit_move_insn (stack_area, argvec[count].save_area);
/* Return the outgoing-argument bookkeeping to its pre-call state.  */
3795 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3796 stack_usage_map = initial_stack_usage_map;
3803 /* Return an rtx which represents a suitable home on the stack
3804 given TYPE, the type of the argument looking for a home.
3805 This is called only for BLKmode arguments.
3807 SIZE is the size needed for this target.
3808 ARGS_ADDR is the address of the bottom of the argument block for this call.
3809 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3810 if this machine uses push insns. */
3813 target_for_arg (type, size, args_addr, offset)
/* NOTE(review): the K&R declarations for TYPE, SIZE and ARGS_ADDR are
   omitted from this excerpt; only the OFFSET declaration is visible.  */
3817 struct args_size offset;
3820 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3822 /* We do not call memory_address if possible,
3823 because we want to address as close to the stack
3824 as possible. For non-variable sized arguments,
3825 this will be stack-pointer relative addressing. */
/* Constant offset: fold it into the address directly.  */
3826 if (GET_CODE (offset_rtx) == CONST_INT)
3827 target = plus_constant (args_addr, INTVAL (offset_rtx));
3830 /* I have no idea how to guarantee that this
3831 will work in the presence of register parameters. */
/* Variable offset: build an explicit PLUS and legitimize it.  */
3832 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3833 target = memory_address (QImode, target);
/* The argument home is always a BLKmode memory reference here.  */
3836 return gen_rtx_MEM (BLKmode, target);
3840 /* Store a single argument for a function call
3841 into the register or memory area where it must be passed.
3842 *ARG describes the argument value and where to pass it.
3844 ARGBLOCK is the address of the stack-block for all the arguments,
3845 or 0 on a machine where arguments are pushed individually.
3847 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3848 so must be careful about how the stack is used.
3850 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3851 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3852 that we need not worry about saving and restoring the stack.
3854 REG_PARM_STACK_SPACE is the size in bytes of the area reserved for
arguments passed in registers, used when deciding which stack slots
have already been saved. */
3857 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3858 reg_parm_stack_space)
/* Store one argument (described by *ARG) into the register or stack slot
   where it must be passed.  NOTE(review): this excerpt omits some original
   source lines (declarations, braces, #else/#endif lines, and the
   function's closing statements); read alongside the full calls.c.  */
3859 struct arg_data *arg;
3862 int variable_size ATTRIBUTE_UNUSED;
3863 int reg_parm_stack_space;
3865 register tree pval = arg->tree_value;
3869 #ifdef ACCUMULATE_OUTGOING_ARGS
3870 int i, lower_bound = 0, upper_bound = 0;
/* An erroneous argument expression; nothing useful can be stored
   (the early-out statement is omitted in this excerpt).  */
3873 if (TREE_CODE (pval) == ERROR_MARK)
3876 /* Push a new temporary level for any temporaries we make for
3880 #ifdef ACCUMULATE_OUTGOING_ARGS
3881 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3882 save any previous data at that location. */
3883 if (argblock && ! variable_size && arg->stack)
3885 #ifdef ARGS_GROW_DOWNWARD
3886 /* stack_slot is negative, but we want to index stack_usage_map
3887 with positive values. */
3888 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3889 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3893 lower_bound = upper_bound - arg->size.constant;
3895 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3896 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3900 upper_bound = lower_bound + arg->size.constant;
/* Check whether any in-use slot overlaps the range this store will
   occupy; if so a save area is needed.  */
3903 for (i = lower_bound; i < upper_bound; i++)
3904 if (stack_usage_map[i]
3905 /* Don't store things in the fixed argument area at this point;
3906 it has already been saved. */
3907 && i > reg_parm_stack_space)
3910 if (i != upper_bound)
3912 /* We need to make a save area. See what mode we can make it. */
3913 enum machine_mode save_mode
3914 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3916 = gen_rtx_MEM (save_mode,
3917 memory_address (save_mode,
3918 XEXP (arg->stack_slot, 0)));
3920 if (save_mode == BLKmode)
3922 arg->save_area = assign_stack_temp (BLKmode,
3923 arg->size.constant, 0);
3924 MEM_SET_IN_STRUCT_P (arg->save_area,
3925 AGGREGATE_TYPE_P (TREE_TYPE
3926 (arg->tree_value)));
3927 preserve_temp_slots (arg->save_area);
3928 emit_block_move (validize_mem (arg->save_area), stack_area,
3929 GEN_INT (arg->size.constant),
3930 PARM_BOUNDARY / BITS_PER_UNIT);
3934 arg->save_area = gen_reg_rtx (save_mode);
3935 emit_move_insn (arg->save_area, stack_area);
3940 /* Now that we have saved any slots that will be overwritten by this
3941 store, mark all slots this store will use. We must do this before
3942 we actually expand the argument since the expansion itself may
3943 trigger library calls which might need to use the same stack slot. */
3944 if (argblock && ! variable_size && arg->stack)
3945 for (i = lower_bound; i < upper_bound; i++)
3946 stack_usage_map[i] = 1;
3949 /* If this isn't going to be placed on both the stack and in registers,
3950 set up the register and number of words. */
3951 if (! arg->pass_on_stack)
3952 reg = arg->reg, partial = arg->partial;
3954 if (reg != 0 && partial == 0)
3955 /* Being passed entirely in a register. We shouldn't be called in
3959 /* If this arg needs special alignment, don't load the registers
3961 if (arg->n_aligned_regs != 0)
3964 /* If this is being passed partially in a register, we can't evaluate
3965 it directly into its stack slot. Otherwise, we can. */
3966 if (arg->value == 0)
3968 #ifdef ACCUMULATE_OUTGOING_ARGS
3969 /* stack_arg_under_construction is nonzero if a function argument is
3970 being evaluated directly into the outgoing argument list and
3971 expand_call must take special action to preserve the argument list
3972 if it is called recursively.
3974 For scalar function arguments stack_usage_map is sufficient to
3975 determine which stack slots must be saved and restored. Scalar
3976 arguments in general have pass_on_stack == 0.
3978 If this argument is initialized by a function which takes the
3979 address of the argument (a C++ constructor or a C function
3980 returning a BLKmode structure), then stack_usage_map is
3981 insufficient and expand_call must push the stack around the
3982 function call. Such arguments have pass_on_stack == 1.
3984 Note that it is always safe to set stack_arg_under_construction,
3985 but this generates suboptimal code if set when not needed. */
3987 if (arg->pass_on_stack)
3988 stack_arg_under_construction++;
3990 arg->value = expand_expr (pval,
3992 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3993 ? NULL_RTX : arg->stack,
3996 /* If we are promoting the object (or for any other reason the mode
3997 doesn't agree), convert the mode. */
3999 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4000 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4001 arg->value, arg->unsignedp);
4003 #ifdef ACCUMULATE_OUTGOING_ARGS
4004 if (arg->pass_on_stack)
4005 stack_arg_under_construction--;
4009 /* Don't allow anything left on stack from computation
4010 of argument to alloca. */
4012 do_pending_stack_adjust ();
/* Three cases follow: the value is already in its stack slot, it is a
   scalar to push, or it is a BLKmode aggregate to push.  */
4014 if (arg->value == arg->stack)
4016 /* If the value is already in the stack slot, we are done. */
4017 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
4019 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4020 XEXP (arg->stack, 0), Pmode,
4021 ARGS_SIZE_RTX (arg->size),
4022 TYPE_MODE (sizetype),
4023 GEN_INT (MEMORY_USE_RW),
4024 TYPE_MODE (integer_type_node));
4027 else if (arg->mode != BLKmode)
4031 /* Argument is a scalar, not entirely passed in registers.
4032 (If part is passed in registers, arg->partial says how much
4033 and emit_push_insn will take care of putting it there.)
4035 Push it, and if its size is less than the
4036 amount of space allocated to it,
4037 also bump stack pointer by the additional space.
4038 Note that in C the default argument promotions
4039 will prevent such mismatches. */
4041 size = GET_MODE_SIZE (arg->mode);
4042 /* Compute how much space the push instruction will push.
4043 On many machines, pushing a byte will advance the stack
4044 pointer by a halfword. */
4045 #ifdef PUSH_ROUNDING
4046 size = PUSH_ROUNDING (size);
4050 /* Compute how much space the argument should get:
4051 round up to a multiple of the alignment for arguments. */
4052 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4053 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4054 / (PARM_BOUNDARY / BITS_PER_UNIT))
4055 * (PARM_BOUNDARY / BITS_PER_UNIT));
4057 /* This isn't already where we want it on the stack, so put it there.
4058 This can either be done with push or copy insns. */
4059 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4060 partial, reg, used - size, argblock,
4061 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4062 ARGS_SIZE_RTX (arg->alignment_pad));
4067 /* BLKmode, at least partly to be pushed. */
4069 register int excess;
4072 /* Pushing a nonscalar.
4073 If part is passed in registers, PARTIAL says how much
4074 and emit_push_insn will take care of putting it there. */
4076 /* Round its size up to a multiple
4077 of the allocation unit for arguments. */
4079 if (arg->size.var != 0)
4082 size_rtx = ARGS_SIZE_RTX (arg->size);
4086 /* PUSH_ROUNDING has no effect on us, because
4087 emit_push_insn for BLKmode is careful to avoid it. */
4088 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4089 + partial * UNITS_PER_WORD);
4090 size_rtx = expr_size (pval);
4093 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4094 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
4095 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
4096 reg_parm_stack_space,
4097 ARGS_SIZE_RTX (arg->alignment_pad));
4101 /* Unless this is a partially-in-register argument, the argument is now
4104 ??? Note that this can change arg->value from arg->stack to
4105 arg->stack_slot and it matters when they are not the same.
4106 It isn't totally clear that this is correct in all cases. */
4108 arg->value = arg->stack_slot;
4110 /* Once we have pushed something, pops can't safely
4111 be deferred during the rest of the arguments. */
4114 /* ANSI doesn't require a sequence point here,
4115 but PCC has one, so this will avoid some problems. */
4118 /* Free any temporary slots made in processing this argument. Show
4119 that we might have taken the address of something and pushed that
4121 preserve_temp_slots (NULL_RTX);