1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
39 #include "langhooks.h"
45 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
48 /* Data structure and subroutines used within expand_call. */
52 /* Tree node for this argument. */
54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
58 /* Initially-computed RTL value for argument; only for const functions. */
60 /* Register to pass this argument in, 0 if passed on stack, or a
61 PARALLEL if the arg is to be copied into multiple non-contiguous
64 /* Register to pass this argument in when generating tail call sequence.
65 This is not the same register as for normal calls on machines with
68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of bytes to put in registers. 0 means put the whole arg
75 in registers. Also 0 if not passed in registers. */
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
91 /* Place that this stack area has been saved, if needed. */
93 /* If an argument's alignment does not permit direct copying into registers,
94 it is copied in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
101 /* A vector of one char per byte of stack space. A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
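/* Illustrative sketch (offsets invented for this comment; the real
   bookkeeping lives in expand_call and store_one_arg): marking an argument
   that occupies SIZE bytes at outgoing offset OFF amounts to

     for (i = OFF; i < OFF + SIZE; i++)
       stack_usage_map[i] = 1;

   with highest_outgoing_arg_in_use raised to at least OFF + SIZE, so a
   nested call expanded while computing another argument knows that those
   bytes are already taken.  */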
110 /* A bitmap of virtual-incoming stack space. A bit is set if the tail call
111 argument for the corresponding stack location has already been stored into
112 the stack. This bitmap is used to prevent sibling call optimization if the
113 function tries to use its parent's incoming argument slots when they have
114 already been overwritten with tail call arguments. */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 static int stack_arg_under_construction;
124 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
127 static void precompute_register_parameters (int, struct arg_data *, int *);
128 static int store_one_arg (struct arg_data *, rtx, int, int, int);
129 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130 static int finalize_must_preallocate (int, int, struct arg_data *,
132 static void precompute_arguments (int, int, struct arg_data *);
133 static int compute_argument_block_size (int, struct args_size *, int);
134 static void initialize_argument_information (int, struct arg_data *,
135 struct args_size *, int,
137 tree, CUMULATIVE_ARGS *, int,
138 rtx *, int *, int *, int *,
140 static void compute_argument_addresses (struct arg_data *, rtx, int);
141 static rtx rtx_for_function_call (tree, tree);
142 static void load_register_parameters (struct arg_data *, int, rtx *, int,
144 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
145 enum machine_mode, int, va_list);
146 static int special_function_p (const_tree, int);
147 static int check_sibcall_argument_overlap_1 (rtx);
148 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
150 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
152 static tree split_complex_types (tree);
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
159 /* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
167 prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
170 /* Make a valid memory address and copy constants through pseudo-regs,
171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
173 /* If we are using registers for parameters, force the
174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
180 #ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
182 funexp = force_reg (Pmode, funexp);
186 if (static_chain_value != 0)
188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
189 emit_move_insn (static_chain_rtx, static_chain_value);
191 if (REG_P (static_chain_rtx))
192 use_reg (call_fusage, static_chain_rtx);
198 /* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
202 FNDECL is the declaration node of the function. This is given to the
203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
211 STACK_SIZE is the number of bytes of arguments on the stack,
212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
214 both to put into the call insn and to generate explicit popping
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
236 denote registers used by the called function. */
239 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
250 int already_popped = 0;
251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
252 #if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
257 #ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
267 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
270 && (n_popped > 0 || stack_size == 0))
272 rtx n_pop = GEN_INT (n_popped);
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
279 pat = GEN_SIBCALL_VALUE_POP (valreg,
280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
285 rounded_stack_size_rtx, next_arg_reg, n_pop);
287 emit_call_insn (pat);
293 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
298 #if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
300 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
302 if (HAVE_call_pop && HAVE_call_value_pop)
305 rtx n_pop = GEN_INT (n_popped);
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
312 pat = GEN_CALL_VALUE_POP (valreg,
313 gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
317 rounded_stack_size_rtx, next_arg_reg, n_pop);
319 emit_call_insn (pat);
325 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
342 #if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
346 emit_call_insn (GEN_CALL_VALUE (valreg,
347 gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
352 rounded_stack_size_rtx, next_arg_reg,
353 struct_value_size_rtx));
359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
374 /* If this is a const call, then set the insn's unchanging bit. */
375 if (ecf_flags & (ECF_CONST | ECF_PURE))
376 CONST_OR_PURE_CALL_P (call_insn) = 1;
378 /* If this call can't throw, attach a REG_EH_REGION reg note to that
380 if (ecf_flags & ECF_NOTHROW)
381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
382 REG_NOTES (call_insn));
385 int rn = lookup_stmt_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
399 if (ecf_flags & ECF_RETURNS_TWICE)
401 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
402 REG_NOTES (call_insn));
403 current_function_calls_setjmp = 1;
406 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
408 /* Restore this now, so that we do defer pops for this call's args
409 if the context of the call as a whole permits. */
410 inhibit_defer_pop = old_inhibit_defer_pop;
415 CALL_INSN_FUNCTION_USAGE (call_insn)
416 = gen_rtx_EXPR_LIST (VOIDmode,
417 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
418 CALL_INSN_FUNCTION_USAGE (call_insn));
419 rounded_stack_size -= n_popped;
420 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
421 stack_pointer_delta -= n_popped;
424 if (!ACCUMULATE_OUTGOING_ARGS)
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
433 if (rounded_stack_size != 0)
435 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
442 adjust_stack (rounded_stack_size_rtx);
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
451 ??? We could optimize similarly to defer_pop above, but it is
452 probably not worthwhile.
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
457 anti_adjust_stack (GEN_INT (n_popped));
460 /* Determine if the function identified by NAME and FNDECL is one with
461 special properties we wish to know about.
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
466 Similarly set NORETURN if the function is in the longjmp family.
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
472 special_function_p (const_tree fndecl, int flags)
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
501 /* Disregard prefix _, __ or __x. */
504 if (name[1] == '_' && name[2] == 'x')
506 else if (name[1] == '_')
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
518 && ! strcmp (tname, "sigsetjmp"))
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
524 && ! strcmp (tname, "siglongjmp"))
525 flags |= ECF_NORETURN;
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
530 && ! strcmp (tname, "vfork"))
531 || (tname[0] == 'g' && tname[1] == 'e'
532 && !strcmp (tname, "getcontext")))
533 flags |= ECF_RETURNS_TWICE;
535 else if (tname[0] == 'l' && tname[1] == 'o'
536 && ! strcmp (tname, "longjmp"))
537 flags |= ECF_NORETURN;
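/* For example (identifiers chosen for illustration): a call to "__sigsetjmp"
   first has its "__" prefix disregarded, the remaining "sigsetjmp" matches
   the test above and the call is marked ECF_RETURNS_TWICE; "_longjmp"
   likewise strips to "longjmp" and is marked ECF_NORETURN.  */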
543 /* Return nonzero when FNDECL represents a call to setjmp. */
546 setjmp_call_p (const_tree fndecl)
548 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
551 /* Return true when EXP contains an alloca call. */
553 alloca_call_p (const_tree exp)
555 if (TREE_CODE (exp) == CALL_EXPR
556 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
557 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
558 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
559 & ECF_MAY_BE_ALLOCA))
564 /* Detect flags (function attributes) from the function decl or type node. */
567 flags_from_decl_or_type (const_tree exp)
570 const_tree type = exp;
574 type = TREE_TYPE (exp);
576 /* The function exp may have the `malloc' attribute. */
577 if (DECL_IS_MALLOC (exp))
580 /* The function exp may have the `returns_twice' attribute. */
581 if (DECL_IS_RETURNS_TWICE (exp))
582 flags |= ECF_RETURNS_TWICE;
584 /* The function exp may have the `pure' attribute. */
585 if (DECL_IS_PURE (exp))
588 if (DECL_IS_NOVOPS (exp))
591 if (TREE_NOTHROW (exp))
592 flags |= ECF_NOTHROW;
594 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
597 flags = special_function_p (exp, flags);
599 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
602 if (TREE_THIS_VOLATILE (exp))
603 flags |= ECF_NORETURN;
605 /* Mark if the function returns with the stack pointer depressed. We
606 cannot consider it pure or constant in that case. */
607 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
609 flags |= ECF_SP_DEPRESSED;
610 flags &= ~(ECF_PURE | ECF_CONST);
616 /* Detect flags from a CALL_EXPR. */
619 call_expr_flags (const_tree t)
622 tree decl = get_callee_fndecl (t);
625 flags = flags_from_decl_or_type (decl);
628 t = TREE_TYPE (CALL_EXPR_FN (t));
629 if (t && TREE_CODE (t) == POINTER_TYPE)
630 flags = flags_from_decl_or_type (TREE_TYPE (t));
638 /* Precompute all register parameters as described by ARGS, storing values
639 into fields within the ARGS array.
641 NUM_ACTUALS indicates the total number of elements in the ARGS array.
643 Set REG_PARM_SEEN if we encounter a register parameter. */
646 precompute_register_parameters (int num_actuals, struct arg_data *args,
653 for (i = 0; i < num_actuals; i++)
654 if (args[i].reg != 0 && ! args[i].pass_on_stack)
658 if (args[i].value == 0)
661 args[i].value = expand_normal (args[i].tree_value);
662 preserve_temp_slots (args[i].value);
666 /* If the value is a non-legitimate constant, force it into a
667 pseudo now. TLS symbols sometimes need a call to resolve. */
668 if (CONSTANT_P (args[i].value)
669 && !LEGITIMATE_CONSTANT_P (args[i].value))
670 args[i].value = force_reg (args[i].mode, args[i].value);
672 /* If we are to promote the function arg to a wider mode,
675 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
677 = convert_modes (args[i].mode,
678 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
679 args[i].value, args[i].unsignedp);
681 /* If we're going to have to load the value by parts, pull the
682 parts into pseudos. The part extraction process can involve
683 non-trivial computation. */
684 if (GET_CODE (args[i].reg) == PARALLEL)
686 tree type = TREE_TYPE (args[i].tree_value);
687 args[i].parallel_value
688 = emit_group_load_into_temps (args[i].reg, args[i].value,
689 type, int_size_in_bytes (type));
692 /* If the value is expensive, and we are inside an appropriately
693 short loop, put the value into a pseudo and then put the pseudo
696 For small register classes, also do this if this call uses
697 register parameters. This is to avoid reload conflicts while
698 loading the parameter registers. */
700 else if ((! (REG_P (args[i].value)
701 || (GET_CODE (args[i].value) == SUBREG
702 && REG_P (SUBREG_REG (args[i].value)))))
703 && args[i].mode != BLKmode
704 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
705 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
707 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
711 #ifdef REG_PARM_STACK_SPACE
713 /* The argument list is the property of the called routine and it
714 may clobber it. If the fixed area has been used for previous
715 parameters, we must save and restore it. */
718 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
723 /* Compute the boundary of the area that needs to be saved, if any. */
724 high = reg_parm_stack_space;
725 #ifdef ARGS_GROW_DOWNWARD
728 if (high > highest_outgoing_arg_in_use)
729 high = highest_outgoing_arg_in_use;
731 for (low = 0; low < high; low++)
732 if (stack_usage_map[low] != 0)
735 enum machine_mode save_mode;
740 while (stack_usage_map[--high] == 0)
744 *high_to_save = high;
746 num_to_save = high - low + 1;
747 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
749 /* If we don't have the required alignment, must do this
751 if ((low & (MIN (GET_MODE_SIZE (save_mode),
752 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
755 #ifdef ARGS_GROW_DOWNWARD
760 stack_area = gen_rtx_MEM (save_mode,
761 memory_address (save_mode,
762 plus_constant (argblock,
765 set_mem_align (stack_area, PARM_BOUNDARY);
766 if (save_mode == BLKmode)
768 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
769 emit_block_move (validize_mem (save_area), stack_area,
770 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
774 save_area = gen_reg_rtx (save_mode);
775 emit_move_insn (save_area, stack_area);
785 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
787 enum machine_mode save_mode = GET_MODE (save_area);
791 #ifdef ARGS_GROW_DOWNWARD
792 delta = -high_to_save;
796 stack_area = gen_rtx_MEM (save_mode,
797 memory_address (save_mode,
798 plus_constant (argblock, delta)));
799 set_mem_align (stack_area, PARM_BOUNDARY);
801 if (save_mode != BLKmode)
802 emit_move_insn (stack_area, save_area);
804 emit_block_move (stack_area, validize_mem (save_area),
805 GEN_INT (high_to_save - low_to_save + 1),
808 #endif /* REG_PARM_STACK_SPACE */
810 /* If any elements in ARGS refer to parameters that are to be passed in
811 registers, but not in memory, and whose alignment does not permit a
812 direct copy into registers, copy the values into a group of pseudos
813 which we will later copy into the appropriate hard registers.
815 Pseudos for each unaligned argument will be stored into the array
816 args[argnum].aligned_regs. The caller is responsible for deallocating
817 the aligned_regs array if it is nonzero. */
820 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
824 for (i = 0; i < num_actuals; i++)
825 if (args[i].reg != 0 && ! args[i].pass_on_stack
826 && args[i].mode == BLKmode
827 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
828 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
830 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
831 int endian_correction = 0;
835 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
836 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
840 args[i].n_aligned_regs
841 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
844 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
846 /* Structures smaller than a word are normally aligned to the
847 least significant byte. On a BYTES_BIG_ENDIAN machine,
848 this means we must skip the empty high order bytes when
849 calculating the bit offset. */
850 if (bytes < UNITS_PER_WORD
851 #ifdef BLOCK_REG_PADDING
852 && (BLOCK_REG_PADDING (args[i].mode,
853 TREE_TYPE (args[i].tree_value), 1)
859 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
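/* Worked example (numbers chosen for illustration): for a 3-byte structure
   on a 32-bit BYTES_BIG_ENDIAN target, bytes == 3 and endian_correction
   == 32 - 3 * 8 == 8, i.e. the bit offset used below skips the 8 empty
   high-order bits before the 24 significant bits are stored into the
   word-sized pseudo.  */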
861 for (j = 0; j < args[i].n_aligned_regs; j++)
863 rtx reg = gen_reg_rtx (word_mode);
864 rtx word = operand_subword_force (args[i].value, j, BLKmode);
865 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
867 args[i].aligned_regs[j] = reg;
868 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
869 word_mode, word_mode);
871 /* There is no need to restrict this code to loading items
872 in TYPE_ALIGN sized hunks. The bitfield instructions can
873 load up entire word sized registers efficiently.
875 ??? This may not be needed anymore.
876 We used to emit a clobber here but that doesn't let later
877 passes optimize the instructions we emit. By storing 0 into
878 the register, later passes know that the first AND to zero out the
879 bitfield being set in the register is unnecessary. The store
880 of 0 will be deleted as will at least the first AND. */
882 emit_move_insn (reg, const0_rtx);
884 bytes -= bitsize / BITS_PER_UNIT;
885 store_bit_field (reg, bitsize, endian_correction, word_mode,
891 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
894 NUM_ACTUALS is the total number of parameters.
896 N_NAMED_ARGS is the total number of named arguments.
898 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
901 FNDECL is the tree code for the target of this call (if known)
903 ARGS_SO_FAR holds state needed by the target to know where to place
906 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
907 for arguments which are passed in registers.
909 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
910 and may be modified by this routine.
912 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
913 flags which may be modified by this routine.
915 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
916 that requires allocation of stack space.
918 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
919 the thunked-to function. */
922 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
923 struct arg_data *args,
924 struct args_size *args_size,
925 int n_named_args ATTRIBUTE_UNUSED,
926 tree exp, tree struct_value_addr_value,
928 CUMULATIVE_ARGS *args_so_far,
929 int reg_parm_stack_space,
930 rtx *old_stack_level, int *old_pending_adj,
931 int *must_preallocate, int *ecf_flags,
932 bool *may_tailcall, bool call_from_thunk_p)
934 /* 1 if scanning parms front to back, -1 if scanning back to front. */
937 /* Count arg position in order args appear. */
942 args_size->constant = 0;
945 /* In this loop, we consider args in the order they are written.
946 We fill up ARGS from the front or from the back if necessary
947 so that in any case the first arg to be pushed ends up at the front. */
949 if (PUSH_ARGS_REVERSED)
951 i = num_actuals - 1, inc = -1;
952 /* In this case, we must reverse the order of args
953 so that we compute and push the last arg first. */
960 /* First fill in the actual arguments in the ARGS array, splitting
961 complex arguments if necessary. */
964 call_expr_arg_iterator iter;
967 if (struct_value_addr_value)
969 args[j].tree_value = struct_value_addr_value;
972 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
974 tree argtype = TREE_TYPE (arg);
975 if (targetm.calls.split_complex_arg
977 && TREE_CODE (argtype) == COMPLEX_TYPE
978 && targetm.calls.split_complex_arg (argtype))
980 tree subtype = TREE_TYPE (argtype);
981 arg = save_expr (arg);
982 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
984 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
987 args[j].tree_value = arg;
992 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
993 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
995 tree type = TREE_TYPE (args[i].tree_value);
997 enum machine_mode mode;
999 /* Replace erroneous argument with constant zero. */
1000 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1001 args[i].tree_value = integer_zero_node, type = integer_type_node;
1003 /* If TYPE is a transparent union, pass things the way we would
1004 pass the first field of the union. We have already verified that
1005 the modes are the same. */
1006 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1007 type = TREE_TYPE (TYPE_FIELDS (type));
1009 /* Decide where to pass this arg.
1011 args[i].reg is nonzero if all or part is passed in registers.
1013 args[i].partial is nonzero if part but not all is passed in registers,
1014 and the exact value says how many bytes are passed in registers.
1016 args[i].pass_on_stack is nonzero if the argument must at least be
1017 computed on the stack. It may then be loaded back into registers
1018 if args[i].reg is nonzero.
1020 These decisions are driven by the FUNCTION_... macros and must agree
1021 with those made by function.c. */
1023 /* See if this argument should be passed by invisible reference. */
1024 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1025 type, argpos < n_named_args))
1031 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1032 type, argpos < n_named_args);
1034 /* If we're compiling a thunk, pass through invisible references
1035 instead of making a copy. */
1036 if (call_from_thunk_p
1038 && !TREE_ADDRESSABLE (type)
1039 && (base = get_base_address (args[i].tree_value))
1040 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1042 /* We can't use sibcalls if a callee-copied argument is
1043 stored in the current function's frame. */
1044 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1045 *may_tailcall = false;
1047 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1048 type = TREE_TYPE (args[i].tree_value);
1050 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1054 /* We make a copy of the object and pass the address to the
1055 function being called. */
1058 if (!COMPLETE_TYPE_P (type)
1059 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1060 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1061 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1062 STACK_CHECK_MAX_VAR_SIZE))))
1064 /* This is a variable-sized object. Make space on the stack
1066 rtx size_rtx = expr_size (args[i].tree_value);
1068 if (*old_stack_level == 0)
1070 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1071 *old_pending_adj = pending_stack_adjust;
1072 pending_stack_adjust = 0;
1075 copy = gen_rtx_MEM (BLKmode,
1076 allocate_dynamic_stack_space
1077 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1078 set_mem_attributes (copy, type, 1);
1081 copy = assign_temp (type, 0, 1, 0);
1083 store_expr (args[i].tree_value, copy, 0, false);
1086 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1088 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1091 = build_fold_addr_expr (make_tree (type, copy));
1092 type = TREE_TYPE (args[i].tree_value);
1093 *may_tailcall = false;
1097 mode = TYPE_MODE (type);
1098 unsignedp = TYPE_UNSIGNED (type);
1100 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1101 mode = promote_mode (type, mode, &unsignedp, 1);
1103 args[i].unsignedp = unsignedp;
1104 args[i].mode = mode;
1106 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1107 argpos < n_named_args);
1108 #ifdef FUNCTION_INCOMING_ARG
1109 /* If this is a sibling call and the machine has register windows, the
1110 register window has to be unwound before calling the routine, so
1111 arguments have to go into the incoming registers. */
1112 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1113 argpos < n_named_args);
1115 args[i].tail_call_reg = args[i].reg;
1120 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1121 argpos < n_named_args);
1123 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1125 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1126 it means that we are to pass this arg in the register(s) designated
1127 by the PARALLEL, but also to pass it on the stack. */
1128 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1129 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1130 args[i].pass_on_stack = 1;
1132 /* If this is an addressable type, we must preallocate the stack
1133 since we must evaluate the object into its final location.
1135 If this is to be passed in both registers and the stack, it is simpler
1137 if (TREE_ADDRESSABLE (type)
1138 || (args[i].pass_on_stack && args[i].reg != 0))
1139 *must_preallocate = 1;
1141 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1142 we cannot consider this function call constant. */
1143 if (TREE_ADDRESSABLE (type))
1144 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1146 /* Compute the stack-size of this argument. */
1147 if (args[i].reg == 0 || args[i].partial != 0
1148 || reg_parm_stack_space > 0
1149 || args[i].pass_on_stack)
1150 locate_and_pad_parm (mode, type,
1151 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1156 args[i].pass_on_stack ? 0 : args[i].partial,
1157 fndecl, args_size, &args[i].locate);
1158 #ifdef BLOCK_REG_PADDING
1160 /* The argument is passed entirely in registers. See at which
1161 end it should be padded. */
1162 args[i].locate.where_pad =
1163 BLOCK_REG_PADDING (mode, type,
1164 int_size_in_bytes (type) <= UNITS_PER_WORD);
1167 /* Update ARGS_SIZE, the total stack space for args so far. */
1169 args_size->constant += args[i].locate.size.constant;
1170 if (args[i].locate.size.var)
1171 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1173 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1174 have been used, etc. */
1176 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1177 argpos < n_named_args);
1181 /* Update ARGS_SIZE to contain the total size for the argument block.
1182 Return the original constant component of the argument block's size.
1184 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1185 for arguments passed in registers. */
1188 compute_argument_block_size (int reg_parm_stack_space,
1189 struct args_size *args_size,
1190 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1192 int unadjusted_args_size = args_size->constant;
1194 /* For accumulate outgoing args mode we don't need to align, since the frame
1195 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1196 backends from generating misaligned frame sizes. */
1197 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1198 preferred_stack_boundary = STACK_BOUNDARY;
1200 /* Compute the actual size of the argument block required. The variable
1201 and constant sizes must be combined, the size may have to be rounded,
1202 and there may be a minimum required size. */
1206 args_size->var = ARGS_SIZE_TREE (*args_size);
1207 args_size->constant = 0;
1209 preferred_stack_boundary /= BITS_PER_UNIT;
1210 if (preferred_stack_boundary > 1)
1212 /* We don't handle this case yet. To handle it correctly we have
1213 to add the delta, round and subtract the delta.
1214 Currently no machine description requires this support. */
1215 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1216 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1219 if (reg_parm_stack_space > 0)
1222 = size_binop (MAX_EXPR, args_size->var,
1223 ssize_int (reg_parm_stack_space));
1225 /* The area corresponding to register parameters is not to count in
1226 the size of the block we need. So make the adjustment. */
1227 if (!OUTGOING_REG_PARM_STACK_SPACE)
1229 = size_binop (MINUS_EXPR, args_size->var,
1230 ssize_int (reg_parm_stack_space));
1235 preferred_stack_boundary /= BITS_PER_UNIT;
1236 if (preferred_stack_boundary < 1)
1237 preferred_stack_boundary = 1;
1238 args_size->constant = (((args_size->constant
1239 + stack_pointer_delta
1240 + preferred_stack_boundary - 1)
1241 / preferred_stack_boundary
1242 * preferred_stack_boundary)
1243 - stack_pointer_delta);
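/* Worked example (all numbers invented for illustration): with
   args_size->constant == 20, stack_pointer_delta == 4 and a preferred
   boundary of 16 bytes, this yields ((20 + 4 + 15) / 16) * 16 - 4 == 28,
   so that the new constant plus the existing stack pointer delta,
   28 + 4 == 32, is a multiple of the boundary.  */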
1245 args_size->constant = MAX (args_size->constant,
1246 reg_parm_stack_space);
1248 if (!OUTGOING_REG_PARM_STACK_SPACE)
1249 args_size->constant -= reg_parm_stack_space;
1251 return unadjusted_args_size;
1254 /* Precompute parameters as needed for a function call.
1256 FLAGS is mask of ECF_* constants.
1258 NUM_ACTUALS is the number of arguments.
1260 ARGS is an array containing information for each argument; this
1261 routine fills in the INITIAL_VALUE and VALUE fields for each
1262 precomputed argument. */
1265 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1269 /* If this is a libcall, then precompute all arguments so that we do not
1270 get extraneous instructions emitted as part of the libcall sequence. */
1272 /* If we preallocated the stack space, and some arguments must be passed
1273 on the stack, then we must precompute any parameter which contains a
1274 function call which will store arguments on the stack.
1275 Otherwise, evaluating the parameter may clobber previous parameters
1276 which have already been stored into the stack. (We have code to avoid
1277 such a case by saving the outgoing stack arguments, but it results in
1279 if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)
1282 for (i = 0; i < num_actuals; i++)
1284 enum machine_mode mode;
1286 if ((flags & ECF_LIBCALL_BLOCK) == 0
1287 && TREE_CODE (args[i].tree_value) != CALL_EXPR)
1290 /* If this is an addressable type, we cannot pre-evaluate it. */
1291 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1293 args[i].initial_value = args[i].value
1294 = expand_normal (args[i].tree_value);
1296 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1297 if (mode != args[i].mode)
1300 = convert_modes (args[i].mode, mode,
1301 args[i].value, args[i].unsignedp);
1302 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1303 /* CSE will replace this only if it contains args[i].value
1304 pseudo, so convert it down to the declared mode using
1306 if (REG_P (args[i].value)
1307 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1309 args[i].initial_value
1310 = gen_lowpart_SUBREG (mode, args[i].value);
1311 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1312 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1320 /* Given the current state of MUST_PREALLOCATE and information about
1321 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1322 compute and return the final value for MUST_PREALLOCATE. */
1325 finalize_must_preallocate (int must_preallocate, int num_actuals,
1326 struct arg_data *args, struct args_size *args_size)
1328 /* See if we have or want to preallocate stack space.
1330 If we would have to push a partially-in-regs parm
1331 before other stack parms, preallocate stack space instead.
1333 If the size of some parm is not a multiple of the required stack
1334 alignment, we must preallocate.
1336 If the total size of arguments that would otherwise create a copy in
1337 a temporary (such as a CALL) is more than half the total argument list
1338 size, preallocation is faster.
1340 Another reason to preallocate is if we have a machine (like the m88k)
1341 where stack alignment is required to be maintained between every
1342 pair of insns, not just when the call is made. However, we assume here
1343 that such machines either do not have push insns (and hence preallocation
1344 would occur anyway) or the problem is taken care of with
1347 if (! must_preallocate)
1349 int partial_seen = 0;
1350 int copy_to_evaluate_size = 0;
1353 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1355 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1357 else if (partial_seen && args[i].reg == 0)
1358 must_preallocate = 1;
1360 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1361 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1362 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1363 || TREE_CODE (args[i].tree_value) == COND_EXPR
1364 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1365 copy_to_evaluate_size
1366 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1369 if (copy_to_evaluate_size * 2 >= args_size->constant
1370 && args_size->constant > 0)
1371 must_preallocate = 1;
1373 return must_preallocate;
1376 /* If we preallocated stack space, compute the address of each argument
1377 and store it into the ARGS array.
1379 We need not ensure it is a valid memory address here; it will be
1380 validized when it is used.
1382 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1385 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1389 rtx arg_reg = argblock;
1390 int i, arg_offset = 0;
1392 if (GET_CODE (argblock) == PLUS)
1393 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1395 for (i = 0; i < num_actuals; i++)
1397 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1398 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1400 unsigned int align, boundary;
1401 unsigned int units_on_stack = 0;
1402 enum machine_mode partial_mode = VOIDmode;
1404 /* Skip this parm if it will not be passed on the stack. */
1405 if (! args[i].pass_on_stack
1407 && args[i].partial == 0)
1410 if (GET_CODE (offset) == CONST_INT)
1411 addr = plus_constant (arg_reg, INTVAL (offset));
1413 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1415 addr = plus_constant (addr, arg_offset);
1417 if (args[i].partial != 0)
1419 /* Only part of the parameter is being passed on the stack.
1420 Generate a simple memory reference of the correct size. */
1421 units_on_stack = args[i].locate.size.constant;
1422 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1424 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1425 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1429 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1430 set_mem_attributes (args[i].stack,
1431 TREE_TYPE (args[i].tree_value), 1);
1433 align = BITS_PER_UNIT;
1434 boundary = args[i].locate.boundary;
1435 if (args[i].locate.where_pad != downward)
1437 else if (GET_CODE (offset) == CONST_INT)
1439 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1440 align = align & -align;
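/* Worked example (numbers invented for illustration): a slot at byte
   offset 4 with a 64-bit boundary gives 4 * 8 | 64 == 96, and 96 & -96
   isolates the lowest set bit, 32, so the slot is known to be 32-bit
   aligned.  */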
1442 set_mem_align (args[i].stack, align);
1444 if (GET_CODE (slot_offset) == CONST_INT)
1445 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1447 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1449 addr = plus_constant (addr, arg_offset);
1451 if (args[i].partial != 0)
1453 /* Only part of the parameter is being passed on the stack.
1454 Generate a simple memory reference of the correct size.
1456 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1457 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1461 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1462 set_mem_attributes (args[i].stack_slot,
1463 TREE_TYPE (args[i].tree_value), 1);
1465 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1467 /* Function incoming arguments may overlap with sibling call
1468 outgoing arguments and we cannot allow reordering of reads
1469 from function arguments with stores to outgoing arguments
1470 of sibling calls. */
1471 set_mem_alias_set (args[i].stack, 0);
1472 set_mem_alias_set (args[i].stack_slot, 0);
1477 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1478 in a call instruction.
1480 FNDECL is the tree node for the target function. For an indirect call
1481 FNDECL will be NULL_TREE.
1483 ADDR is the operand 0 of CALL_EXPR for this call. */
1486 rtx_for_function_call (tree fndecl, tree addr)
1490 /* Get the function to call, in the form of RTL. */
1493 /* If this is the first use of the function, see if we need to
1494 make an external definition for it. */
1495 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1497 assemble_external (fndecl);
1498 TREE_USED (fndecl) = 1;
1501 /* Get a SYMBOL_REF rtx for the function address. */
1502 funexp = XEXP (DECL_RTL (fndecl), 0);
1505 /* Generate an rtx (probably a pseudo-register) for the address. */
1508 funexp = expand_normal (addr);
1509 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1514 /* Return true if and only if SIZE storage units (usually bytes)
1515 starting from address ADDR overlap with already clobbered argument
1516 area. This function is used to determine if we should give up a
1520 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1524 if (addr == current_function_internal_arg_pointer)
1526 else if (GET_CODE (addr) == PLUS
1527 && XEXP (addr, 0) == current_function_internal_arg_pointer
1528 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
1529 i = INTVAL (XEXP (addr, 1));
1530 /* Return true for arg pointer based indexed addressing. */
1531 else if (GET_CODE (addr) == PLUS
1532 && (XEXP (addr, 0) == current_function_internal_arg_pointer
1533 || XEXP (addr, 1) == current_function_internal_arg_pointer))
1538 #ifdef ARGS_GROW_DOWNWARD
1543 unsigned HOST_WIDE_INT k;
1545 for (k = 0; k < size; k++)
1546 if (i + k < stored_args_map->n_bits
1547 && TEST_BIT (stored_args_map, i + k))
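/* Illustrative example (offsets invented for this comment): if a tail call
   argument has already been stored over incoming-argument bytes 8..11,
   those bits are set in stored_args_map; a 4-byte access starting at
   offset 10 then finds bit 10 set in the loop above and the overlap is
   reported.  */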
1554 /* Do the register loads required for any wholly-register parms or any
1555 parms which are passed both on the stack and in a register. Their
1556 expressions were already evaluated.
1558 Mark all register-parms as living through the call, putting these USE
1559 insns in the CALL_INSN_FUNCTION_USAGE field.
1561 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1562 checking, setting *SIBCALL_FAILURE if appropriate. */
1565 load_register_parameters (struct arg_data *args, int num_actuals,
1566 rtx *call_fusage, int flags, int is_sibcall,
1567 int *sibcall_failure)
1571 for (i = 0; i < num_actuals; i++)
1573 rtx reg = ((flags & ECF_SIBCALL)
1574 ? args[i].tail_call_reg : args[i].reg);
1577 int partial = args[i].partial;
1580 rtx before_arg = get_last_insn ();
1581 /* Set non-negative if we must move a word at a time, even if
1582 just one word (e.g., partial == 4 && mode == DFmode). Set
1583 to -1 if we just use a normal move insn. This value can be
1584 zero if the argument is a zero size structure. */
1586 if (GET_CODE (reg) == PARALLEL)
1590 gcc_assert (partial % UNITS_PER_WORD == 0);
1591 nregs = partial / UNITS_PER_WORD;
1593 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1595 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1596 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1599 size = GET_MODE_SIZE (args[i].mode);
1601 /* Handle calls that pass values in multiple non-contiguous
1602 locations. The Irix 6 ABI has examples of this. */
1604 if (GET_CODE (reg) == PARALLEL)
1605 emit_group_move (reg, args[i].parallel_value);
1607 /* If simple case, just do move. If normal partial, store_one_arg
1608 has already loaded the register for us. In all other cases,
1609 load the register(s) from memory. */
1611 else if (nregs == -1)
1613 emit_move_insn (reg, args[i].value);
1614 #ifdef BLOCK_REG_PADDING
1615 /* Handle the case where we have a value that needs shifting
1616 up to the msb, e.g. a QImode value and we're padding
1617 upward on a BYTES_BIG_ENDIAN machine. */
1618 if (size < UNITS_PER_WORD
1619 && (args[i].locate.where_pad
1620 == (BYTES_BIG_ENDIAN ? upward : downward)))
1623 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1625 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1626 report the whole reg as used. Strictly speaking, the
1627 call only uses SIZE bytes at the msb end, but it doesn't
1628 seem worth generating rtl to say that. */
1629 reg = gen_rtx_REG (word_mode, REGNO (reg));
1630 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1631 build_int_cst (NULL_TREE, shift),
1634 emit_move_insn (reg, x);
1639 /* If we have pre-computed the values to put in the registers in
1640 the case of non-aligned structures, copy them in now. */
1642 else if (args[i].n_aligned_regs != 0)
1643 for (j = 0; j < args[i].n_aligned_regs; j++)
1644 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1645 args[i].aligned_regs[j]);
1647 else if (partial == 0 || args[i].pass_on_stack)
1649 rtx mem = validize_mem (args[i].value);
1651 /* Check for overlap with already clobbered argument area. */
1653 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1655 *sibcall_failure = 1;
1657 /* Handle a BLKmode that needs shifting. */
1658 if (nregs == 1 && size < UNITS_PER_WORD
1659 #ifdef BLOCK_REG_PADDING
1660 && args[i].locate.where_pad == downward
1666 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1667 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1668 rtx x = gen_reg_rtx (word_mode);
1669 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1670 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1673 emit_move_insn (x, tem);
1674 x = expand_shift (dir, word_mode, x,
1675 build_int_cst (NULL_TREE, shift),
1678 emit_move_insn (ri, x);
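/* Worked example (sizes chosen for illustration): a 3-byte BLKmode
   argument with 4-byte words gives shift == (4 - 3) * 8 == 8; the word
   loaded above is shifted by that amount (right when BYTES_BIG_ENDIAN,
   left otherwise) before being moved into the argument register.  */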
1681 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1684 /* When a parameter is a block, and perhaps in other cases, it is
1685 possible that it did a load from an argument slot that was
1686 already clobbered. */
1688 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1689 *sibcall_failure = 1;
1691 /* Handle calls that pass values in multiple non-contiguous
1692 locations. The Irix 6 ABI has examples of this. */
1693 if (GET_CODE (reg) == PARALLEL)
1694 use_group_regs (call_fusage, reg);
1695 else if (nregs == -1)
1696 use_reg (call_fusage, reg);
1698 use_regs (call_fusage, REGNO (reg), nregs);
1703 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1704 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1705 bytes, then we would need to push some additional bytes to pad the
1706 arguments. So, we compute an adjustment to the stack pointer for an
1707 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1708 bytes. Then, when the arguments are pushed the stack will be perfectly
1709 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1710 be popped after the call. Returns the adjustment. */
1713 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1714 struct args_size *args_size,
1715 unsigned int preferred_unit_stack_boundary)
1717 /* The number of bytes to pop so that the stack will be
1718 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1719 HOST_WIDE_INT adjustment;
1720 /* The alignment of the stack after the arguments are pushed, if we
1721 just pushed the arguments without adjusting the stack here.
1722 unsigned HOST_WIDE_INT unadjusted_alignment;
1724 unadjusted_alignment
1725 = ((stack_pointer_delta + unadjusted_args_size)
1726 % preferred_unit_stack_boundary);
1728 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1729 as possible -- leaving just enough left to cancel out the
1730 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1731 PENDING_STACK_ADJUST is non-negative, and congruent to
1732 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1734 /* Begin by trying to pop all the bytes. */
1735 unadjusted_alignment
1736 = (unadjusted_alignment
1737 - (pending_stack_adjust % preferred_unit_stack_boundary));
1738 adjustment = pending_stack_adjust;
1739 /* Push enough additional bytes that the stack will be aligned
1740 after the arguments are pushed. */
1741 if (preferred_unit_stack_boundary > 1)
1743 if (unadjusted_alignment > 0)
1744 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1746 adjustment += unadjusted_alignment;
1749 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1750 bytes after the call. The right number is the entire
1751 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1752 by the arguments in the first place. */
1754 = pending_stack_adjust - adjustment + unadjusted_args_size;
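/* Worked example (all numbers invented for illustration): with a 16-byte
   boundary, stack_pointer_delta == 12, unadjusted_args_size == 8 and
   pending_stack_adjust == 20, unadjusted_alignment is (12 + 8) % 16 == 4,
   reduced by 20 % 16 to 0, so the whole 20 bytes are popped now
   (adjustment == 20); once the 8 bytes of arguments are pushed the stack
   is again 16-byte aligned, and ARGS_SIZE->CONSTANT becomes
   20 - 20 + 8 == 8 bytes to pop after the call.  */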
1759 /* Scan expression X to check whether it dereferences any argument slots
1760 we have already clobbered with tail call arguments (as noted in the stored_args_map
1762 Return nonzero if X dereferences such argument slots,
1766 check_sibcall_argument_overlap_1 (rtx x)
1775 code = GET_CODE (x);
1778 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1779 GET_MODE_SIZE (GET_MODE (x)));
1781 /* Scan all subexpressions. */
1782 fmt = GET_RTX_FORMAT (code);
1783 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1787 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1790 else if (*fmt == 'E')
1792 for (j = 0; j < XVECLEN (x, i); j++)
1793 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1800 /* Scan the sequence after INSN to check whether it dereferences any argument slots
1801 we have already clobbered with tail call arguments (as noted in the stored_args_map
1802 bitmap). If MARK_STORED_ARGS_MAP, add the stack slots for ARG to the
1803 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
1804 should be 0). Return nonzero if the sequence after INSN dereferences such argument
1805 slots, zero otherwise. */
1808 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1812 if (insn == NULL_RTX)
1813 insn = get_insns ();
1815 insn = NEXT_INSN (insn);
1817 for (; insn; insn = NEXT_INSN (insn))
1819 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1822 if (mark_stored_args_map)
1824 #ifdef ARGS_GROW_DOWNWARD
1825 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1827 low = arg->locate.slot_offset.constant;
1830 for (high = low + arg->locate.size.constant; low < high; low++)
1831 SET_BIT (stored_args_map, low);
1833 return insn != NULL_RTX;
1836 /* Given that a function returns a value of mode MODE at the most
1837 significant end of hard register VALUE, shift VALUE left or right
1838 as specified by LEFT_P. Return true if some action was needed. */
1841 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1843 HOST_WIDE_INT shift;
1845 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1846 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
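/* For example (modes chosen for illustration): an SImode value returned at
   the most significant end of a 64-bit hard register gives
   shift == 64 - 32 == 32, so VALUE is shifted by 32 bits to move the data
   to or from the least significant end.  */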
1850 /* Use ashr rather than lshr for right shifts. This is for the benefit
1851 of the MIPS port, which requires SImode values to be sign-extended
1852 when stored in 64-bit registers. */
1853 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1854 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1859 /* Generate all the code for a CALL_EXPR exp
1860 and return an rtx for its value.
1861 Store the value in TARGET (specified as an rtx) if convenient.
1862 If the value is stored in TARGET then TARGET is returned.
1863 If IGNORE is nonzero, then we ignore the value of the function call. */
1866 expand_call (tree exp, rtx target, int ignore)
1868 /* Nonzero if we are currently expanding a call. */
1869 static int currently_expanding_call = 0;
1871 /* RTX for the function to be called. */
1873 /* Sequence of insns to perform a normal "call". */
1874 rtx normal_call_insns = NULL_RTX;
1875 /* Sequence of insns to perform a tail "call". */
1876 rtx tail_call_insns = NULL_RTX;
1877 /* Data type of the function. */
1879 tree type_arg_types;
1880 /* Declaration of the function being called,
1881 or 0 if the function is computed (not known by name). */
1883 /* The type of the function being called. */
1885 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1888 /* Register in which non-BLKmode value will be returned,
1889 or 0 if no value or if value is BLKmode. */
1891 /* Address where we should return a BLKmode value;
1892 0 if value not BLKmode. */
1893 rtx structure_value_addr = 0;
1894 /* Nonzero if that address is being passed by treating it as
1895 an extra, implicit first parameter. Otherwise,
1896 it is passed by being copied directly into struct_value_rtx. */
1897 int structure_value_addr_parm = 0;
1898 /* Holds the value of implicit argument for the struct value. */
1899 tree structure_value_addr_value = NULL_TREE;
1900 /* Size of aggregate value wanted, or zero if none wanted
1901 or if we are using the non-reentrant PCC calling convention
1902 or expecting the value in registers. */
1903 HOST_WIDE_INT struct_value_size = 0;
1904 /* Nonzero if called function returns an aggregate in memory PCC style,
1905 by returning the address of where to find it. */
1906 int pcc_struct_value = 0;
1907 rtx struct_value = 0;
1909 /* Number of actual parameters in this call, including struct value addr. */
1911 /* Number of named args. Args after this are anonymous ones
1912 and they must all go on the stack. */
1914 /* Number of complex actual arguments that need to be split. */
1915 int num_complex_actuals = 0;
1917 /* Vector of information about each argument.
1918 Arguments are numbered in the order they will be pushed,
1919 not the order they are written. */
1920 struct arg_data *args;
1922 /* Total size in bytes of all the stack-parms scanned so far. */
1923 struct args_size args_size;
1924 struct args_size adjusted_args_size;
1925 /* Size of arguments before any adjustments (such as rounding). */
1926 int unadjusted_args_size;
1927 /* Data on reg parms scanned so far. */
1928 CUMULATIVE_ARGS args_so_far;
1929 /* Nonzero if a reg parm has been scanned. */
1931 /* Nonzero if this is an indirect function call. */
1933 /* Nonzero if we must avoid push-insns in the args for this call.
1934 If stack space is allocated for register parameters, but not by the
1935 caller, then it is preallocated in the fixed part of the stack frame.
1936 So the entire argument block must then be preallocated (i.e., we
1937 ignore PUSH_ROUNDING in that case). */
1939 int must_preallocate = !PUSH_ARGS;
1941 /* Size of the stack reserved for parameter registers. */
1942 int reg_parm_stack_space = 0;
1944 /* Address of space preallocated for stack parms
1945 (on machines that lack push insns), or 0 if space not preallocated. */
1948 /* Mask of ECF_ flags. */
1950 #ifdef REG_PARM_STACK_SPACE
1951 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
1953 int low_to_save, high_to_save;
1954 rtx save_area = 0; /* Place that it is saved */
1957 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1958 char *initial_stack_usage_map = stack_usage_map;
1959 char *stack_usage_map_buf = NULL;
1961 int old_stack_allocated;
1963 /* State variables to track stack modifications. */
1964 rtx old_stack_level = 0;
1965 int old_stack_arg_under_construction = 0;
1966 int old_pending_adj = 0;
1967 int old_inhibit_defer_pop = inhibit_defer_pop;
1969 /* Some stack pointer alterations we make are performed via
1970 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1971 which we then also need to save/restore along the way. */
1972 int old_stack_pointer_delta = 0;
1975 tree p = CALL_EXPR_FN (exp);
1976 tree addr = CALL_EXPR_FN (exp);
1978 /* The alignment of the stack, in bits. */
1979 unsigned HOST_WIDE_INT preferred_stack_boundary;
1980 /* The alignment of the stack, in bytes. */
1981 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1982 /* The static chain value to use for this call. */
1983 rtx static_chain_value;
1984 /* See if this is a "nothrow" function call. */
1985 if (TREE_NOTHROW (exp))
1986 flags |= ECF_NOTHROW;
1988 /* See if we can find a DECL-node for the actual function, and get the
1989 function attributes (flags) from the function decl or type node. */
1990 fndecl = get_callee_fndecl (exp);
1993 fntype = TREE_TYPE (fndecl);
1994 flags |= flags_from_decl_or_type (fndecl);
1998 fntype = TREE_TYPE (TREE_TYPE (p));
1999 flags |= flags_from_decl_or_type (fntype);
2002 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2004 /* Warn if this value is an aggregate type,
2005 regardless of which calling convention we are using for it. */
2006 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2007 warning (OPT_Waggregate_return, "function call has aggregate value");
2009 /* If the result of a pure or const function call is ignored (or void),
2010 and none of its arguments are volatile, we can avoid expanding the
2011 call and just evaluate the arguments for side-effects. */
2012 if ((flags & (ECF_CONST | ECF_PURE))
2013 && (ignore || target == const0_rtx
2014 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2016 bool volatilep = false;
2018 call_expr_arg_iterator iter;
2020 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2021 if (TREE_THIS_VOLATILE (arg))
2029 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2030 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
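/* Hypothetical source-level illustration of the case handled above (user
   code, not part of this file):

     extern int sq (int) __attribute__ ((const));
     extern int next (void);
     void f (void) { sq (next ()); }

   Here sq is const and its result is unused, so only the argument expression
   next () needs to be expanded for its side effects; the call to sq itself
   can be skipped.  */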
2035 #ifdef REG_PARM_STACK_SPACE
2036 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2039 if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
2040 must_preallocate = 1;
2042 /* Set up a place to return a structure. */
2044 /* Cater to broken compilers. */
2045 if (aggregate_value_p (exp, fndecl))
2047 /* This call returns a big structure. */
2048 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2050 #ifdef PCC_STATIC_STRUCT_RETURN
2052 pcc_struct_value = 1;
2054 #else /* not PCC_STATIC_STRUCT_RETURN */
2056 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2058 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2059 structure_value_addr = XEXP (target, 0);
2062 /* For variable-sized objects, we must be called with a target
2063 specified. If we were to allocate space on the stack here,
2064 we would have no way of knowing when to free it. */
2065 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2067 mark_temp_addr_taken (d);
2068 structure_value_addr = XEXP (d, 0);
2072 #endif /* not PCC_STATIC_STRUCT_RETURN */
2075 /* Figure out the amount to which the stack should be aligned. */
2076 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2079 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2080 if (i && i->preferred_incoming_stack_boundary)
2081 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2084 /* Operand 0 is a pointer-to-function; get the type of the function. */
2085 funtype = TREE_TYPE (addr);
2086 gcc_assert (POINTER_TYPE_P (funtype));
2087 funtype = TREE_TYPE (funtype);
2089 /* Count whether there are actual complex arguments that need to be split
2090 into their real and imaginary parts. Munge the type_arg_types
2091 appropriately here as well. */
2092 if (targetm.calls.split_complex_arg)
2094 call_expr_arg_iterator iter;
2096 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2098 tree type = TREE_TYPE (arg);
2099 if (type && TREE_CODE (type) == COMPLEX_TYPE
2100 && targetm.calls.split_complex_arg (type))
2101 num_complex_actuals++;
2103 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2106 type_arg_types = TYPE_ARG_TYPES (funtype);
2108 if (flags & ECF_MAY_BE_ALLOCA)
2109 current_function_calls_alloca = 1;
2111 /* If struct_value_rtx is 0, it means pass the address
2112 as if it were an extra parameter. Put the argument expression
2113 in structure_value_addr_value. */
2114 if (structure_value_addr && struct_value == 0)
2116 /* If structure_value_addr is a REG other than
2117 virtual_outgoing_args_rtx, we can always use it. If it
2118 is not a REG, we must always copy it into a register.
2119 If it is virtual_outgoing_args_rtx, we must copy it to another
2120 register in some cases. */
2121 rtx temp = (!REG_P (structure_value_addr)
2122 || (ACCUMULATE_OUTGOING_ARGS
2123 && stack_arg_under_construction
2124 && structure_value_addr == virtual_outgoing_args_rtx)
2125 ? copy_addr_to_reg (convert_memory_address
2126 (Pmode, structure_value_addr))
2127 : structure_value_addr);
2129 structure_value_addr_value =
2130 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2131 structure_value_addr_parm = 1;
2134 /* Count the arguments and set NUM_ACTUALS. */
2136 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
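/* Worked example (hypothetical call): for f (i, z) where z has
   _Complex double type that the target splits, and where the function
   returns an aggregate whose address is passed as a hidden parameter,
   num_actuals = 2 written arguments + 1 extra slot for the split complex
   + 1 struct value address = 4.  */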
2138 /* Compute number of named args.
2139 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2141 if (type_arg_types != 0)
2143 = (list_length (type_arg_types)
2144 /* Count the struct value address, if it is passed as a parm. */
2145 + structure_value_addr_parm);
2147 /* If we know nothing, treat all args as named. */
2148 n_named_args = num_actuals;
2150 /* Start updating where the next arg would go.
2152 On some machines (such as the PA) indirect calls have a different
2153 calling convention than normal calls. The fourth argument in
2154 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not. */
2156 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2158 /* Now possibly adjust the number of named args.
2159 Normally, don't include the last named arg if anonymous args follow.
2160 We do include the last named arg if
2161 targetm.calls.strict_argument_naming() returns nonzero.
2162 (If no anonymous args follow, the result of list_length is actually
2163 one too large. This is harmless.)
2165 If targetm.calls.pretend_outgoing_varargs_named() returns
2166 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2167 this machine will be able to place unnamed args that were passed
2168 in registers into the stack. So treat all args as named. This
2169 allows the insns emitted for a specific argument list to be
2170 independent of the function declaration.
2172 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2173 we do not have any reliable way to pass unnamed args in
2174 registers, so we must force them into memory. */
2176 if (type_arg_types != 0
2177 && targetm.calls.strict_argument_naming (&args_so_far))
2179 else if (type_arg_types != 0
2180 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2181 /* Don't include the last named arg. */
2184 /* Treat all args as named. */
2185 n_named_args = num_actuals;
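/* Worked example (hypothetical prototype): for int f (int, ...) called with
   three actual arguments, the raw count above gives n_named_args == 1.  If
   strict_argument_naming returns nonzero that count is kept; otherwise, if
   pretend_outgoing_varargs_named returns zero, the last named argument is
   dropped from the count; otherwise all three actuals are treated as
   named.  */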
2187 /* Make a vector to hold all the information about each arg. */
2188 args = alloca (num_actuals * sizeof (struct arg_data));
2189 memset (args, 0, num_actuals * sizeof (struct arg_data));
2191 /* Build up entries in the ARGS array, compute the size of the
2192 arguments into ARGS_SIZE, etc. */
2193 initialize_argument_information (num_actuals, args, &args_size,
2195 structure_value_addr_value, fndecl,
2196 &args_so_far, reg_parm_stack_space,
2197 &old_stack_level, &old_pending_adj,
2198 &must_preallocate, &flags,
2199 &try_tail_call, CALL_FROM_THUNK_P (exp));
2203 /* If this function requires a variable-sized argument list, don't
2204 try to make a cse'able block for this call. We may be able to
2205 do this eventually, but it is too complicated to keep track of
2206 what insns go in the cse'able block and which don't. */
2208 flags &= ~ECF_LIBCALL_BLOCK;
2209 must_preallocate = 1;
2212 /* Now make final decision about preallocating stack space. */
2213 must_preallocate = finalize_must_preallocate (must_preallocate,
2217 /* If the structure value address will reference the stack pointer, we
2218 must stabilize it. We don't need to do this if we know that we are
2219 not going to adjust the stack pointer in processing this call. */
2221 if (structure_value_addr
2222 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2223 || reg_mentioned_p (virtual_outgoing_args_rtx,
2224 structure_value_addr))
2226 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2227 structure_value_addr = copy_to_reg (structure_value_addr);
2229 /* Tail calls can make things harder to debug, and we've traditionally
2230 pushed these optimizations into -O2. Don't try if we're already
2231 expanding a call, as that means we're an argument. Don't try if
2232 there are cleanups, as we know there's code to follow the call. */
2234 if (currently_expanding_call++ != 0
2235 || !flag_optimize_sibling_calls
2237 || lookup_stmt_eh_region (exp) >= 0
2238 || dbg_cnt (tail_call) == false)
2241 /* Other reasons for the tail call optimization to fail. */
2243 #ifdef HAVE_sibcall_epilogue
2244 !HAVE_sibcall_epilogue
2249 /* Doing sibling call optimization needs some work, since
2250 structure_value_addr can be allocated on the stack.
2251 It does not seem worth the effort since few optimizable
2252 sibling calls will return a structure. */
2253 || structure_value_addr != NULL_RTX
2254 /* Check whether the target is able to optimize the call into a sibcall. */
2256 || !targetm.function_ok_for_sibcall (fndecl, exp)
2257 /* Functions that do not return exactly once may not be sibcall optimized. */
2259 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2260 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2261 /* If the called function is nested in the current one, it might access
2262 some of the caller's arguments, but could clobber them beforehand if
2263 the argument areas are shared. */
2264 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2265 /* If this function requires more stack slots than the current
2266 function, we cannot change it into a sibling call.
2267 current_function_pretend_args_size is not part of the
2268 stack allocated by our caller. */
2269 || args_size.constant > (current_function_args_size
2270 - current_function_pretend_args_size)
2271 /* If the callee pops its own arguments, then it must pop exactly
2272 the same number of arguments as the current function. */
2273 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2274 != RETURN_POPS_ARGS (current_function_decl,
2275 TREE_TYPE (current_function_decl),
2276 current_function_args_size))
2277 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2280 /* Ensure current function's preferred stack boundary is at least
2281 what we need. We don't have to increase alignment for recursive functions. */
2283 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2284 && fndecl != current_function_decl)
2285 cfun->preferred_stack_boundary = preferred_stack_boundary;
2286 if (fndecl == current_function_decl)
2287 cfun->recursive_call_emit = true;
2289 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2291 /* We want to make two insn chains; one for a sibling call, the other
2292 for a normal call. We will select one of the two chains after
2293 initial RTL generation is complete. */
2294 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2296 int sibcall_failure = 0;
2297 /* We want to emit any pending stack adjustments before the tail
2298 recursion "call". That way we know any adjustment after the tail
2299 recursion call can be ignored if we indeed use the tail call sequence. */
2301 int save_pending_stack_adjust = 0;
2302 int save_stack_pointer_delta = 0;
2304 rtx before_call, next_arg_reg;
2308 /* State variables we need to save and restore between iterations. */
2310 save_pending_stack_adjust = pending_stack_adjust;
2311 save_stack_pointer_delta = stack_pointer_delta;
2314 flags &= ~ECF_SIBCALL;
2316 flags |= ECF_SIBCALL;
2318 /* Other state variables that we must reinitialize each time
2319 through the loop (that are not initialized by the loop itself). */
2323 /* Start a new sequence for the normal call case.
2325 From this point on, if the sibling call fails, we want to set
2326 sibcall_failure instead of continuing the loop. */
2329 /* Don't let pending stack adjusts add up to too much.
2330 Also, do all pending adjustments now if there is any chance
2331 this might be a call to alloca or if we are expanding a sibling
2332 call sequence or if we are calling a function that is to return
2333 with stack pointer depressed.
2334 Also do the adjustments before a throwing call, otherwise
2335 exception handling can fail; PR 19225. */
2336 if (pending_stack_adjust >= 32
2337 || (pending_stack_adjust > 0
2338 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2339 || (pending_stack_adjust > 0
2340 && flag_exceptions && !(flags & ECF_NOTHROW))
2342 do_pending_stack_adjust ();
2344 /* When calling a const function, we must pop the stack args right away,
2345 so that the pop is deleted or moved with the call. */
2346 if (pass && (flags & ECF_LIBCALL_BLOCK))
2349 /* Precompute any arguments as needed. */
2351 precompute_arguments (flags, num_actuals, args);
2353 /* Now we are about to start emitting insns that can be deleted
2354 if a libcall is deleted. */
2355 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2358 if (pass == 0 && cfun->stack_protect_guard)
2359 stack_protect_epilogue ();
2361 adjusted_args_size = args_size;
2362 /* Compute the actual size of the argument block required. The variable
2363 and constant sizes must be combined, the size may have to be rounded,
2364 and there may be a minimum required size. When generating a sibcall
2365 pattern, do not round up, since we'll be re-using whatever space our caller provided. */
2367 unadjusted_args_size
2368 = compute_argument_block_size (reg_parm_stack_space,
2369 &adjusted_args_size,
2371 : preferred_stack_boundary));
2373 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2375 /* The argument block when performing a sibling call is the
2376 incoming argument block. */
2379 argblock = virtual_incoming_args_rtx;
2381 #ifdef STACK_GROWS_DOWNWARD
2382 = plus_constant (argblock, current_function_pretend_args_size);
2384 = plus_constant (argblock, -current_function_pretend_args_size);
2386 stored_args_map = sbitmap_alloc (args_size.constant);
2387 sbitmap_zero (stored_args_map);
2390 /* If we have no actual push instructions, or shouldn't use them,
2391 make space for all args right now. */
2392 else if (adjusted_args_size.var != 0)
2394 if (old_stack_level == 0)
2396 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2397 old_stack_pointer_delta = stack_pointer_delta;
2398 old_pending_adj = pending_stack_adjust;
2399 pending_stack_adjust = 0;
2400 /* stack_arg_under_construction says whether a stack arg is
2401 being constructed at the old stack level. Pushing the stack
2402 gets a clean outgoing argument block. */
2403 old_stack_arg_under_construction = stack_arg_under_construction;
2404 stack_arg_under_construction = 0;
2406 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2410 /* Note that we must go through the motions of allocating an argument
2411 block even if the size is zero because we may be storing args
2412 in the area reserved for register arguments, which may be part of the stack frame. */
2415 int needed = adjusted_args_size.constant;
2417 /* Store the maximum argument space used. It will be pushed by
2418 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
2421 if (needed > current_function_outgoing_args_size)
2422 current_function_outgoing_args_size = needed;
2424 if (must_preallocate)
2426 if (ACCUMULATE_OUTGOING_ARGS)
2428 /* Since the stack pointer will never be pushed, it is
2429 possible for the evaluation of a parm to clobber
2430 something we have already written to the stack.
2431 Since most function calls on RISC machines do not use
2432 the stack, this is uncommon, but must work correctly.
2434 Therefore, we save any area of the stack that was already
2435 written and that we are using. Here we set up to do this
2436 by making a new stack usage map from the old one. The
2437 actual save will be done by store_one_arg.
2439 Another approach might be to try to reorder the argument
2440 evaluations to avoid this conflicting stack usage. */
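/* Illustration of the map described above (hypothetical offsets): each byte
   of outgoing argument space has one char in stack_usage_map, so if an
   earlier argument was stored at bytes 16..23 those entries are nonzero;
   when evaluating a later argument (for example one containing a nested
   call) would write into any nonzero byte, store_one_arg first saves that
   region and it is restored after the call.  */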
2442 /* Since we will be writing into the entire argument area,
2443 the map must be allocated for its entire size, not just
2444 the part that is the responsibility of the caller. */
2445 if (!OUTGOING_REG_PARM_STACK_SPACE)
2446 needed += reg_parm_stack_space;
2448 #ifdef ARGS_GROW_DOWNWARD
2449 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2452 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2455 if (stack_usage_map_buf)
2456 free (stack_usage_map_buf);
2457 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2458 stack_usage_map = stack_usage_map_buf;
2460 if (initial_highest_arg_in_use)
2461 memcpy (stack_usage_map, initial_stack_usage_map,
2462 initial_highest_arg_in_use);
2464 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2465 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2466 (highest_outgoing_arg_in_use
2467 - initial_highest_arg_in_use));
2470 /* The address of the outgoing argument list must not be
2471 copied to a register here, because argblock would be left
2472 pointing to the wrong place after the call to
2473 allocate_dynamic_stack_space below. */
2475 argblock = virtual_outgoing_args_rtx;
2479 if (inhibit_defer_pop == 0)
2481 /* Try to reuse some or all of the pending_stack_adjust
2482 to get this space. */
2484 = (combine_pending_stack_adjustment_and_call
2485 (unadjusted_args_size,
2486 &adjusted_args_size,
2487 preferred_unit_stack_boundary));
2489 /* combine_pending_stack_adjustment_and_call computes
2490 an adjustment before the arguments are allocated.
2491 Account for them and see whether or not the stack
2492 needs to go up or down. */
2493 needed = unadjusted_args_size - needed;
2497 /* We're releasing stack space. */
2498 /* ??? We can avoid any adjustment at all if we're
2499 already aligned. FIXME. */
2500 pending_stack_adjust = -needed;
2501 do_pending_stack_adjust ();
2505 /* We need to allocate space. We'll do that in
2506 push_block below. */
2507 pending_stack_adjust = 0;
2510 /* Special case this because overhead of `push_block' in
2511 this case is non-trivial. */
2513 argblock = virtual_outgoing_args_rtx;
2516 argblock = push_block (GEN_INT (needed), 0, 0);
2517 #ifdef ARGS_GROW_DOWNWARD
2518 argblock = plus_constant (argblock, needed);
2522 /* We only really need to call `copy_to_reg' in the case
2523 where push insns are going to be used to pass ARGBLOCK
2524 to a function call in ARGS. In that case, the stack
2525 pointer changes value from the allocation point to the
2526 call point, and hence the value of
2527 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2528 as well always do it. */
2529 argblock = copy_to_reg (argblock);
2534 if (ACCUMULATE_OUTGOING_ARGS)
2536 /* The save/restore code in store_one_arg handles all
2537 cases except one: a constructor call (including a C
2538 function returning a BLKmode struct) to initialize an argument. */
2540 if (stack_arg_under_construction)
2543 = GEN_INT (adjusted_args_size.constant
2544 + (OUTGOING_REG_PARM_STACK_SPACE ? 0
2545 : reg_parm_stack_space));
2546 if (old_stack_level == 0)
2548 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2550 old_stack_pointer_delta = stack_pointer_delta;
2551 old_pending_adj = pending_stack_adjust;
2552 pending_stack_adjust = 0;
2553 /* stack_arg_under_construction says whether a stack
2554 arg is being constructed at the old stack level.
2555 Pushing the stack gets a clean outgoing argument block. */
2557 old_stack_arg_under_construction
2558 = stack_arg_under_construction;
2559 stack_arg_under_construction = 0;
2560 /* Make a new map for the new argument list. */
2561 if (stack_usage_map_buf)
2562 free (stack_usage_map_buf);
2563 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2564 stack_usage_map = stack_usage_map_buf;
2565 highest_outgoing_arg_in_use = 0;
2567 allocate_dynamic_stack_space (push_size, NULL_RTX,
2571 /* If argument evaluation might modify the stack pointer,
2572 copy the address of the argument list to a register. */
2573 for (i = 0; i < num_actuals; i++)
2574 if (args[i].pass_on_stack)
2576 argblock = copy_addr_to_reg (argblock);
2581 compute_argument_addresses (args, argblock, num_actuals);
2583 /* If we push args individually in reverse order, perform stack alignment
2584 before the first push (the last arg). */
2585 if (PUSH_ARGS_REVERSED && argblock == 0
2586 && adjusted_args_size.constant != unadjusted_args_size)
2588 /* When the stack adjustment is pending, we get better code
2589 by combining the adjustments. */
2590 if (pending_stack_adjust
2591 && ! (flags & ECF_LIBCALL_BLOCK)
2592 && ! inhibit_defer_pop)
2594 pending_stack_adjust
2595 = (combine_pending_stack_adjustment_and_call
2596 (unadjusted_args_size,
2597 &adjusted_args_size,
2598 preferred_unit_stack_boundary));
2599 do_pending_stack_adjust ();
2601 else if (argblock == 0)
2602 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2603 - unadjusted_args_size));
2605 /* Now that the stack is properly aligned, pops can't safely
2606 be deferred during the evaluation of the arguments. */
2609 funexp = rtx_for_function_call (fndecl, addr);
2611 /* Figure out the register where the value, if any, will come back. */
2613 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2614 && ! structure_value_addr)
2616 if (pcc_struct_value)
2617 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2618 fndecl, NULL, (pass == 0));
2620 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2623 /* If VALREG is a PARALLEL whose first member has a zero
2624 offset, use that. This is for targets such as m68k that
2625 return the same value in multiple places. */
2626 if (GET_CODE (valreg) == PARALLEL)
2628 rtx elem = XVECEXP (valreg, 0, 0);
2629 rtx where = XEXP (elem, 0);
2630 rtx offset = XEXP (elem, 1);
2631 if (offset == const0_rtx
2632 && GET_MODE (where) == GET_MODE (valreg))
2637 /* Precompute all register parameters. It isn't safe to compute anything
2638 once we have started filling any specific hard regs. */
2639 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2641 if (CALL_EXPR_STATIC_CHAIN (exp))
2642 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2644 static_chain_value = 0;
2646 #ifdef REG_PARM_STACK_SPACE
2647 /* Save the fixed argument area if it's part of the caller's frame and
2648 is clobbered by argument setup for this call. */
2649 if (ACCUMULATE_OUTGOING_ARGS && pass)
2650 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2651 &low_to_save, &high_to_save);
2654 /* Now store (and compute if necessary) all non-register parms.
2655 These come before register parms, since they can require block-moves,
2656 which could clobber the registers used for register parms.
2657 Parms which have partial registers are not stored here,
2658 but we do preallocate space here if they want that. */
2660 for (i = 0; i < num_actuals; i++)
2661 if (args[i].reg == 0 || args[i].pass_on_stack)
2663 rtx before_arg = get_last_insn ();
2665 if (store_one_arg (&args[i], argblock, flags,
2666 adjusted_args_size.var != 0,
2667 reg_parm_stack_space)
2669 && check_sibcall_argument_overlap (before_arg,
2671 sibcall_failure = 1;
2673 if (flags & ECF_CONST
2675 && args[i].value == args[i].stack)
2676 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2677 gen_rtx_USE (VOIDmode,
2682 /* If we have a parm that is passed in registers but not in memory
2683 and whose alignment does not permit a direct copy into registers,
2684 make a group of pseudos that correspond to each register that we will later fill. */
2686 if (STRICT_ALIGNMENT)
2687 store_unaligned_arguments_into_pseudos (args, num_actuals);
2689 /* Now store any partially-in-registers parm.
2690 This is the last place a block-move can happen. */
2692 for (i = 0; i < num_actuals; i++)
2693 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2695 rtx before_arg = get_last_insn ();
2697 if (store_one_arg (&args[i], argblock, flags,
2698 adjusted_args_size.var != 0,
2699 reg_parm_stack_space)
2701 && check_sibcall_argument_overlap (before_arg,
2703 sibcall_failure = 1;
2706 /* If we pushed args in forward order, perform stack alignment
2707 after pushing the last arg. */
2708 if (!PUSH_ARGS_REVERSED && argblock == 0)
2709 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2710 - unadjusted_args_size));
2712 /* If register arguments require space on the stack and stack space
2713 was not preallocated, allocate stack space here for arguments
2714 passed in registers. */
2715 if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
2716 && must_preallocate == 0 && reg_parm_stack_space > 0)
2717 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2719 /* Pass the function the address in which to return a structure value. */
2721 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2723 structure_value_addr
2724 = convert_memory_address (Pmode, structure_value_addr);
2725 emit_move_insn (struct_value,
2727 force_operand (structure_value_addr,
2730 if (REG_P (struct_value))
2731 use_reg (&call_fusage, struct_value);
2734 funexp = prepare_call_address (funexp, static_chain_value,
2735 &call_fusage, reg_parm_seen, pass == 0);
2737 load_register_parameters (args, num_actuals, &call_fusage, flags,
2738 pass == 0, &sibcall_failure);
2740 /* Save a pointer to the last insn before the call, so that we can
2741 later safely search backwards to find the CALL_INSN. */
2742 before_call = get_last_insn ();
2744 /* Set up next argument register. For sibling calls on machines
2745 with register windows this should be the incoming register. */
2746 #ifdef FUNCTION_INCOMING_ARG
2748 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2752 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2755 /* All arguments and registers used for the call must be set up by now! */
2758 /* Stack must be properly aligned now. */
2760 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2762 /* Generate the actual call instruction. */
2763 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2764 adjusted_args_size.constant, struct_value_size,
2765 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2766 flags, & args_so_far);
2768 /* If a non-BLKmode value is returned at the most significant end
2769 of a register, shift the register right by the appropriate amount
2770 and update VALREG accordingly. BLKmode values are handled by the
2771 group load/store machinery below. */
2772 if (!structure_value_addr
2773 && !pcc_struct_value
2774 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2775 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2777 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2778 sibcall_failure = 1;
2779 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2782 /* If call is cse'able, make appropriate pair of reg-notes around it.
2783 Test valreg so we don't crash; may safely ignore `const'
2784 if return type is void. Disable for PARALLEL return values, because
2785 we have no way to move such values into a pseudo register. */
2786 if (pass && (flags & ECF_LIBCALL_BLOCK))
2790 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2792 insns = get_insns ();
2794 /* Expansion of block moves possibly introduced a loop that may
2795 not appear inside the libcall block. */
2796 for (insn = insns; insn; insn = NEXT_INSN (insn))
2808 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2810 /* Mark the return value as a pointer if needed. */
2811 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2812 mark_reg_pointer (temp,
2813 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2816 if (flag_unsafe_math_optimizations
2818 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2819 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2820 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2821 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2822 note = gen_rtx_fmt_e (SQRT,
2824 args[0].initial_value);
2827 /* Construct an "equal form" for the value which
2828 mentions all the arguments in order as well as
2829 the function name. */
2830 for (i = 0; i < num_actuals; i++)
2831 note = gen_rtx_EXPR_LIST (VOIDmode,
2832 args[i].initial_value, note);
2833 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2835 if (flags & ECF_PURE)
2836 note = gen_rtx_EXPR_LIST (VOIDmode,
2837 gen_rtx_USE (VOIDmode,
2838 gen_rtx_MEM (BLKmode,
2839 gen_rtx_SCRATCH (VOIDmode))),
2842 emit_libcall_block (insns, temp, valreg, note);
2847 else if (pass && (flags & ECF_MALLOC))
2849 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2852 /* The return value from a malloc-like function is a pointer. */
2853 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2854 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2856 emit_move_insn (temp, valreg);
2858 /* The return value from a malloc-like function cannot alias anything else. */
2860 last = get_last_insn ();
2862 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2864 /* Write out the sequence. */
2865 insns = get_insns ();
2871 /* For calls to `setjmp', etc., inform
2872 function.c:setjmp_warnings that it should complain if
2873 nonvolatile values are live. For functions that cannot
2874 return, inform flow that control does not fall through. */
2876 if ((flags & ECF_NORETURN) || pass == 0)
2878 /* The barrier must be emitted
2879 immediately after the CALL_INSN. Some ports emit more
2880 than just a CALL_INSN above, so we must search for it here. */
2882 rtx last = get_last_insn ();
2883 while (!CALL_P (last))
2885 last = PREV_INSN (last);
2886 /* There was no CALL_INSN? */
2887 gcc_assert (last != before_call);
2890 emit_barrier_after (last);
2892 /* Stack adjustments after a noreturn call are dead code.
2893 However when NO_DEFER_POP is in effect, we must preserve
2894 stack_pointer_delta. */
2895 if (inhibit_defer_pop == 0)
2897 stack_pointer_delta = old_stack_allocated;
2898 pending_stack_adjust = 0;
2902 /* If value type not void, return an rtx for the value. */
2904 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2906 target = const0_rtx;
2907 else if (structure_value_addr)
2909 if (target == 0 || !MEM_P (target))
2912 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2913 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2914 structure_value_addr));
2915 set_mem_attributes (target, exp, 1);
2918 else if (pcc_struct_value)
2920 /* This is the special C++ case where we need to
2921 know what the true target was. We take care to
2922 never use this value more than once in one expression. */
2923 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2924 copy_to_reg (valreg));
2925 set_mem_attributes (target, exp, 1);
2927 /* Handle calls that return values in multiple non-contiguous locations.
2928 The Irix 6 ABI has examples of this. */
2929 else if (GET_CODE (valreg) == PARALLEL)
2933 /* This will only be assigned once, so it can be readonly. */
2934 tree nt = build_qualified_type (TREE_TYPE (exp),
2935 (TYPE_QUALS (TREE_TYPE (exp))
2936 | TYPE_QUAL_CONST));
2938 target = assign_temp (nt, 0, 1, 1);
2941 if (! rtx_equal_p (target, valreg))
2942 emit_group_store (target, valreg, TREE_TYPE (exp),
2943 int_size_in_bytes (TREE_TYPE (exp)));
2945 /* We can not support sibling calls for this case. */
2946 sibcall_failure = 1;
2949 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2950 && GET_MODE (target) == GET_MODE (valreg))
2952 bool may_overlap = false;
2954 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2955 reg to a plain register. */
2957 && HARD_REGISTER_P (valreg)
2958 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
2959 && !(REG_P (target) && !HARD_REGISTER_P (target)))
2960 valreg = copy_to_reg (valreg);
2962 /* If TARGET is a MEM in the argument area, and we have
2963 saved part of the argument area, then we can't store
2964 directly into TARGET as it may get overwritten when we
2965 restore the argument save area below. Don't work too
2966 hard though and simply force TARGET to a register if it
2967 is a MEM; the optimizer is quite likely to sort it out. */
2968 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2969 for (i = 0; i < num_actuals; i++)
2970 if (args[i].save_area)
2977 target = copy_to_reg (valreg);
2980 /* TARGET and VALREG cannot be equal at this point
2981 because the latter would not have
2982 REG_FUNCTION_VALUE_P true, while the former would if
2983 it were referring to the same register.
2985 If they refer to the same register, this move will be
2986 a no-op, except when function inlining is being done. */
2988 emit_move_insn (target, valreg);
2990 /* If we are setting a MEM, this code must be executed.
2991 Since it is emitted after the call insn, sibcall
2992 optimization cannot be performed in that case. */
2994 sibcall_failure = 1;
2997 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2999 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3001 /* We can not support sibling calls for this case. */
3002 sibcall_failure = 1;
3005 target = copy_to_reg (valreg);
3007 if (targetm.calls.promote_function_return(funtype))
3009 /* If we promoted this return value, make the proper SUBREG.
3010 TARGET might be const0_rtx here, so be careful. */
3012 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3013 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3015 tree type = TREE_TYPE (exp);
3016 int unsignedp = TYPE_UNSIGNED (type);
3018 enum machine_mode pmode;
3020 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3021 /* If we don't promote as expected, something is wrong. */
3022 gcc_assert (GET_MODE (target) == pmode);
3024 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3025 && (GET_MODE_SIZE (GET_MODE (target))
3026 > GET_MODE_SIZE (TYPE_MODE (type))))
3028 offset = GET_MODE_SIZE (GET_MODE (target))
3029 - GET_MODE_SIZE (TYPE_MODE (type));
3030 if (! BYTES_BIG_ENDIAN)
3031 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3032 else if (! WORDS_BIG_ENDIAN)
3033 offset %= UNITS_PER_WORD;
3035 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3036 SUBREG_PROMOTED_VAR_P (target) = 1;
3037 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
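/* Worked example of the offset above (hypothetical target): with
   UNITS_PER_WORD == 4 on a target where both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN hold, a DImode (8-byte) promoted register holding an
   SImode (4-byte) value gets offset = 8 - 4 = 4, i.e. the low-order word;
   on a little-endian target the condition is false and the SUBREG offset
   stays 0.  */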
3041 /* If size of args is variable or this was a constructor call for a stack
3042 argument, restore saved stack-pointer value. */
3044 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3046 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3047 stack_pointer_delta = old_stack_pointer_delta;
3048 pending_stack_adjust = old_pending_adj;
3049 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3050 stack_arg_under_construction = old_stack_arg_under_construction;
3051 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3052 stack_usage_map = initial_stack_usage_map;
3053 sibcall_failure = 1;
3055 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3057 #ifdef REG_PARM_STACK_SPACE
3059 restore_fixed_argument_area (save_area, argblock,
3060 high_to_save, low_to_save);
3063 /* If we saved any argument areas, restore them. */
3064 for (i = 0; i < num_actuals; i++)
3065 if (args[i].save_area)
3067 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3069 = gen_rtx_MEM (save_mode,
3070 memory_address (save_mode,
3071 XEXP (args[i].stack_slot, 0)));
3073 if (save_mode != BLKmode)
3074 emit_move_insn (stack_area, args[i].save_area);
3076 emit_block_move (stack_area, args[i].save_area,
3077 GEN_INT (args[i].locate.size.constant),
3078 BLOCK_OP_CALL_PARM);
3081 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3082 stack_usage_map = initial_stack_usage_map;
3085 /* If this was alloca, record the new stack level for nonlocal gotos.
3086 Check for the handler slots since we might not have a save area
3087 for non-local gotos. */
3089 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3090 update_nonlocal_goto_save_area ();
3092 /* Free up storage we no longer need. */
3093 for (i = 0; i < num_actuals; ++i)
3094 if (args[i].aligned_regs)
3095 free (args[i].aligned_regs);
3097 insns = get_insns ();
3102 tail_call_insns = insns;
3104 /* Restore the pending stack adjustment now that we have
3105 finished generating the sibling call sequence. */
3107 pending_stack_adjust = save_pending_stack_adjust;
3108 stack_pointer_delta = save_stack_pointer_delta;
3110 /* Prepare arg structure for next iteration. */
3111 for (i = 0; i < num_actuals; i++)
3114 args[i].aligned_regs = 0;
3118 sbitmap_free (stored_args_map);
3122 normal_call_insns = insns;
3124 /* Verify that we've deallocated all the stack we used. */
3125 gcc_assert ((flags & ECF_NORETURN)
3126 || (old_stack_allocated
3127 == stack_pointer_delta - pending_stack_adjust));
3130 /* If something prevents making this a sibling call,
3131 zero out the sequence. */
3132 if (sibcall_failure)
3133 tail_call_insns = NULL_RTX;
3138 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3139 arguments too, as the argument area is now clobbered by the call. */
3140 if (tail_call_insns)
3142 emit_insn (tail_call_insns);
3143 cfun->tail_call_emit = true;
3146 emit_insn (normal_call_insns);
3148 currently_expanding_call--;
3150 /* If this function returns with the stack pointer depressed, ensure
3151 this block saves and restores the stack pointer, show it was
3152 changed, and adjust for any outgoing arg space. */
3153 if (flags & ECF_SP_DEPRESSED)
3155 clear_pending_stack_adjust ();
3156 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3157 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3160 if (stack_usage_map_buf)
3161 free (stack_usage_map_buf);
3166 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3167 this function's incoming arguments.
3169 At the start of RTL generation we know the only REG_EQUIV notes
3170 in the rtl chain are those for incoming arguments, so we can look
3171 for REG_EQUIV notes between the start of the function and the
3172 NOTE_INSN_FUNCTION_BEG.
3174 This is (slight) overkill. We could keep track of the highest
3175 argument we clobber and be more selective in removing notes, but it
3176 does not seem to be worth the effort. */
3179 fixup_tail_calls (void)
3183 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3187 /* There are never REG_EQUIV notes for the incoming arguments
3188 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3190 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3193 note = find_reg_note (insn, REG_EQUIV, 0);
3195 remove_note (insn, note);
3196 note = find_reg_note (insn, REG_EQUIV, 0);
3201 /* Traverse a list of TYPES and expand all complex types into their components. */
3204 split_complex_types (tree types)
3208 /* Before allocating memory, check for the common case of no complex types. */
3209 for (p = types; p; p = TREE_CHAIN (p))
3211 tree type = TREE_VALUE (p);
3212 if (TREE_CODE (type) == COMPLEX_TYPE
3213 && targetm.calls.split_complex_arg (type))
3219 types = copy_list (types);
3221 for (p = types; p; p = TREE_CHAIN (p))
3223 tree complex_type = TREE_VALUE (p);
3225 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3226 && targetm.calls.split_complex_arg (complex_type))
3230 /* Rewrite complex type with component type. */
3231 TREE_VALUE (p) = TREE_TYPE (complex_type);
3232 next = TREE_CHAIN (p);
3234 /* Add another component type for the imaginary part. */
3235 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3236 TREE_CHAIN (p) = imag;
3237 TREE_CHAIN (imag) = next;
3239 /* Skip the newly created node. */
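/* Illustration (hypothetical argument list): if the target splits complex
   arguments, a type list corresponding to (complex double, int) is rewritten
   in place to (double, double, int), where the first double stands for the
   real part and the inserted node for the imaginary part.  */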
3247 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3248 The RETVAL parameter specifies whether the return value needs to be saved; the
3249 other parameters are documented in the emit_library_call function below. */
3252 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3253 enum libcall_type fn_type,
3254 enum machine_mode outmode, int nargs, va_list p)
3256 /* Total size in bytes of all the stack-parms scanned so far. */
3257 struct args_size args_size;
3258 /* Size of arguments before any adjustments (such as rounding). */
3259 struct args_size original_args_size;
3265 CUMULATIVE_ARGS args_so_far;
3269 enum machine_mode mode;
3272 struct locate_and_pad_arg_data locate;
3276 int old_inhibit_defer_pop = inhibit_defer_pop;
3277 rtx call_fusage = 0;
3280 int pcc_struct_value = 0;
3281 int struct_value_size = 0;
3283 int reg_parm_stack_space = 0;
3286 tree tfom; /* type_for_mode (outmode, 0) */
3288 #ifdef REG_PARM_STACK_SPACE
3289 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
3291 int low_to_save, high_to_save;
3292 rtx save_area = 0; /* Place that it is saved. */
3295 /* Initial state of the stack usage map, to be restored after the call. */
3296 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3297 char *initial_stack_usage_map = stack_usage_map;
3298 char *stack_usage_map_buf = NULL;
3300 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3302 #ifdef REG_PARM_STACK_SPACE
3303 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3306 /* By default, library functions can not throw. */
3307 flags = ECF_NOTHROW;
3319 case LCT_CONST_MAKE_BLOCK:
3320 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3322 case LCT_PURE_MAKE_BLOCK:
3323 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3326 flags |= ECF_NORETURN;
3329 flags = ECF_NORETURN;
3331 case LCT_RETURNS_TWICE:
3332 flags = ECF_RETURNS_TWICE;
3337 /* Ensure current function's preferred stack boundary is at least what we need. */
3339 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3340 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3342 /* If this kind of value comes back in memory,
3343 decide where in memory it should come back. */
3344 if (outmode != VOIDmode)
3346 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3347 if (aggregate_value_p (tfom, 0))
3349 #ifdef PCC_STATIC_STRUCT_RETURN
3351 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3352 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3353 pcc_struct_value = 1;
3355 value = gen_reg_rtx (outmode);
3356 #else /* not PCC_STATIC_STRUCT_RETURN */
3357 struct_value_size = GET_MODE_SIZE (outmode);
3358 if (value != 0 && MEM_P (value))
3361 mem_value = assign_temp (tfom, 0, 1, 1);
3363 /* This call returns a big structure. */
3364 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3368 tfom = void_type_node;
3370 /* ??? Unfinished: must pass the memory address as an argument. */
3372 /* Copy all the libcall-arguments out of the varargs data
3373 and into a vector ARGVEC.
3375 Compute how to pass each argument. We only support a very small subset
3376 of the full argument passing conventions to limit complexity here since
3377 library functions shouldn't have many args. */
3379 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3380 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3382 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3383 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3385 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3388 args_size.constant = 0;
3393 /* Now we are about to start emitting insns that can be deleted
3394 if a libcall is deleted. */
3395 if (flags & ECF_LIBCALL_BLOCK)
3400 /* If there's a structure value address to be passed,
3401 either pass it in the special place, or pass it as an extra argument. */
3402 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3404 rtx addr = XEXP (mem_value, 0);
3408 /* Make sure it is a reasonable operand for a move or push insn. */
3409 if (!REG_P (addr) && !MEM_P (addr)
3410 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3411 addr = force_operand (addr, NULL_RTX);
3413 argvec[count].value = addr;
3414 argvec[count].mode = Pmode;
3415 argvec[count].partial = 0;
3417 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3418 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3419 NULL_TREE, 1) == 0);
3421 locate_and_pad_parm (Pmode, NULL_TREE,
3422 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3425 argvec[count].reg != 0,
3427 0, NULL_TREE, &args_size, &argvec[count].locate);
3429 if (argvec[count].reg == 0 || argvec[count].partial != 0
3430 || reg_parm_stack_space > 0)
3431 args_size.constant += argvec[count].locate.size.constant;
3433 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3438 for (; count < nargs; count++)
3440 rtx val = va_arg (p, rtx);
3441 enum machine_mode mode = va_arg (p, enum machine_mode);
3443 /* We cannot convert the arg value to the mode the library wants here;
3444 must do it earlier where we know the signedness of the arg. */
3445 gcc_assert (mode != BLKmode
3446 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3448 /* Make sure it is a reasonable operand for a move or push insn. */
3449 if (!REG_P (val) && !MEM_P (val)
3450 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3451 val = force_operand (val, NULL_RTX);
3453 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3457 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3459 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3460 functions, so we have to pretend this isn't such a function. */
3461 if (flags & ECF_LIBCALL_BLOCK)
3463 rtx insns = get_insns ();
3467 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3469 /* If this was a CONST function, it is now PURE since
3470 it now reads memory. */
3471 if (flags & ECF_CONST)
3473 flags &= ~ECF_CONST;
3477 if (MEM_P (val) && !must_copy)
3481 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3483 emit_move_insn (slot, val);
3486 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3487 gen_rtx_USE (VOIDmode, slot),
3490 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3491 gen_rtx_CLOBBER (VOIDmode,
3496 val = force_operand (XEXP (slot, 0), NULL_RTX);
3499 argvec[count].value = val;
3500 argvec[count].mode = mode;
3502 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3504 argvec[count].partial
3505 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3507 locate_and_pad_parm (mode, NULL_TREE,
3508 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3511 argvec[count].reg != 0,
3513 argvec[count].partial,
3514 NULL_TREE, &args_size, &argvec[count].locate);
3516 gcc_assert (!argvec[count].locate.size.var);
3518 if (argvec[count].reg == 0 || argvec[count].partial != 0
3519 || reg_parm_stack_space > 0)
3520 args_size.constant += argvec[count].locate.size.constant;
3522 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3525 /* If this machine requires an external definition for library
3526 functions, write one out. */
3527 assemble_external_libcall (fun);
3529 original_args_size = args_size;
3530 args_size.constant = (((args_size.constant
3531 + stack_pointer_delta
3532 + STACK_BYTES - 1)
3533 / STACK_BYTES
3534 * STACK_BYTES)
3535 - stack_pointer_delta);
3537 args_size.constant = MAX (args_size.constant,
3538 reg_parm_stack_space);
3540 if (!OUTGOING_REG_PARM_STACK_SPACE)
3541 args_size.constant -= reg_parm_stack_space;
3543 if (args_size.constant > current_function_outgoing_args_size)
3544 current_function_outgoing_args_size = args_size.constant;
3546 if (ACCUMULATE_OUTGOING_ARGS)
3548 /* Since the stack pointer will never be pushed, it is possible for
3549 the evaluation of a parm to clobber something we have already
3550 written to the stack. Since most function calls on RISC machines
3551 do not use the stack, this is uncommon, but must work correctly.
3553 Therefore, we save any area of the stack that was already written
3554 and that we are using. Here we set up to do this by making a new
3555 stack usage map from the old one.
3557 Another approach might be to try to reorder the argument
3558 evaluations to avoid this conflicting stack usage. */
3560 needed = args_size.constant;
3562 /* Since we will be writing into the entire argument area, the
3563 map must be allocated for its entire size, not just the part that
3564 is the responsibility of the caller. */
3565 if (!OUTGOING_REG_PARM_STACK_SPACE)
3566 needed += reg_parm_stack_space;
3568 #ifdef ARGS_GROW_DOWNWARD
3569 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3572 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3575 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3576 stack_usage_map = stack_usage_map_buf;
3578 if (initial_highest_arg_in_use)
3579 memcpy (stack_usage_map, initial_stack_usage_map,
3580 initial_highest_arg_in_use);
3582 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3583 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3584 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3587 /* We must be careful to use virtual regs before they're instantiated,
3588 and real regs afterwards. Loop optimization, for example, can create
3589 new libcalls after we've instantiated the virtual regs, and if we
3590 use virtuals anyway, they won't match the rtl patterns. */
3592 if (virtuals_instantiated)
3593 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3595 argblock = virtual_outgoing_args_rtx;
3600 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3603 /* If we push args individually in reverse order, perform stack alignment
3604 before the first push (the last arg). */
3605 if (argblock == 0 && PUSH_ARGS_REVERSED)
3606 anti_adjust_stack (GEN_INT (args_size.constant
3607 - original_args_size.constant));
3609 if (PUSH_ARGS_REVERSED)
3620 #ifdef REG_PARM_STACK_SPACE
3621 if (ACCUMULATE_OUTGOING_ARGS)
3623 /* The argument list is the property of the called routine and it
3624 may clobber it. If the fixed area has been used for previous
3625 parameters, we must save and restore it. */
3626 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3627 &low_to_save, &high_to_save);
3631 /* Push the args that need to be pushed. */
3633 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3634 are to be pushed. */
3635 for (count = 0; count < nargs; count++, argnum += inc)
3637 enum machine_mode mode = argvec[argnum].mode;
3638 rtx val = argvec[argnum].value;
3639 rtx reg = argvec[argnum].reg;
3640 int partial = argvec[argnum].partial;
3641 int lower_bound = 0, upper_bound = 0, i;
3643 if (! (reg != 0 && partial == 0))
3645 if (ACCUMULATE_OUTGOING_ARGS)
3647 /* If this is being stored into a pre-allocated, fixed-size,
3648 stack area, save any previous data at that location. */
3650 #ifdef ARGS_GROW_DOWNWARD
3651 /* stack_slot is negative, but we want to index stack_usage_map
3652 with positive values. */
3653 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3654 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3656 lower_bound = argvec[argnum].locate.offset.constant;
3657 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3661 /* Don't worry about things in the fixed argument area;
3662 it has already been saved. */
3663 if (i < reg_parm_stack_space)
3664 i = reg_parm_stack_space;
3665 while (i < upper_bound && stack_usage_map[i] == 0)
3668 if (i < upper_bound)
3670 /* We need to make a save area. */
3672 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3673 enum machine_mode save_mode
3674 = mode_for_size (size, MODE_INT, 1);
3676 = plus_constant (argblock,
3677 argvec[argnum].locate.offset.constant);
3679 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3681 if (save_mode == BLKmode)
3683 argvec[argnum].save_area
3684 = assign_stack_temp (BLKmode,
3685 argvec[argnum].locate.size.constant,
3688 emit_block_move (validize_mem (argvec[argnum].save_area),
3690 GEN_INT (argvec[argnum].locate.size.constant),
3691 BLOCK_OP_CALL_PARM);
3695 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3697 emit_move_insn (argvec[argnum].save_area, stack_area);
3702 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3703 partial, reg, 0, argblock,
3704 GEN_INT (argvec[argnum].locate.offset.constant),
3705 reg_parm_stack_space,
3706 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3708 /* Now mark the segment we just used. */
3709 if (ACCUMULATE_OUTGOING_ARGS)
3710 for (i = lower_bound; i < upper_bound; i++)
3711 stack_usage_map[i] = 1;
3715 if (flags & ECF_CONST)
3719 /* Indicate argument access so that alias.c knows that these values are live. */
3722 use = plus_constant (argblock,
3723 argvec[argnum].locate.offset.constant);
3725 /* When arguments are pushed, trying to tell alias.c where
3726 exactly this argument is won't work, because the
3727 auto-increment causes confusion. So we merely indicate
3728 that we access something with a known mode somewhere on the stack. */
3730 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3731 gen_rtx_SCRATCH (Pmode));
3732 use = gen_rtx_MEM (argvec[argnum].mode, use);
3733 use = gen_rtx_USE (VOIDmode, use);
3734 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
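/* Illustration of the note built above (schematic RTL; the exact form depends
   on the argument's mode): with a known ARGBLOCK the entry added to
   CALL_INSN_FUNCTION_USAGE looks roughly like
   (use (mem:SI (plus ARGBLOCK offset))); when arguments are pushed it
   degrades to (use (mem:SI (plus (reg virtual-outgoing-args) (scratch)))),
   telling alias.c only that some stack slot is read.  */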
3739 /* If we pushed args in forward order, perform stack alignment
3740 after pushing the last arg. */
3741 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3742 anti_adjust_stack (GEN_INT (args_size.constant
3743 - original_args_size.constant));
3745 if (PUSH_ARGS_REVERSED)
3750 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3752 /* Now load any reg parms into their regs. */
3754 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3755 are to be pushed. */
3756 for (count = 0; count < nargs; count++, argnum += inc)
3758 enum machine_mode mode = argvec[argnum].mode;
3759 rtx val = argvec[argnum].value;
3760 rtx reg = argvec[argnum].reg;
3761 int partial = argvec[argnum].partial;
3763 /* Handle calls that pass values in multiple non-contiguous
3764 locations. The PA64 has examples of this for library calls. */
3765 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3766 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3767 else if (reg != 0 && partial == 0)
3768 emit_move_insn (reg, val);
3773 /* Any regs containing parms remain in use through the call. */
3774 for (count = 0; count < nargs; count++)
3776 rtx reg = argvec[count].reg;
3777 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3778 use_group_regs (&call_fusage, reg);
3779 else if (reg != 0)
3781 int partial = argvec[count].partial;
3782 if (partial)
3784 int nregs;
3785 gcc_assert (partial % UNITS_PER_WORD == 0);
3786 nregs = partial / UNITS_PER_WORD;
3787 use_regs (&call_fusage, REGNO (reg), nregs);
3789 else
3790 use_reg (&call_fusage, reg);
3794 /* Pass the function the address in which to return a structure value. */
3795 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3797 emit_move_insn (struct_value,
3798 force_reg (Pmode,
3799 force_operand (XEXP (mem_value, 0),
3800 NULL_RTX)));
3801 if (REG_P (struct_value))
3802 use_reg (&call_fusage, struct_value);
3805 /* Don't allow popping to be deferred, since then
3806 cse'ing of library calls could delete a call and leave the pop.  */
3807 NO_DEFER_POP;
3808 valreg = (mem_value == 0 && outmode != VOIDmode
3809 ? hard_libcall_value (outmode) : NULL_RTX);
3811 /* Stack must be properly aligned now. */
3812 gcc_assert (!(stack_pointer_delta
3813 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3815 before_call = get_last_insn ();
3817 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3818 will set inhibit_defer_pop to that value. */
3819 /* The return type is needed to decide how many bytes the function pops.
3820 Signedness plays no role in that, so for simplicity, we pretend it's
3821 always signed. We also assume that the list of arguments passed has
3822 no impact, so we pretend it is unknown. */
3824 emit_call_1 (fun, NULL,
3825 get_identifier (XSTR (orgfun, 0)),
3826 build_function_type (tfom, NULL_TREE),
3827 original_args_size.constant, args_size.constant,
3828 struct_value_size,
3829 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3830 valreg,
3831 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3833 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3834 that it should complain if nonvolatile values are live. For
3835 functions that cannot return, inform flow that control does not
3836 fall through.  */
3838 if (flags & ECF_NORETURN)
3840 /* The barrier note must be emitted
3841 immediately after the CALL_INSN. Some ports emit more than
3842 just a CALL_INSN above, so we must search for it here. */
3844 rtx last = get_last_insn ();
3845 while (!CALL_P (last))
3847 last = PREV_INSN (last);
3848 /* There was no CALL_INSN? */
3849 gcc_assert (last != before_call);
3852 emit_barrier_after (last);
3855 /* Now restore inhibit_defer_pop to its actual original value.  */
3856 OK_DEFER_POP;
3858 /* If call is cse'able, make appropriate pair of reg-notes around it.
3859 Test valreg so we don't crash; may safely ignore `const'
3860 if return type is void. Disable for PARALLEL return values, because
3861 we have no way to move such values into a pseudo register. */
3862 if (flags & ECF_LIBCALL_BLOCK)
3864 rtx insns;
3866 if (valreg == 0)
3868 insns = get_insns ();
3869 end_sequence ();
3870 emit_insn (insns);
3872 else
3874 rtx note = 0;
3875 rtx temp;
3876 int i;
3878 if (GET_CODE (valreg) == PARALLEL)
3880 temp = gen_reg_rtx (outmode);
3881 emit_group_store (temp, valreg, NULL_TREE,
3882 GET_MODE_SIZE (outmode));
3883 valreg = temp;
3886 temp = gen_reg_rtx (GET_MODE (valreg));
3888 /* Construct an "equal form" for the value which mentions all the
3889 arguments in order as well as the function name. */
3890 for (i = 0; i < nargs; i++)
3891 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3892 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
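/* Schematically, and only for illustration, two arguments yield
       (expr_list FUN (expr_list ARG1 (expr_list ARG0 (nil))))
   i.e. the function first, then the arguments from last to first.  */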
3894 insns = get_insns ();
3895 end_sequence ();
3897 if (flags & ECF_PURE)
3898 note = gen_rtx_EXPR_LIST (VOIDmode,
3899 gen_rtx_USE (VOIDmode,
3900 gen_rtx_MEM (BLKmode,
3901 gen_rtx_SCRATCH (VOIDmode))),
3902 note);
3904 emit_libcall_block (insns, temp, valreg, note);
3906 valreg = temp;
3911 /* Copy the value to the right place. */
3912 if (outmode != VOIDmode && retval)
3914 if (mem_value)
3916 if (value == 0)
3917 value = mem_value;
3918 if (value != mem_value)
3919 emit_move_insn (value, mem_value);
3921 else if (GET_CODE (valreg) == PARALLEL)
3923 if (value == 0)
3924 value = gen_reg_rtx (outmode);
3925 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3927 else
3929 /* Convert to the proper mode if PROMOTE_MODE has been active.  */
3930 if (GET_MODE (valreg) != outmode)
3932 int unsignedp = TYPE_UNSIGNED (tfom);
3934 gcc_assert (targetm.calls.promote_function_return (tfom));
3935 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
3936 == GET_MODE (valreg));
3938 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3941 if (value != 0)
3942 emit_move_insn (value, valreg);
3943 else
3944 value = valreg;
3948 if (ACCUMULATE_OUTGOING_ARGS)
3950 #ifdef REG_PARM_STACK_SPACE
3951 if (save_area)
3952 restore_fixed_argument_area (save_area, argblock,
3953 high_to_save, low_to_save);
3954 #endif
3956 /* If we saved any argument areas, restore them. */
3957 for (count = 0; count < nargs; count++)
3958 if (argvec[count].save_area)
3960 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3961 rtx adr = plus_constant (argblock,
3962 argvec[count].locate.offset.constant);
3963 rtx stack_area = gen_rtx_MEM (save_mode,
3964 memory_address (save_mode, adr));
3966 if (save_mode == BLKmode)
3967 emit_block_move (stack_area,
3968 validize_mem (argvec[count].save_area),
3969 GEN_INT (argvec[count].locate.size.constant),
3970 BLOCK_OP_CALL_PARM);
3971 else
3972 emit_move_insn (stack_area, argvec[count].save_area);
3975 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3976 stack_usage_map = initial_stack_usage_map;
3979 if (stack_usage_map_buf)
3980 free (stack_usage_map_buf);
3982 return value;
3986 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3987 (emitting the queue unless NO_QUEUE is nonzero),
3988 for a value of mode OUTMODE,
3989 with NARGS different arguments, passed as alternating rtx values
3990 and machine_modes to convert them to.
3992 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3993 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3994 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3995 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3996 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3997 or other LCT_ value for other types of library calls. */
3999 void
4000 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4001 enum machine_mode outmode, int nargs, ...)
4003 va_list p;
4005 va_start (p, nargs);
4006 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4007 va_end (p);
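/* Example of use (illustrative only; DST and LEN are placeholder rtxes
   and memset_libfunc is just one possible callee):

       emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                          dst, Pmode,
                          const0_rtx, TYPE_MODE (integer_type_node),
                          len, TYPE_MODE (sizetype));

   i.e. NARGS rtx/mode pairs follow the fixed arguments.  */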
4010 /* Like emit_library_call except that an extra argument, VALUE,
4011 comes second and says where to store the result.
4012 (If VALUE is zero, this function chooses a convenient way
4013 to return the value.)
4015 This function returns an rtx for where the value is to be found.
4016 If VALUE is nonzero, VALUE is returned. */
4018 rtx
4019 emit_library_call_value (rtx orgfun, rtx value,
4020 enum libcall_type fn_type,
4021 enum machine_mode outmode, int nargs, ...)
4023 rtx result;
4024 va_list p;
4026 va_start (p, nargs);
4027 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4028 nargs, p);
4029 va_end (p);
4031 return result;
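/* Example of use (illustrative only; LIBFUNC, OP0 and OP1 are
   placeholders), in the style of the integer and soft-float libcalls:

       target = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                         mode, 2, op0, mode, op1, mode);

   The returned rtx is where the result can be found, usually the
   hard libcall return register for MODE.  */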
4034 /* Store a single argument for a function call
4035 into the register or memory area where it must be passed.
4036 *ARG describes the argument value and where to pass it.
4038 ARGBLOCK is the address of the stack-block for all the arguments,
4039 or 0 on a machine where arguments are pushed individually.
4041 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4042 so must be careful about how the stack is used.
4044 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4045 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4046 that we need not worry about saving and restoring the stack.
4048 FNDECL is the declaration of the function we are calling.
4050 Return nonzero if this arg should cause sibcall failure,
4051 zero otherwise.  */
4053 static int
4054 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4055 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4057 tree pval = arg->tree_value;
4058 rtx reg = 0;
4059 int partial = 0;
4060 int used = 0;
4061 int i, lower_bound = 0, upper_bound = 0;
4062 int sibcall_failure = 0;
4064 if (TREE_CODE (pval) == ERROR_MARK)
4065 return 1;
4067 /* Push a new temporary level for any temporaries we make for
4068 this argument.  */
4069 push_temp_slots ();
4071 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4073 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4074 save any previous data at that location. */
4075 if (argblock && ! variable_size && arg->stack)
4077 #ifdef ARGS_GROW_DOWNWARD
4078 /* stack_slot is negative, but we want to index stack_usage_map
4079 with positive values. */
4080 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4081 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4082 else
4083 upper_bound = 0;
4085 lower_bound = upper_bound - arg->locate.size.constant;
4086 #else
4087 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4088 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4089 else
4090 lower_bound = 0;
4092 upper_bound = lower_bound + arg->locate.size.constant;
4093 #endif
4095 i = lower_bound;
4096 /* Don't worry about things in the fixed argument area;
4097 it has already been saved. */
4098 if (i < reg_parm_stack_space)
4099 i = reg_parm_stack_space;
4100 while (i < upper_bound && stack_usage_map[i] == 0)
4101 i++;
4103 if (i < upper_bound)
4105 /* We need to make a save area. */
4106 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4107 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4108 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4109 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4111 if (save_mode == BLKmode)
4113 tree ot = TREE_TYPE (arg->tree_value);
4114 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4115 | TYPE_QUAL_CONST));
4117 arg->save_area = assign_temp (nt, 0, 1, 1);
4118 preserve_temp_slots (arg->save_area);
4119 emit_block_move (validize_mem (arg->save_area), stack_area,
4120 GEN_INT (arg->locate.size.constant),
4121 BLOCK_OP_CALL_PARM);
4123 else
4125 arg->save_area = gen_reg_rtx (save_mode);
4126 emit_move_insn (arg->save_area, stack_area);
4132 /* If this isn't going to be placed on both the stack and in registers,
4133 set up the register and number of words. */
4134 if (! arg->pass_on_stack)
4136 if (flags & ECF_SIBCALL)
4137 reg = arg->tail_call_reg;
4138 else
4139 reg = arg->reg;
4140 partial = arg->partial;
4143 /* Being passed entirely in a register.  We shouldn't be called in
4144 this case.  */
4145 gcc_assert (reg == 0 || partial != 0);
4147 /* If this arg needs special alignment, don't load the registers
4148 here.  */
4149 if (arg->n_aligned_regs != 0)
4150 reg = 0;
4152 /* If this is being passed partially in a register, we can't evaluate
4153 it directly into its stack slot. Otherwise, we can. */
4154 if (arg->value == 0)
4156 /* stack_arg_under_construction is nonzero if a function argument is
4157 being evaluated directly into the outgoing argument list and
4158 expand_call must take special action to preserve the argument list
4159 if it is called recursively.
4161 For scalar function arguments stack_usage_map is sufficient to
4162 determine which stack slots must be saved and restored. Scalar
4163 arguments in general have pass_on_stack == 0.
4165 If this argument is initialized by a function which takes the
4166 address of the argument (a C++ constructor or a C function
4167 returning a BLKmode structure), then stack_usage_map is
4168 insufficient and expand_call must push the stack around the
4169 function call. Such arguments have pass_on_stack == 1.
4171 Note that it is always safe to set stack_arg_under_construction,
4172 but this generates suboptimal code if set when not needed. */
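/* A concrete case, for illustration: in a call such as f (g ()),
   where g returns a BLKmode structure, g's result may be constructed
   directly in the stack slot that will hold f's argument, so any call
   made while building that argument must not clobber that slot.  */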
4174 if (arg->pass_on_stack)
4175 stack_arg_under_construction++;
4177 arg->value = expand_expr (pval,
4178 (partial
4179 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4180 ? NULL_RTX : arg->stack,
4181 VOIDmode, EXPAND_STACK_PARM);
4183 /* If we are promoting object (or for any other reason) the mode
4184 doesn't agree, convert the mode. */
4186 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4187 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4188 arg->value, arg->unsignedp);
4190 if (arg->pass_on_stack)
4191 stack_arg_under_construction--;
4194 /* Check for overlap with already clobbered argument area. */
4195 if ((flags & ECF_SIBCALL)
4196 && MEM_P (arg->value)
4197 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4198 arg->locate.size.constant))
4199 sibcall_failure = 1;
4201 /* Don't allow anything left on stack from computation
4202 of argument to alloca. */
4203 if (flags & ECF_MAY_BE_ALLOCA)
4204 do_pending_stack_adjust ();
4206 if (arg->value == arg->stack)
4207 /* If the value is already in the stack slot, we are done.  */
4208 ;
4209 else if (arg->mode != BLKmode)
4211 int size;
4212 unsigned int parm_align;
4214 /* Argument is a scalar, not entirely passed in registers.
4215 (If part is passed in registers, arg->partial says how much
4216 and emit_push_insn will take care of putting it there.)
4218 Push it, and if its size is less than the
4219 amount of space allocated to it,
4220 also bump stack pointer by the additional space.
4221 Note that in C the default argument promotions
4222 will prevent such mismatches. */
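/* For illustration: an unprototyped `char' or `short' argument is
   promoted to `int', so its pushed size matches its slot; a prototyped
   narrow argument, by contrast, can occupy a full PARM_BOUNDARY slot
   even though its mode is smaller, and that difference is the
   `used - size' padding handled below.  */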
4224 size = GET_MODE_SIZE (arg->mode);
4225 /* Compute how much space the push instruction will push.
4226 On many machines, pushing a byte will advance the stack
4227 pointer by a halfword. */
4228 #ifdef PUSH_ROUNDING
4229 size = PUSH_ROUNDING (size);
4230 #endif
4231 used = size;
4233 /* Compute how much space the argument should get:
4234 round up to a multiple of the alignment for arguments. */
4235 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4236 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4237 / (PARM_BOUNDARY / BITS_PER_UNIT))
4238 * (PARM_BOUNDARY / BITS_PER_UNIT));
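/* Worked example (illustrative): with size == 1 and a 32-bit
   PARM_BOUNDARY this is ((1 + 4 - 1) / 4) * 4 == 4, so the byte is
   given a full 4-byte argument slot.  */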
4240 /* Compute the alignment of the pushed argument. */
4241 parm_align = arg->locate.boundary;
4242 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4244 int pad = used - size;
4245 if (pad)
4247 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4248 parm_align = MIN (parm_align, pad_align);
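/* (pad & -pad) isolates the lowest set bit of the padding, so, for
   example, 2 bytes of padding limit the known alignment to 16 bits.  */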
4252 /* This isn't already where we want it on the stack, so put it there.
4253 This can either be done with push or copy insns. */
4254 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4255 parm_align, partial, reg, used - size, argblock,
4256 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4257 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4259 /* Unless this is a partially-in-register argument, the argument is now
4260 in the stack.  */
4261 if (partial == 0)
4262 arg->value = arg->stack;
4264 else
4266 /* BLKmode, at least partly to be pushed. */
4268 unsigned int parm_align;
4269 int excess;
4270 rtx size_rtx;
4272 /* Pushing a nonscalar.
4273 If part is passed in registers, PARTIAL says how much
4274 and emit_push_insn will take care of putting it there. */
4276 /* Round its size up to a multiple
4277 of the allocation unit for arguments. */
4279 if (arg->locate.size.var != 0)
4281 excess = 0;
4282 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4284 else
4286 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4287 for BLKmode is careful to avoid it. */
4288 excess = (arg->locate.size.constant
4289 - int_size_in_bytes (TREE_TYPE (pval))
4290 + partial);
4291 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4292 NULL_RTX, TYPE_MODE (sizetype), 0);
4295 parm_align = arg->locate.boundary;
4297 /* When an argument is padded down, the block is aligned to
4298 PARM_BOUNDARY, but the actual argument isn't. */
4299 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4301 if (arg->locate.size.var)
4302 parm_align = BITS_PER_UNIT;
4303 else if (excess)
4305 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4306 parm_align = MIN (parm_align, excess_align);
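/* As in the scalar case above, (excess & -excess) isolates the lowest
   set bit: e.g. 2 bytes of excess limit the block's known alignment to
   16 bits, while a variable-sized block gets only BITS_PER_UNIT.  */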
4310 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4312 /* emit_push_insn might not work properly if arg->value and
4313 argblock + arg->locate.offset areas overlap.  */
4314 rtx x = arg->value;
4315 int i = 0;
4317 if (XEXP (x, 0) == current_function_internal_arg_pointer
4318 || (GET_CODE (XEXP (x, 0)) == PLUS
4319 && XEXP (XEXP (x, 0), 0) ==
4320 current_function_internal_arg_pointer
4321 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4323 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4324 i = INTVAL (XEXP (XEXP (x, 0), 1));
4326 /* expand_call should ensure this. */
4327 gcc_assert (!arg->locate.offset.var
4328 && arg->locate.size.var == 0
4329 && GET_CODE (size_rtx) == CONST_INT);
4331 if (arg->locate.offset.constant > i)
4333 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4334 sibcall_failure = 1;
4336 else if (arg->locate.offset.constant < i)
4338 /* Use arg->locate.size.constant instead of size_rtx
4339 because we only care about the part of the argument
4340 on the stack.  */
4341 if (i < (arg->locate.offset.constant
4342 + arg->locate.size.constant))
4343 sibcall_failure = 1;
4345 else
4347 /* Even though they appear to be at the same location,
4348 if part of the outgoing argument is in registers,
4349 they aren't really at the same location. Check for
4350 this by making sure that the incoming size is the
4351 same as the outgoing size. */
4352 if (arg->locate.size.constant != INTVAL (size_rtx))
4353 sibcall_failure = 1;
4358 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4359 parm_align, partial, reg, excess, argblock,
4360 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4361 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4363 /* Unless this is a partially-in-register argument, the argument is now
4364 in the stack.
4366 ??? Unlike the case above, in which we want the actual
4367 address of the data, so that we can load it directly into a
4368 register, here we want the address of the stack slot, so that
4369 it's properly aligned for word-by-word copying or something
4370 like that. It's not clear that this is always correct. */
4371 if (partial == 0)
4372 arg->value = arg->stack_slot;
4375 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4377 tree type = TREE_TYPE (arg->tree_value);
4378 arg->parallel_value
4379 = emit_group_load_into_temps (arg->reg, arg->value, type,
4380 int_size_in_bytes (type));
4383 /* Mark all slots this store used. */
4384 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4385 && argblock && ! variable_size && arg->stack)
4386 for (i = lower_bound; i < upper_bound; i++)
4387 stack_usage_map[i] = 1;
4389 /* Once we have pushed something, pops can't safely
4390 be deferred during the rest of the arguments.  */
4391 NO_DEFER_POP;
4393 /* Free any temporary slots made in processing this argument. Show
4394 that we might have taken the address of something and pushed that
4395 as an operand.  */
4396 preserve_temp_slots (NULL_RTX);
4397 free_temp_slots ();
4398 pop_temp_slots ();
4400 return sibcall_failure;
4403 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4405 bool
4406 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4407 tree type)
4409 if (!type)
4410 return false;
4412 /* If the type has variable size...  */
4413 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4414 return true;
4416 /* If the type is marked as addressable (it is required
4417 to be constructed into the stack)... */
4418 if (TREE_ADDRESSABLE (type))
4419 return true;
4421 return false;
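/* For illustration: a variable-length array type has a non-INTEGER_CST
   TYPE_SIZE, and a C++ class with a nontrivial copy constructor is
   TREE_ADDRESSABLE; both must therefore be passed on the stack.  */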
4424 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4425 takes trailing padding of a structure into account. */
4426 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4428 bool
4429 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4431 if (!type)
4432 return false;
4434 /* If the type has variable size... */
4435 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4436 return true;
4438 /* If the type is marked as addressable (it is required
4439 to be constructed into the stack)... */
4440 if (TREE_ADDRESSABLE (type))
4441 return true;
4443 /* If the padding and mode of the type is such that a copy into
4444 a register would put it into the wrong part of the register. */
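/* For example (illustrative): a 3-byte BLKmode structure whose padding
   direction is `upward' on a big-endian target (or `downward' on a
   little-endian one) would end up in the wrong end of a word-sized
   register if simply copied, so it is forced onto the stack instead.  */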
4445 if (mode == BLKmode
4446 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4447 && (FUNCTION_ARG_PADDING (mode, type)
4448 == (BYTES_BIG_ENDIAN ? upward : downward)))