1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
40 #include "langhooks.h"
45 #include "tree-flow.h"
47 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
48 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
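/* For illustration only (hypothetical target values): with
   PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8,
   STACK_BYTES == 128 / 8 == 16, so argument-block sizes computed
   below are rounded to 16-byte multiples.  */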
50 /* Data structure and subroutines used within expand_call. */
54 /* Tree node for this argument. */
56 /* Mode for value; TYPE_MODE unless promoted. */
57 enum machine_mode mode;
58 /* Current RTL value for argument, or 0 if it isn't precomputed. */
60 /* Initially-computed RTL value for argument; only for const functions. */
62 /* Register to pass this argument in, 0 if passed on stack, or a
63 PARALLEL if the arg is to be copied into multiple non-contiguous
66 /* Register to pass this argument in when generating tail call sequence.
67 This is not the same register as for normal calls on machines with
70 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
71 form for emit_group_move. */
73 /* If REG was promoted from the actual mode of the argument expression,
74 indicates whether the promotion is sign- or zero-extended. */
76 /* Number of bytes to put in registers. 0 means put the whole arg
77 in registers. Also 0 if not passed in registers. */
79 /* Nonzero if argument must be passed on stack.
80 Note that some arguments may be passed on the stack
81 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
82 pass_on_stack identifies arguments that *cannot* go in registers. */
84 /* Some fields packaged up for locate_and_pad_parm. */
85 struct locate_and_pad_arg_data locate;
86 /* Location on the stack at which parameter should be stored. The store
87 has already been done if STACK == VALUE. */
89 /* Location on the stack of the start of this argument slot. This can
90 differ from STACK if this arg pads downward. This location is known
91 to be aligned to FUNCTION_ARG_BOUNDARY. */
93 /* Place that this stack area has been saved, if needed. */
95 /* If an argument's alignment does not permit direct copying into registers,
96 copy in smaller-sized pieces into pseudos. These are stored in a
97 block pointed to by this field. The next field says how many
98 word-sized pseudos we made. */
103 /* A vector of one char per byte of stack space. A byte is nonzero if
104 the corresponding stack location has been used.
105 This vector is used to prevent a function call within an argument from
106 clobbering any stack already set up. */
107 static char *stack_usage_map;
109 /* Size of STACK_USAGE_MAP. */
110 static int highest_outgoing_arg_in_use;
112 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
113 stack location's tail call argument has already been stored into the stack.
114 This bitmap is used to prevent sibling call optimization if the function tries
115 to use the parent's incoming argument slots when they have already been
116 overwritten with tail call arguments. */
117 static sbitmap stored_args_map;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 static int stack_arg_under_construction;
126 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
127 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
129 static void precompute_register_parameters (int, struct arg_data *, int *);
130 static int store_one_arg (struct arg_data *, rtx, int, int, int);
131 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
132 static int finalize_must_preallocate (int, int, struct arg_data *,
134 static void precompute_arguments (int, struct arg_data *);
135 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
136 static void initialize_argument_information (int, struct arg_data *,
137 struct args_size *, int,
139 tree, tree, CUMULATIVE_ARGS *, int,
140 rtx *, int *, int *, int *,
142 static void compute_argument_addresses (struct arg_data *, rtx, int);
143 static rtx rtx_for_function_call (tree, tree);
144 static void load_register_parameters (struct arg_data *, int, rtx *, int,
146 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
147 enum machine_mode, int, va_list);
148 static int special_function_p (const_tree, int);
149 static int check_sibcall_argument_overlap_1 (rtx);
150 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
152 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
154 static tree split_complex_types (tree);
156 #ifdef REG_PARM_STACK_SPACE
157 static rtx save_fixed_argument_area (int, rtx, int *, int *);
158 static void restore_fixed_argument_area (rtx, rtx, int, int);
161 /* Force FUNEXP into a form suitable for the address of a CALL,
162 and return that as an rtx. Also load the static chain register
163 if FNDECL is a nested function.
165 CALL_FUSAGE points to a variable holding the prospective
166 CALL_INSN_FUNCTION_USAGE information. */
169 prepare_call_address (rtx funexp, rtx static_chain_value,
170 rtx *call_fusage, int reg_parm_seen, int sibcallp)
172 /* Make a valid memory address and copy constants through pseudo-regs,
173 but not for a constant address if -fno-function-cse. */
174 if (GET_CODE (funexp) != SYMBOL_REF)
175 /* If we are using registers for parameters, force the
176 function address into a register now. */
177 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
178 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
179 : memory_address (FUNCTION_MODE, funexp));
182 #ifndef NO_FUNCTION_CSE
183 if (optimize && ! flag_no_function_cse)
184 funexp = force_reg (Pmode, funexp);
188 if (static_chain_value != 0)
190 static_chain_value = convert_memory_address (Pmode, static_chain_value);
191 emit_move_insn (static_chain_rtx, static_chain_value);
193 if (REG_P (static_chain_rtx))
194 use_reg (call_fusage, static_chain_rtx);
200 /* Generate instructions to call function FUNEXP,
201 and optionally pop the results.
202 The CALL_INSN is the first insn generated.
204 FNDECL is the declaration node of the function. This is given to the
205 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
207 FUNTYPE is the data type of the function. This is given to the macro
208 RETURN_POPS_ARGS to determine whether this function pops its own args.
209 We used to allow an identifier for library functions, but that doesn't
210 work when the return type is an aggregate type and the calling convention
211 says that the pointer to this aggregate is to be popped by the callee.
213 STACK_SIZE is the number of bytes of arguments on the stack,
214 ROUNDED_STACK_SIZE is that number rounded up to
215 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
216 both to put into the call insn and to generate explicit popping code if necessary.
219 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
220 It is zero if this call doesn't want a structure value.
222 NEXT_ARG_REG is the rtx that results from executing
223 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
224 just after all the args have had their registers assigned.
225 This could be whatever you like, but normally it is the first
226 arg-register beyond those used for args in this call,
227 or 0 if all the arg-registers are used in this call.
228 It is passed on to `gen_call' so you can put this info in the call insn.
230 VALREG is a hard register in which a value is returned,
231 or 0 if the call does not return a value.
233 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
234 the args to this call were processed.
235 We restore `inhibit_defer_pop' to that value.
237 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
238 denote registers used by the called function. */
241 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
242 tree funtype ATTRIBUTE_UNUSED,
243 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
244 HOST_WIDE_INT rounded_stack_size,
245 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
246 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
247 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
248 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
250 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
252 int already_popped = 0;
253 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
254 #if defined (HAVE_call) && defined (HAVE_call_value)
255 rtx struct_value_size_rtx;
256 struct_value_size_rtx = GEN_INT (struct_value_size);
259 #ifdef CALL_POPS_ARGS
260 n_popped += CALL_POPS_ARGS (* args_so_far);
263 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
264 and we don't want to load it into a register as an optimization,
265 because prepare_call_address already did it if it should be done. */
266 if (GET_CODE (funexp) != SYMBOL_REF)
267 funexp = memory_address (FUNCTION_MODE, funexp);
269 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
270 if ((ecf_flags & ECF_SIBCALL)
271 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
272 && (n_popped > 0 || stack_size == 0))
274 rtx n_pop = GEN_INT (n_popped);
277 /* If this subroutine pops its own args, record that in the call insn
278 if possible, for the sake of frame pointer elimination. */
281 pat = GEN_SIBCALL_VALUE_POP (valreg,
282 gen_rtx_MEM (FUNCTION_MODE, funexp),
283 rounded_stack_size_rtx, next_arg_reg,
286 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
287 rounded_stack_size_rtx, next_arg_reg, n_pop);
289 emit_call_insn (pat);
295 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
296 /* If the target has "call" or "call_value" insns, then prefer them
297 if no arguments are actually popped. If the target does not have
298 "call" or "call_value" insns, then we must use the popping versions
299 even if the call has no arguments to pop. */
300 #if defined (HAVE_call) && defined (HAVE_call_value)
301 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
304 if (HAVE_call_pop && HAVE_call_value_pop)
307 rtx n_pop = GEN_INT (n_popped);
310 /* If this subroutine pops its own args, record that in the call insn
311 if possible, for the sake of frame pointer elimination. */
314 pat = GEN_CALL_VALUE_POP (valreg,
315 gen_rtx_MEM (FUNCTION_MODE, funexp),
316 rounded_stack_size_rtx, next_arg_reg, n_pop);
318 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
319 rounded_stack_size_rtx, next_arg_reg, n_pop);
321 emit_call_insn (pat);
327 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
328 if ((ecf_flags & ECF_SIBCALL)
329 && HAVE_sibcall && HAVE_sibcall_value)
332 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
333 gen_rtx_MEM (FUNCTION_MODE, funexp),
334 rounded_stack_size_rtx,
335 next_arg_reg, NULL_RTX));
337 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
338 rounded_stack_size_rtx, next_arg_reg,
339 struct_value_size_rtx));
344 #if defined (HAVE_call) && defined (HAVE_call_value)
345 if (HAVE_call && HAVE_call_value)
348 emit_call_insn (GEN_CALL_VALUE (valreg,
349 gen_rtx_MEM (FUNCTION_MODE, funexp),
350 rounded_stack_size_rtx, next_arg_reg,
353 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
354 rounded_stack_size_rtx, next_arg_reg,
355 struct_value_size_rtx));
361 /* Find the call we just emitted. */
362 call_insn = last_call_insn ();
364 /* Put the register usage information there. */
365 add_function_usage_to (call_insn, call_fusage);
367 /* If this is a const call, then set the insn's unchanging bit. */
368 if (ecf_flags & ECF_CONST)
369 RTL_CONST_CALL_P (call_insn) = 1;
371 /* If this is a pure call, then set the insn's unchanging bit. */
372 if (ecf_flags & ECF_PURE)
373 RTL_PURE_CALL_P (call_insn) = 1;
375 /* If this is a const or pure call that may loop, set the insn's looping-const-or-pure bit. */
376 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
377 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
379 /* If this call can't throw, attach a REG_EH_REGION reg note to that effect. */
381 if (ecf_flags & ECF_NOTHROW)
382 add_reg_note (call_insn, REG_EH_REGION, const0_rtx);
385 int rn = lookup_expr_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
390 add_reg_note (call_insn, REG_EH_REGION, GEN_INT (rn));
393 if (ecf_flags & ECF_NORETURN)
394 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
396 if (ecf_flags & ECF_RETURNS_TWICE)
398 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
399 cfun->calls_setjmp = 1;
402 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
404 /* Restore this now, so that we do defer pops for this call's args
405 if the context of the call as a whole permits. */
406 inhibit_defer_pop = old_inhibit_defer_pop;
411 CALL_INSN_FUNCTION_USAGE (call_insn)
412 = gen_rtx_EXPR_LIST (VOIDmode,
413 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
414 CALL_INSN_FUNCTION_USAGE (call_insn));
415 rounded_stack_size -= n_popped;
416 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
417 stack_pointer_delta -= n_popped;
419 /* If popping is needed, stack realign must use DRAP. */
420 if (SUPPORTS_STACK_ALIGNMENT)
421 crtl->need_drap = true;
424 if (!ACCUMULATE_OUTGOING_ARGS)
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
433 if (rounded_stack_size != 0)
435 if (ecf_flags & ECF_NORETURN)
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
442 adjust_stack (rounded_stack_size_rtx);
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
451 ??? We may optimize similarly to defer_pop above, but it is
452 probably not worthwhile.
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
457 anti_adjust_stack (GEN_INT (n_popped));
460 /* Determine if the function identified by NAME and FNDECL is one with
461 special properties we wish to know about.
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
466 Similarly set NORETURN if the function is in the longjmp family.
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
472 special_function_p (const_tree fndecl, int flags)
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you wish.
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
501 /* Disregard prefix _, __, __x or __builtin_. */
506 && !strncmp (name + 3, "uiltin_", 7))
508 else if (name[1] == '_' && name[2] == 'x')
510 else if (name[1] == '_')
519 && (! strcmp (tname, "setjmp")
520 || ! strcmp (tname, "setjmp_syscall")))
522 && ! strcmp (tname, "sigsetjmp"))
524 && ! strcmp (tname, "savectx")))
525 flags |= ECF_RETURNS_TWICE;
528 && ! strcmp (tname, "siglongjmp"))
529 flags |= ECF_NORETURN;
531 else if ((tname[0] == 'q' && tname[1] == 's'
532 && ! strcmp (tname, "qsetjmp"))
533 || (tname[0] == 'v' && tname[1] == 'f'
534 && ! strcmp (tname, "vfork"))
535 || (tname[0] == 'g' && tname[1] == 'e'
536 && !strcmp (tname, "getcontext")))
537 flags |= ECF_RETURNS_TWICE;
539 else if (tname[0] == 'l' && tname[1] == 'o'
540 && ! strcmp (tname, "longjmp"))
541 flags |= ECF_NORETURN;
547 /* Return nonzero when FNDECL represents a call to setjmp. */
550 setjmp_call_p (const_tree fndecl)
552 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
556 /* Return true if STMT is an alloca call. */
559 gimple_alloca_call_p (const_gimple stmt)
563 if (!is_gimple_call (stmt))
566 fndecl = gimple_call_fndecl (stmt);
567 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
573 /* Return true when exp contains alloca call. */
576 alloca_call_p (const_tree exp)
578 if (TREE_CODE (exp) == CALL_EXPR
579 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
580 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
581 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
582 & ECF_MAY_BE_ALLOCA))
587 /* Detect flags (function attributes) from the function decl or type node. */
590 flags_from_decl_or_type (const_tree exp)
593 const_tree type = exp;
597 type = TREE_TYPE (exp);
599 /* The function exp may have the `malloc' attribute. */
600 if (DECL_IS_MALLOC (exp))
603 /* The function exp may have the `returns_twice' attribute. */
604 if (DECL_IS_RETURNS_TWICE (exp))
605 flags |= ECF_RETURNS_TWICE;
607 /* Process the pure and const attributes. */
608 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
610 if (DECL_PURE_P (exp))
612 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
613 flags |= ECF_LOOPING_CONST_OR_PURE;
615 if (DECL_IS_NOVOPS (exp))
618 if (TREE_NOTHROW (exp))
619 flags |= ECF_NOTHROW;
621 flags = special_function_p (exp, flags);
623 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
626 if (TREE_THIS_VOLATILE (exp))
627 flags |= ECF_NORETURN;
632 /* Detect flags from a CALL_EXPR. */
635 call_expr_flags (const_tree t)
638 tree decl = get_callee_fndecl (t);
641 flags = flags_from_decl_or_type (decl);
644 t = TREE_TYPE (CALL_EXPR_FN (t));
645 if (t && TREE_CODE (t) == POINTER_TYPE)
646 flags = flags_from_decl_or_type (TREE_TYPE (t));
654 /* Precompute all register parameters as described by ARGS, storing values
655 into fields within the ARGS array.
657 NUM_ACTUALS indicates the total number of elements in the ARGS array.
659 Set REG_PARM_SEEN if we encounter a register parameter. */
662 precompute_register_parameters (int num_actuals, struct arg_data *args,
669 for (i = 0; i < num_actuals; i++)
670 if (args[i].reg != 0 && ! args[i].pass_on_stack)
674 if (args[i].value == 0)
677 args[i].value = expand_normal (args[i].tree_value);
678 preserve_temp_slots (args[i].value);
682 /* If the value is a non-legitimate constant, force it into a
683 pseudo now. TLS symbols sometimes need a call to resolve. */
684 if (CONSTANT_P (args[i].value)
685 && !LEGITIMATE_CONSTANT_P (args[i].value))
686 args[i].value = force_reg (args[i].mode, args[i].value);
688 /* If we are to promote the function arg to a wider mode, do it now. */
691 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
693 = convert_modes (args[i].mode,
694 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
695 args[i].value, args[i].unsignedp);
697 /* If we're going to have to load the value by parts, pull the
698 parts into pseudos. The part extraction process can involve
699 non-trivial computation. */
700 if (GET_CODE (args[i].reg) == PARALLEL)
702 tree type = TREE_TYPE (args[i].tree_value);
703 args[i].parallel_value
704 = emit_group_load_into_temps (args[i].reg, args[i].value,
705 type, int_size_in_bytes (type));
708 /* If the value is expensive, and we are inside an appropriately
709 short loop, put the value into a pseudo and then put the pseudo into the hard reg.
712 For small register classes, also do this if this call uses
713 register parameters. This is to avoid reload conflicts while
714 loading the parameter registers. */
716 else if ((! (REG_P (args[i].value)
717 || (GET_CODE (args[i].value) == SUBREG
718 && REG_P (SUBREG_REG (args[i].value)))))
719 && args[i].mode != BLKmode
720 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
722 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
724 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
728 #ifdef REG_PARM_STACK_SPACE
730 /* The argument list is the property of the called routine and it
731 may clobber it. If the fixed area has been used for previous
732 parameters, we must save and restore it. */
735 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
740 /* Compute the boundary of the area that needs to be saved, if any. */
741 high = reg_parm_stack_space;
742 #ifdef ARGS_GROW_DOWNWARD
745 if (high > highest_outgoing_arg_in_use)
746 high = highest_outgoing_arg_in_use;
748 for (low = 0; low < high; low++)
749 if (stack_usage_map[low] != 0)
752 enum machine_mode save_mode;
757 while (stack_usage_map[--high] == 0)
761 *high_to_save = high;
763 num_to_save = high - low + 1;
764 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
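/* Illustrative example, with made-up sizes: if the used part of the fixed
   register-parameter area covers num_to_save == 8 bytes, the call above
   asks for an 8 * BITS_PER_UNIT == 64-bit integer mode (DImode on many
   targets) so a single move can save it.  If no integer mode of that width
   is available, save_mode is BLKmode and the block-move branch below is
   taken instead.  */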
766 /* If we don't have the required alignment, must do this in BLKmode. */
768 if ((low & (MIN (GET_MODE_SIZE (save_mode),
769 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
772 #ifdef ARGS_GROW_DOWNWARD
777 stack_area = gen_rtx_MEM (save_mode,
778 memory_address (save_mode,
779 plus_constant (argblock,
782 set_mem_align (stack_area, PARM_BOUNDARY);
783 if (save_mode == BLKmode)
785 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
786 emit_block_move (validize_mem (save_area), stack_area,
787 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
791 save_area = gen_reg_rtx (save_mode);
792 emit_move_insn (save_area, stack_area);
802 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
804 enum machine_mode save_mode = GET_MODE (save_area);
808 #ifdef ARGS_GROW_DOWNWARD
809 delta = -high_to_save;
813 stack_area = gen_rtx_MEM (save_mode,
814 memory_address (save_mode,
815 plus_constant (argblock, delta)));
816 set_mem_align (stack_area, PARM_BOUNDARY);
818 if (save_mode != BLKmode)
819 emit_move_insn (stack_area, save_area);
821 emit_block_move (stack_area, validize_mem (save_area),
822 GEN_INT (high_to_save - low_to_save + 1),
825 #endif /* REG_PARM_STACK_SPACE */
827 /* If any elements in ARGS refer to parameters that are to be passed in
828 registers, but not in memory, and whose alignment does not permit a
829 direct copy into registers, copy the values into a group of pseudos
830 which we will later copy into the appropriate hard registers.
832 Pseudos for each unaligned argument will be stored into the array
833 args[argnum].aligned_regs. The caller is responsible for deallocating
834 the aligned_regs array if it is nonzero. */
837 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
841 for (i = 0; i < num_actuals; i++)
842 if (args[i].reg != 0 && ! args[i].pass_on_stack
843 && args[i].mode == BLKmode
844 && MEM_P (args[i].value)
845 && (MEM_ALIGN (args[i].value)
846 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
848 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
849 int endian_correction = 0;
853 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
854 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
858 args[i].n_aligned_regs
859 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
862 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
864 /* Structures smaller than a word are normally aligned to the
865 least significant byte. On a BYTES_BIG_ENDIAN machine,
866 this means we must skip the empty high order bytes when
867 calculating the bit offset. */
868 if (bytes < UNITS_PER_WORD
869 #ifdef BLOCK_REG_PADDING
870 && (BLOCK_REG_PADDING (args[i].mode,
871 TREE_TYPE (args[i].tree_value), 1)
877 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
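/* Worked example on a hypothetical 32-bit BYTES_BIG_ENDIAN target: for a
   3-byte structure, endian_correction = 32 - 3 * 8 = 8, so the
   store_bit_field call below places the 24 significant bits at bit offset
   8, skipping the unused high-order byte of the word register.  */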
879 for (j = 0; j < args[i].n_aligned_regs; j++)
881 rtx reg = gen_reg_rtx (word_mode);
882 rtx word = operand_subword_force (args[i].value, j, BLKmode);
883 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
885 args[i].aligned_regs[j] = reg;
886 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
887 word_mode, word_mode);
889 /* There is no need to restrict this code to loading items
890 in TYPE_ALIGN sized hunks. The bitfield instructions can
891 load up entire word sized registers efficiently.
893 ??? This may not be needed anymore.
894 We used to emit a clobber here but that doesn't let later
895 passes optimize the instructions we emit. By storing 0 into
896 the register, later passes know the first AND to zero out the
897 bitfield being set in the register is unnecessary. The store
898 of 0 will be deleted as will at least the first AND. */
900 emit_move_insn (reg, const0_rtx);
902 bytes -= bitsize / BITS_PER_UNIT;
903 store_bit_field (reg, bitsize, endian_correction, word_mode,
909 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
912 NUM_ACTUALS is the total number of parameters.
914 N_NAMED_ARGS is the total number of named arguments.
916 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
919 FNDECL is the tree code for the target of this call (if known)
921 ARGS_SO_FAR holds state needed by the target to know where to place the next argument.
924 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
925 for arguments which are passed in registers.
927 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
928 and may be modified by this routine.
930 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
931 flags which may be modified by this routine.
933 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
934 that requires allocation of stack space.
936 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
937 the thunked-to function. */
940 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
941 struct arg_data *args,
942 struct args_size *args_size,
943 int n_named_args ATTRIBUTE_UNUSED,
944 tree exp, tree struct_value_addr_value,
945 tree fndecl, tree fntype,
946 CUMULATIVE_ARGS *args_so_far,
947 int reg_parm_stack_space,
948 rtx *old_stack_level, int *old_pending_adj,
949 int *must_preallocate, int *ecf_flags,
950 bool *may_tailcall, bool call_from_thunk_p)
952 location_t loc = EXPR_LOCATION (exp);
953 /* 1 if scanning parms front to back, -1 if scanning back to front. */
956 /* Count arg position in order args appear. */
961 args_size->constant = 0;
964 /* In this loop, we consider args in the order they are written.
965 We fill up ARGS from the front or from the back if necessary
966 so that in any case the first arg to be pushed ends up at the front. */
968 if (PUSH_ARGS_REVERSED)
970 i = num_actuals - 1, inc = -1;
971 /* In this case, must reverse order of args
972 so that we compute and push the last arg first. */
979 /* First fill in the actual arguments in the ARGS array, splitting
980 complex arguments if necessary. */
983 call_expr_arg_iterator iter;
986 if (struct_value_addr_value)
988 args[j].tree_value = struct_value_addr_value;
991 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
993 tree argtype = TREE_TYPE (arg);
994 if (targetm.calls.split_complex_arg
996 && TREE_CODE (argtype) == COMPLEX_TYPE
997 && targetm.calls.split_complex_arg (argtype))
999 tree subtype = TREE_TYPE (argtype);
1000 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1002 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1005 args[j].tree_value = arg;
1010 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1011 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1013 tree type = TREE_TYPE (args[i].tree_value);
1015 enum machine_mode mode;
1017 /* Replace erroneous argument with constant zero. */
1018 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1019 args[i].tree_value = integer_zero_node, type = integer_type_node;
1021 /* If TYPE is a transparent union, pass things the way we would
1022 pass the first field of the union. We have already verified that
1023 the modes are the same. */
1024 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1025 type = TREE_TYPE (TYPE_FIELDS (type));
1027 /* Decide where to pass this arg.
1029 args[i].reg is nonzero if all or part is passed in registers.
1031 args[i].partial is nonzero if part but not all is passed in registers,
1032 and the exact value says how many bytes are passed in registers.
1034 args[i].pass_on_stack is nonzero if the argument must at least be
1035 computed on the stack. It may then be loaded back into registers
1036 if args[i].reg is nonzero.
1038 These decisions are driven by the FUNCTION_... macros and must agree
1039 with those made by function.c. */
1041 /* See if this argument should be passed by invisible reference. */
1042 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1043 type, argpos < n_named_args))
1049 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1050 type, argpos < n_named_args);
1052 /* If we're compiling a thunk, pass through invisible references
1053 instead of making a copy. */
1054 if (call_from_thunk_p
1056 && !TREE_ADDRESSABLE (type)
1057 && (base = get_base_address (args[i].tree_value))
1058 && TREE_CODE (base) != SSA_NAME
1059 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1061 /* We can't use sibcalls if a callee-copied argument is
1062 stored in the current function's frame. */
1063 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1064 *may_tailcall = false;
1066 args[i].tree_value = build_fold_addr_expr_loc (loc,
1067 args[i].tree_value);
1068 type = TREE_TYPE (args[i].tree_value);
1070 if (*ecf_flags & ECF_CONST)
1071 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1075 /* We make a copy of the object and pass the address to the
1076 function being called. */
1079 if (!COMPLETE_TYPE_P (type)
1080 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1081 || (flag_stack_check == GENERIC_STACK_CHECK
1082 && compare_tree_int (TYPE_SIZE_UNIT (type),
1083 STACK_CHECK_MAX_VAR_SIZE) > 0))
1085 /* This is a variable-sized object. Make space on the stack for it. */
1087 rtx size_rtx = expr_size (args[i].tree_value);
1089 if (*old_stack_level == 0)
1091 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1092 *old_pending_adj = pending_stack_adjust;
1093 pending_stack_adjust = 0;
1096 copy = gen_rtx_MEM (BLKmode,
1097 allocate_dynamic_stack_space
1098 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1099 set_mem_attributes (copy, type, 1);
1102 copy = assign_temp (type, 0, 1, 0);
1104 store_expr (args[i].tree_value, copy, 0, false);
1106 /* Just change the const function to pure and then let
1107 the next test clear the pure based on callee_copies. */
1109 if (*ecf_flags & ECF_CONST)
1111 *ecf_flags &= ~ECF_CONST;
1112 *ecf_flags |= ECF_PURE;
1115 if (!callee_copies && *ecf_flags & ECF_PURE)
1116 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1119 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1120 type = TREE_TYPE (args[i].tree_value);
1121 *may_tailcall = false;
1125 mode = TYPE_MODE (type);
1126 unsignedp = TYPE_UNSIGNED (type);
1128 if (targetm.calls.promote_function_args (fndecl
1129 ? TREE_TYPE (fndecl)
1131 mode = promote_mode (type, mode, &unsignedp, 1);
1133 args[i].unsignedp = unsignedp;
1134 args[i].mode = mode;
1136 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1137 argpos < n_named_args);
1138 #ifdef FUNCTION_INCOMING_ARG
1139 /* If this is a sibling call and the machine has register windows, the
1140 register window has to be unwound before calling the routine, so
1141 arguments have to go into the incoming registers. */
1142 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1143 argpos < n_named_args);
1145 args[i].tail_call_reg = args[i].reg;
1150 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1151 argpos < n_named_args);
1153 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1155 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1156 it means that we are to pass this arg in the register(s) designated
1157 by the PARALLEL, but also to pass it in the stack. */
1158 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1159 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1160 args[i].pass_on_stack = 1;
1162 /* If this is an addressable type, we must preallocate the stack
1163 since we must evaluate the object into its final location.
1165 If this is to be passed in both registers and the stack, it is simpler to preallocate. */
1167 if (TREE_ADDRESSABLE (type)
1168 || (args[i].pass_on_stack && args[i].reg != 0))
1169 *must_preallocate = 1;
1171 /* Compute the stack-size of this argument. */
1172 if (args[i].reg == 0 || args[i].partial != 0
1173 || reg_parm_stack_space > 0
1174 || args[i].pass_on_stack)
1175 locate_and_pad_parm (mode, type,
1176 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1181 args[i].pass_on_stack ? 0 : args[i].partial,
1182 fndecl, args_size, &args[i].locate);
1183 #ifdef BLOCK_REG_PADDING
1185 /* The argument is passed entirely in registers. See at which
1186 end it should be padded. */
1187 args[i].locate.where_pad =
1188 BLOCK_REG_PADDING (mode, type,
1189 int_size_in_bytes (type) <= UNITS_PER_WORD);
1192 /* Update ARGS_SIZE, the total stack space for args so far. */
1194 args_size->constant += args[i].locate.size.constant;
1195 if (args[i].locate.size.var)
1196 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1198 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1199 have been used, etc. */
1201 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1202 argpos < n_named_args);
1206 /* Update ARGS_SIZE to contain the total size for the argument block.
1207 Return the original constant component of the argument block's size.
1209 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1210 for arguments passed in registers. */
1213 compute_argument_block_size (int reg_parm_stack_space,
1214 struct args_size *args_size,
1215 tree fndecl ATTRIBUTE_UNUSED,
1216 tree fntype ATTRIBUTE_UNUSED,
1217 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1219 int unadjusted_args_size = args_size->constant;
1221 /* For accumulate outgoing args mode we don't need to align, since the frame
1222 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1223 backends from generating misaligned frame sizes. */
1224 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1225 preferred_stack_boundary = STACK_BOUNDARY;
1227 /* Compute the actual size of the argument block required. The variable
1228 and constant sizes must be combined, the size may have to be rounded,
1229 and there may be a minimum required size. */
1233 args_size->var = ARGS_SIZE_TREE (*args_size);
1234 args_size->constant = 0;
1236 preferred_stack_boundary /= BITS_PER_UNIT;
1237 if (preferred_stack_boundary > 1)
1239 /* We don't handle this case yet. To handle it correctly we have
1240 to add the delta, round and subtract the delta.
1241 Currently no machine description requires this support. */
1242 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1243 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1246 if (reg_parm_stack_space > 0)
1249 = size_binop (MAX_EXPR, args_size->var,
1250 ssize_int (reg_parm_stack_space));
1252 /* The area corresponding to register parameters is not to count in
1253 the size of the block we need. So make the adjustment. */
1254 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1256 = size_binop (MINUS_EXPR, args_size->var,
1257 ssize_int (reg_parm_stack_space));
1262 preferred_stack_boundary /= BITS_PER_UNIT;
1263 if (preferred_stack_boundary < 1)
1264 preferred_stack_boundary = 1;
1265 args_size->constant = (((args_size->constant
1266 + stack_pointer_delta
1267 + preferred_stack_boundary - 1)
1268 / preferred_stack_boundary
1269 * preferred_stack_boundary)
1270 - stack_pointer_delta);
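/* Worked example with hypothetical values: args_size->constant = 20,
   stack_pointer_delta = 4 and a 16-byte preferred boundary give
   ((20 + 4 + 15) / 16) * 16 - 4 = 32 - 4 = 28, so after pushing 28 bytes
   the total outstanding adjustment (4 + 28 = 32) remains a multiple of
   the boundary.  */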
1272 args_size->constant = MAX (args_size->constant,
1273 reg_parm_stack_space);
1275 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1276 args_size->constant -= reg_parm_stack_space;
1278 return unadjusted_args_size;
1281 /* Precompute parameters as needed for a function call.
1283 FLAGS is mask of ECF_* constants.
1285 NUM_ACTUALS is the number of arguments.
1287 ARGS is an array containing information for each argument; this
1288 routine fills in the INITIAL_VALUE and VALUE fields for each
1289 precomputed argument. */
1292 precompute_arguments (int num_actuals, struct arg_data *args)
1296 /* If this is a libcall, then precompute all arguments so that we do not
1297 get extraneous instructions emitted as part of the libcall sequence. */
1299 /* If we preallocated the stack space, and some arguments must be passed
1300 on the stack, then we must precompute any parameter which contains a
1301 function call which will store arguments on the stack.
1302 Otherwise, evaluating the parameter may clobber previous parameters
1303 which have already been stored into the stack. (we have code to avoid
1304 such a case by saving the outgoing stack arguments, but it results in worse code). */
1306 if (!ACCUMULATE_OUTGOING_ARGS)
1309 for (i = 0; i < num_actuals; i++)
1311 enum machine_mode mode;
1313 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1316 /* If this is an addressable type, we cannot pre-evaluate it. */
1317 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1319 args[i].initial_value = args[i].value
1320 = expand_normal (args[i].tree_value);
1322 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1323 if (mode != args[i].mode)
1326 = convert_modes (args[i].mode, mode,
1327 args[i].value, args[i].unsignedp);
1328 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1329 /* CSE will replace this only if it contains args[i].value
1330 pseudo, so convert it down to the declared mode using a SUBREG. */
1332 if (REG_P (args[i].value)
1333 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1335 args[i].initial_value
1336 = gen_lowpart_SUBREG (mode, args[i].value);
1337 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1338 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1346 /* Given the current state of MUST_PREALLOCATE and information about
1347 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1348 compute and return the final value for MUST_PREALLOCATE. */
1351 finalize_must_preallocate (int must_preallocate, int num_actuals,
1352 struct arg_data *args, struct args_size *args_size)
1354 /* See if we have or want to preallocate stack space.
1356 If we would have to push a partially-in-regs parm
1357 before other stack parms, preallocate stack space instead.
1359 If the size of some parm is not a multiple of the required stack
1360 alignment, we must preallocate.
1362 If the total size of arguments that would otherwise create a copy in
1363 a temporary (such as a CALL) is more than half the total argument list
1364 size, preallocation is faster.
1366 Another reason to preallocate is if we have a machine (like the m88k)
1367 where stack alignment is required to be maintained between every
1368 pair of insns, not just when the call is made. However, we assume here
1369 that such machines either do not have push insns (and hence preallocation
1370 would occur anyway) or the problem is taken care of with PUSH_ROUNDING. */
1373 if (! must_preallocate)
1375 int partial_seen = 0;
1376 int copy_to_evaluate_size = 0;
1379 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1381 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1383 else if (partial_seen && args[i].reg == 0)
1384 must_preallocate = 1;
1386 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1387 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1388 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1389 || TREE_CODE (args[i].tree_value) == COND_EXPR
1390 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1391 copy_to_evaluate_size
1392 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1395 if (copy_to_evaluate_size * 2 >= args_size->constant
1396 && args_size->constant > 0)
1397 must_preallocate = 1;
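/* Illustration with made-up sizes: if the BLKmode arguments that would
   need a temporary (CALL_EXPRs and the like) amount to 24 bytes of a
   40-byte argument block, then 24 * 2 >= 40 and preallocation is chosen,
   since evaluating directly into preallocated slots is assumed cheaper
   than copying more than half of the block through temporaries.  */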
1399 return must_preallocate;
1402 /* If we preallocated stack space, compute the address of each argument
1403 and store it into the ARGS array.
1405 We need not ensure it is a valid memory address here; it will be
1406 validized when it is used.
1408 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1411 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1415 rtx arg_reg = argblock;
1416 int i, arg_offset = 0;
1418 if (GET_CODE (argblock) == PLUS)
1419 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1421 for (i = 0; i < num_actuals; i++)
1423 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1424 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1426 unsigned int align, boundary;
1427 unsigned int units_on_stack = 0;
1428 enum machine_mode partial_mode = VOIDmode;
1430 /* Skip this parm if it will not be passed on the stack. */
1431 if (! args[i].pass_on_stack
1433 && args[i].partial == 0)
1436 if (CONST_INT_P (offset))
1437 addr = plus_constant (arg_reg, INTVAL (offset));
1439 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1441 addr = plus_constant (addr, arg_offset);
1443 if (args[i].partial != 0)
1445 /* Only part of the parameter is being passed on the stack.
1446 Generate a simple memory reference of the correct size. */
1447 units_on_stack = args[i].locate.size.constant;
1448 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1450 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1451 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1455 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1456 set_mem_attributes (args[i].stack,
1457 TREE_TYPE (args[i].tree_value), 1);
1459 align = BITS_PER_UNIT;
1460 boundary = args[i].locate.boundary;
1461 if (args[i].locate.where_pad != downward)
1463 else if (CONST_INT_P (offset))
1465 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1466 align = align & -align;
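/* Example of the computation above, with hypothetical numbers: for a
   64-bit slot boundary and a 4-byte offset, align = (4 * 8) | 64 = 96 and
   96 & -96 = 32; isolating the lowest set bit yields the largest
   power-of-two alignment (32 bits here) that both the boundary and the
   offset guarantee.  */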
1468 set_mem_align (args[i].stack, align);
1470 if (CONST_INT_P (slot_offset))
1471 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1473 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1475 addr = plus_constant (addr, arg_offset);
1477 if (args[i].partial != 0)
1479 /* Only part of the parameter is being passed on the stack.
1480 Generate a simple memory reference of the correct size. */
1482 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1483 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1487 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1488 set_mem_attributes (args[i].stack_slot,
1489 TREE_TYPE (args[i].tree_value), 1);
1491 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1493 /* Function incoming arguments may overlap with sibling call
1494 outgoing arguments and we cannot allow reordering of reads
1495 from function arguments with stores to outgoing arguments
1496 of sibling calls. */
1497 set_mem_alias_set (args[i].stack, 0);
1498 set_mem_alias_set (args[i].stack_slot, 0);
1503 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1504 in a call instruction.
1506 FNDECL is the tree node for the target function. For an indirect call
1507 FNDECL will be NULL_TREE.
1509 ADDR is the operand 0 of CALL_EXPR for this call. */
1512 rtx_for_function_call (tree fndecl, tree addr)
1516 /* Get the function to call, in the form of RTL. */
1519 /* If this is the first use of the function, see if we need to
1520 make an external definition for it. */
1521 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1523 assemble_external (fndecl);
1524 TREE_USED (fndecl) = 1;
1527 /* Get a SYMBOL_REF rtx for the function address. */
1528 funexp = XEXP (DECL_RTL (fndecl), 0);
1531 /* Generate an rtx (probably a pseudo-register) for the address. */
1534 funexp = expand_normal (addr);
1535 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1540 /* Return true if and only if SIZE storage units (usually bytes)
1541 starting from address ADDR overlap with already clobbered argument
1542 area. This function is used to determine if we should give up a sibcall. */
1546 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1550 if (addr == crtl->args.internal_arg_pointer)
1552 else if (GET_CODE (addr) == PLUS
1553 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1554 && CONST_INT_P (XEXP (addr, 1)))
1555 i = INTVAL (XEXP (addr, 1));
1556 /* Return true for arg pointer based indexed addressing. */
1557 else if (GET_CODE (addr) == PLUS
1558 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1559 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
1564 #ifdef ARGS_GROW_DOWNWARD
1569 unsigned HOST_WIDE_INT k;
1571 for (k = 0; k < size; k++)
1572 if (i + k < stored_args_map->n_bits
1573 && TEST_BIT (stored_args_map, i + k))
1580 /* Do the register loads required for any wholly-register parms or any
1581 parms which are passed both on the stack and in a register. Their
1582 expressions were already evaluated.
1584 Mark all register-parms as living through the call, putting these USE
1585 insns in the CALL_INSN_FUNCTION_USAGE field.
1587 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1588 checking, setting *SIBCALL_FAILURE if appropriate. */
1591 load_register_parameters (struct arg_data *args, int num_actuals,
1592 rtx *call_fusage, int flags, int is_sibcall,
1593 int *sibcall_failure)
1597 for (i = 0; i < num_actuals; i++)
1599 rtx reg = ((flags & ECF_SIBCALL)
1600 ? args[i].tail_call_reg : args[i].reg);
1603 int partial = args[i].partial;
1606 rtx before_arg = get_last_insn ();
1607 /* Set non-negative if we must move a word at a time, even if
1608 just one word (e.g., partial == 4 && mode == DFmode). Set
1609 to -1 if we just use a normal move insn. This value can be
1610 zero if the argument is a zero size structure. */
1612 if (GET_CODE (reg) == PARALLEL)
1616 gcc_assert (partial % UNITS_PER_WORD == 0);
1617 nregs = partial / UNITS_PER_WORD;
1619 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1621 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1622 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1625 size = GET_MODE_SIZE (args[i].mode);
1627 /* Handle calls that pass values in multiple non-contiguous
1628 locations. The Irix 6 ABI has examples of this. */
1630 if (GET_CODE (reg) == PARALLEL)
1631 emit_group_move (reg, args[i].parallel_value);
1633 /* If simple case, just do move. If normal partial, store_one_arg
1634 has already loaded the register for us. In all other cases,
1635 load the register(s) from memory. */
1637 else if (nregs == -1)
1639 emit_move_insn (reg, args[i].value);
1640 #ifdef BLOCK_REG_PADDING
1641 /* Handle the case where we have a value that needs shifting
1642 up to the msb. E.g. a QImode value and we're padding
1643 upward on a BYTES_BIG_ENDIAN machine. */
1644 if (size < UNITS_PER_WORD
1645 && (args[i].locate.where_pad
1646 == (BYTES_BIG_ENDIAN ? upward : downward)))
1649 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1651 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1652 report the whole reg as used. Strictly speaking, the
1653 call only uses SIZE bytes at the msb end, but it doesn't
1654 seem worth generating rtl to say that. */
1655 reg = gen_rtx_REG (word_mode, REGNO (reg));
1656 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1657 build_int_cst (NULL_TREE, shift),
1660 emit_move_insn (reg, x);
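/* Hypothetical example of the shift above: a QImode argument (size 1)
   padded upward in a word register on a 64-bit BYTES_BIG_ENDIAN target
   gets shift = (8 - 1) * 8 = 56, moving the byte into the most
   significant end of the register where the callee expects it.  */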
1665 /* If we have pre-computed the values to put in the registers in
1666 the case of non-aligned structures, copy them in now. */
1668 else if (args[i].n_aligned_regs != 0)
1669 for (j = 0; j < args[i].n_aligned_regs; j++)
1670 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1671 args[i].aligned_regs[j]);
1673 else if (partial == 0 || args[i].pass_on_stack)
1675 rtx mem = validize_mem (args[i].value);
1677 /* Check for overlap with already clobbered argument area. */
1679 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1681 *sibcall_failure = 1;
1683 /* Handle a BLKmode that needs shifting. */
1684 if (nregs == 1 && size < UNITS_PER_WORD
1685 #ifdef BLOCK_REG_PADDING
1686 && args[i].locate.where_pad == downward
1692 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1693 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1694 rtx x = gen_reg_rtx (word_mode);
1695 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1696 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1699 emit_move_insn (x, tem);
1700 x = expand_shift (dir, word_mode, x,
1701 build_int_cst (NULL_TREE, shift),
1704 emit_move_insn (ri, x);
1707 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1710 /* When a parameter is a block, and perhaps in other cases, it is
1711 possible that it did a load from an argument slot that was
1712 already clobbered. */
1714 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1715 *sibcall_failure = 1;
1717 /* Handle calls that pass values in multiple non-contiguous
1718 locations. The Irix 6 ABI has examples of this. */
1719 if (GET_CODE (reg) == PARALLEL)
1720 use_group_regs (call_fusage, reg);
1721 else if (nregs == -1)
1722 use_reg (call_fusage, reg);
1724 use_regs (call_fusage, REGNO (reg), nregs);
1729 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1730 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1731 bytes, then we would need to push some additional bytes to pad the
1732 arguments. So, we compute an adjustment to the stack pointer for an
1733 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1734 bytes. Then, when the arguments are pushed the stack will be perfectly
1735 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1736 be popped after the call. Returns the adjustment. */
1739 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1740 struct args_size *args_size,
1741 unsigned int preferred_unit_stack_boundary)
1743 /* The number of bytes to pop so that the stack will be
1744 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1745 HOST_WIDE_INT adjustment;
1746 /* The alignment of the stack after the arguments are pushed, if we
1747 just pushed the arguments without adjusting the stack here. */
1748 unsigned HOST_WIDE_INT unadjusted_alignment;
1750 unadjusted_alignment
1751 = ((stack_pointer_delta + unadjusted_args_size)
1752 % preferred_unit_stack_boundary);
1754 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1755 as possible -- leaving just enough left to cancel out the
1756 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1757 PENDING_STACK_ADJUST is non-negative, and congruent to
1758 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1760 /* Begin by trying to pop all the bytes. */
1761 unadjusted_alignment
1762 = (unadjusted_alignment
1763 - (pending_stack_adjust % preferred_unit_stack_boundary));
1764 adjustment = pending_stack_adjust;
1765 /* Push enough additional bytes that the stack will be aligned
1766 after the arguments are pushed. */
1767 if (preferred_unit_stack_boundary > 1)
1769 if (unadjusted_alignment > 0)
1770 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1772 adjustment += unadjusted_alignment;
1775 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1776 bytes after the call. The right number is the entire
1777 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1778 by the arguments in the first place. */
1780 = pending_stack_adjust - adjustment + unadjusted_args_size;
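/* Worked example with hypothetical values: stack_pointer_delta = 0,
   unadjusted_args_size = 12, a 16-byte boundary and pending_stack_adjust
   = 32.  UNADJUSTED_ALIGNMENT is (0 + 12) % 16 = 12, so ADJUSTMENT
   becomes 32 - (16 - 12) = 28: pop 28 bytes now, push 12 bytes of
   arguments (leaving the stack 16-byte aligned at the call) and record
   32 - 28 + 12 = 16 bytes to pop afterwards.  */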
1785 /* Scan X expression if it does not dereference any argument slots
1786 we already clobbered by tail call arguments (as noted in stored_args_map bitmap).
1788 Return nonzero if X expression dereferences such argument slots, zero otherwise. */
1792 check_sibcall_argument_overlap_1 (rtx x)
1801 code = GET_CODE (x);
1804 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1805 GET_MODE_SIZE (GET_MODE (x)));
1807 /* Scan all subexpressions. */
1808 fmt = GET_RTX_FORMAT (code);
1809 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1813 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1816 else if (*fmt == 'E')
1818 for (j = 0; j < XVECLEN (x, i); j++)
1819 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1826 /* Scan sequence after INSN if it does not dereference any argument slots
1827 we already clobbered by tail call arguments (as noted in stored_args_map
1828 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
1829 stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
1830 should be 0). Return nonzero if sequence after INSN dereferences such argument
1831 slots, zero otherwise. */
1834 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1838 if (insn == NULL_RTX)
1839 insn = get_insns ();
1841 insn = NEXT_INSN (insn);
1843 for (; insn; insn = NEXT_INSN (insn))
1845 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1848 if (mark_stored_args_map)
1850 #ifdef ARGS_GROW_DOWNWARD
1851 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1853 low = arg->locate.slot_offset.constant;
1856 for (high = low + arg->locate.size.constant; low < high; low++)
1857 SET_BIT (stored_args_map, low);
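/* Illustration with made-up numbers: an argument at slot offset 8 with a
   4-byte size sets bits 8..11 of stored_args_map here; if the insns
   expanded for a later argument read any of those bytes of the incoming
   argument area, check_sibcall_argument_overlap_1 reports the overlap and
   the sibling call is abandoned.  */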
1859 return insn != NULL_RTX;
1862 /* Given that a function returns a value of mode MODE at the most
1863 significant end of hard register VALUE, shift VALUE left or right
1864 as specified by LEFT_P. Return true if some action was needed. */
1867 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1869 HOST_WIDE_INT shift;
1871 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1872 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1876 /* Use ashr rather than lshr for right shifts. This is for the benefit
1877 of the MIPS port, which requires SImode values to be sign-extended
1878 when stored in 64-bit registers. */
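/* Hypothetical example: an SImode value returned in the most significant
   half of a 64-bit hard register gives shift = 64 - 32 = 32; with LEFT_P
   false, an arithmetic right shift by 32 moves it into the low half,
   sign-extending as described above.  */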
1879 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1880 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1885 /* If X is a likely-spilled register value, copy it to a pseudo
1886 register and return that register. Return X otherwise. */
1889 avoid_likely_spilled_reg (rtx x)
1894 && HARD_REGISTER_P (x)
1895 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
1897 /* Make sure that we generate a REG rather than a CONCAT.
1898 Moves into CONCATs can need nontrivial instructions,
1899 and the whole point of this function is to avoid
1900 using the hard register directly in such a situation. */
1901 generating_concat_p = 0;
1902 new_rtx = gen_reg_rtx (GET_MODE (x));
1903 generating_concat_p = 1;
1904 emit_move_insn (new_rtx, x);
1910 /* Generate all the code for a CALL_EXPR exp
1911 and return an rtx for its value.
1912 Store the value in TARGET (specified as an rtx) if convenient.
1913 If the value is stored in TARGET then TARGET is returned.
1914 If IGNORE is nonzero, then we ignore the value of the function call. */
1917 expand_call (tree exp, rtx target, int ignore)
1919 /* Nonzero if we are currently expanding a call. */
1920 static int currently_expanding_call = 0;
1922 /* RTX for the function to be called. */
1924 /* Sequence of insns to perform a normal "call". */
1925 rtx normal_call_insns = NULL_RTX;
1926 /* Sequence of insns to perform a tail "call". */
1927 rtx tail_call_insns = NULL_RTX;
1928 /* Data type of the function. */
1930 tree type_arg_types;
1931 /* Declaration of the function being called,
1932 or 0 if the function is computed (not known by name). */
1934 /* The type of the function being called. */
1936 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1939 /* Register in which non-BLKmode value will be returned,
1940 or 0 if no value or if value is BLKmode. */
1942 /* Address where we should return a BLKmode value;
1943 0 if value not BLKmode. */
1944 rtx structure_value_addr = 0;
1945 /* Nonzero if that address is being passed by treating it as
1946 an extra, implicit first parameter. Otherwise,
1947 it is passed by being copied directly into struct_value_rtx. */
1948 int structure_value_addr_parm = 0;
1949 /* Holds the value of implicit argument for the struct value. */
1950 tree structure_value_addr_value = NULL_TREE;
1951 /* Size of aggregate value wanted, or zero if none wanted
1952 or if we are using the non-reentrant PCC calling convention
1953 or expecting the value in registers. */
1954 HOST_WIDE_INT struct_value_size = 0;
1955 /* Nonzero if called function returns an aggregate in memory PCC style,
1956 by returning the address of where to find it. */
1957 int pcc_struct_value = 0;
1958 rtx struct_value = 0;
1960 /* Number of actual parameters in this call, including struct value addr. */
1962 /* Number of named args. Args after this are anonymous ones
1963 and they must all go on the stack. */
1965 /* Number of complex actual arguments that need to be split. */
1966 int num_complex_actuals = 0;
1968 /* Vector of information about each argument.
1969 Arguments are numbered in the order they will be pushed,
1970 not the order they are written. */
1971 struct arg_data *args;
1973 /* Total size in bytes of all the stack-parms scanned so far. */
1974 struct args_size args_size;
1975 struct args_size adjusted_args_size;
1976 /* Size of arguments before any adjustments (such as rounding). */
1977 int unadjusted_args_size;
1978 /* Data on reg parms scanned so far. */
1979 CUMULATIVE_ARGS args_so_far;
1980 /* Nonzero if a reg parm has been scanned. */
1982 /* Nonzero if this is an indirect function call. */
1984 /* Nonzero if we must avoid push-insns in the args for this call.
1985 If stack space is allocated for register parameters, but not by the
1986 caller, then it is preallocated in the fixed part of the stack frame.
1987 So the entire argument block must then be preallocated (i.e., we
1988 ignore PUSH_ROUNDING in that case). */
1990 int must_preallocate = !PUSH_ARGS;
1992 /* Size of the stack reserved for parameter registers. */
1993 int reg_parm_stack_space = 0;
1995 /* Address of space preallocated for stack parms
1996 (on machines that lack push insns), or 0 if space not preallocated. */
1999 /* Mask of ECF_ flags. */
2001 #ifdef REG_PARM_STACK_SPACE
2002 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
2004 int low_to_save, high_to_save;
2005 rtx save_area = 0; /* Place that it is saved */
2008 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2009 char *initial_stack_usage_map = stack_usage_map;
2010 char *stack_usage_map_buf = NULL;
2012 int old_stack_allocated;
2014 /* State variables to track stack modifications. */
2015 rtx old_stack_level = 0;
2016 int old_stack_arg_under_construction = 0;
2017 int old_pending_adj = 0;
2018 int old_inhibit_defer_pop = inhibit_defer_pop;
2020 /* Some stack pointer alterations we make are performed via
2021 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2022 which we then also need to save/restore along the way. */
2023 int old_stack_pointer_delta = 0;
2026 tree p = CALL_EXPR_FN (exp);
2027 tree addr = CALL_EXPR_FN (exp);
2029 /* The alignment of the stack, in bits. */
2030 unsigned HOST_WIDE_INT preferred_stack_boundary;
2031 /* The alignment of the stack, in bytes. */
2032 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2033 /* The static chain value to use for this call. */
2034 rtx static_chain_value;
2035 /* See if this is a "nothrow" function call. */
2036 if (TREE_NOTHROW (exp))
2037 flags |= ECF_NOTHROW;
2039 /* See if we can find a DECL-node for the actual function, and get the
2040 function attributes (flags) from the function decl or type node. */
2041 fndecl = get_callee_fndecl (exp);
2044 fntype = TREE_TYPE (fndecl);
2045 flags |= flags_from_decl_or_type (fndecl);
2049 fntype = TREE_TYPE (TREE_TYPE (p));
2050 flags |= flags_from_decl_or_type (fntype);
2053 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2055 /* Warn if this value is an aggregate type,
2056 regardless of which calling convention we are using for it. */
2057 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2058 warning (OPT_Waggregate_return, "function call has aggregate value");
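/* Illustrative trigger (added; not part of the original source): with
   -Waggregate-return, expanding a call such as

     struct point { int x, y; };
     struct point get_origin (void);
     struct point p = get_origin ();

   reaches this warning, since TREE_TYPE (exp) is an aggregate regardless
   of how the target actually returns it.  */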
2060 /* If the result of a non-looping pure or const function call is
2061 ignored (or void), and none of its arguments are volatile, we can
2062 avoid expanding the call and just evaluate the arguments for
2064 if ((flags & (ECF_CONST | ECF_PURE))
2065 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2066 && (ignore || target == const0_rtx
2067 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2069 bool volatilep = false;
2071 call_expr_arg_iterator iter;
2073 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2074 if (TREE_THIS_VOLATILE (arg))
2082 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2083 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
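/* Illustrative case (added; not part of the original source): for a call
   whose value is ignored, e.g.

     extern int f (int) __attribute__ ((const));
     void g (int x) { f (x); }

   the call itself can be skipped; the argument expressions are expanded,
   as done just above, purely for any side effects they may have.  */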
2088 #ifdef REG_PARM_STACK_SPACE
2089 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2092 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2093 && reg_parm_stack_space > 0 && PUSH_ARGS)
2094 must_preallocate = 1;
2096 /* Set up a place to return a structure. */
2098 /* Cater to broken compilers. */
2099 if (aggregate_value_p (exp, (!fndecl ? fntype : fndecl)))
2101 /* This call returns a big structure. */
2102 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2104 #ifdef PCC_STATIC_STRUCT_RETURN
2106 pcc_struct_value = 1;
2108 #else /* not PCC_STATIC_STRUCT_RETURN */
2110 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2112 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2113 structure_value_addr = XEXP (target, 0);
2116 /* For variable-sized objects, we must be called with a target
2117 specified. If we were to allocate space on the stack here,
2118 we would have no way of knowing when to free it. */
2119 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2121 mark_temp_addr_taken (d);
2122 structure_value_addr = XEXP (d, 0);
2126 #endif /* not PCC_STATIC_STRUCT_RETURN */
2129 /* Figure out the amount to which the stack should be aligned. */
2130 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2133 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2134 /* Without automatic stack alignment, we can't increase preferred
2135 stack boundary. With automatic stack alignment, it is
2136 unnecessary since unless we can guarantee that all callers will
2137 align the outgoing stack properly, the callee has to align its stack anyway. */
2140 && i->preferred_incoming_stack_boundary
2141 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2142 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2145 /* Operand 0 is a pointer-to-function; get the type of the function. */
2146 funtype = TREE_TYPE (addr);
2147 gcc_assert (POINTER_TYPE_P (funtype));
2148 funtype = TREE_TYPE (funtype);
2150 /* Count whether there are actual complex arguments that need to be split
2151 into their real and imaginary parts. Munge the type_arg_types
2152 appropriately here as well. */
2153 if (targetm.calls.split_complex_arg)
2155 call_expr_arg_iterator iter;
2157 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2159 tree type = TREE_TYPE (arg);
2160 if (type && TREE_CODE (type) == COMPLEX_TYPE
2161 && targetm.calls.split_complex_arg (type))
2162 num_complex_actuals++;
2164 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2167 type_arg_types = TYPE_ARG_TYPES (funtype);
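/* Illustrative example (added; not part of the original source): if the
   target's split_complex_arg hook accepts _Complex double, a declaration
   such as

     void f (_Complex double z, int n);

   is treated here as if it were

     void f (double z_re, double z_im, int n);

   num_complex_actuals counts one such actual, and type_arg_types receives
   the rewritten parameter list from split_complex_types.  */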
2169 if (flags & ECF_MAY_BE_ALLOCA)
2170 cfun->calls_alloca = 1;
2172 /* If struct_value_rtx is 0, it means pass the address
2173 as if it were an extra parameter. Put the argument expression
2174 in structure_value_addr_value. */
2175 if (structure_value_addr && struct_value == 0)
2177 /* If structure_value_addr is a REG other than
2178 virtual_outgoing_args_rtx, we can always use it. If it
2179 is not a REG, we must always copy it into a register.
2180 If it is virtual_outgoing_args_rtx, we must copy it to another
2181 register in some cases. */
2182 rtx temp = (!REG_P (structure_value_addr)
2183 || (ACCUMULATE_OUTGOING_ARGS
2184 && stack_arg_under_construction
2185 && structure_value_addr == virtual_outgoing_args_rtx)
2186 ? copy_addr_to_reg (convert_memory_address
2187 (Pmode, structure_value_addr))
2188 : structure_value_addr);
2190 structure_value_addr_value =
2191 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2192 structure_value_addr_parm = 1;
2195 /* Count the arguments and set NUM_ACTUALS. */
2197 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2199 /* Compute number of named args.
2200 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2202 if (type_arg_types != 0)
2204 = (list_length (type_arg_types)
2205 /* Count the struct value address, if it is passed as a parm. */
2206 + structure_value_addr_parm);
2208 /* If we know nothing, treat all args as named. */
2209 n_named_args = num_actuals;
2211 /* Start updating where the next arg would go.
2213 On some machines (such as the PA) indirect calls have a different
2214 calling convention than normal calls. The fourth argument in
2215 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not. */
2217 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2219 /* Now possibly adjust the number of named args.
2220 Normally, don't include the last named arg if anonymous args follow.
2221 We do include the last named arg if
2222 targetm.calls.strict_argument_naming() returns nonzero.
2223 (If no anonymous args follow, the result of list_length is actually
2224 one too large. This is harmless.)
2226 If targetm.calls.pretend_outgoing_varargs_named() returns
2227 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2228 this machine will be able to place unnamed args that were passed
2229 in registers into the stack. So treat all args as named. This
2230 allows the insns emitted for a specific argument list to be
2231 independent of the function declaration.
2233 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2234 we do not have any reliable way to pass unnamed args in
2235 registers, so we must force them into memory. */
2237 if (type_arg_types != 0
2238 && targetm.calls.strict_argument_naming (&args_so_far))
2240 else if (type_arg_types != 0
2241 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2242 /* Don't include the last named arg. */
2245 /* Treat all args as named. */
2246 n_named_args = num_actuals;
2248 /* Make a vector to hold all the information about each arg. */
2249 args = XALLOCAVEC (struct arg_data, num_actuals);
2250 memset (args, 0, num_actuals * sizeof (struct arg_data));
2252 /* Build up entries in the ARGS array, compute the size of the
2253 arguments into ARGS_SIZE, etc. */
2254 initialize_argument_information (num_actuals, args, &args_size,
2256 structure_value_addr_value, fndecl, fntype,
2257 &args_so_far, reg_parm_stack_space,
2258 &old_stack_level, &old_pending_adj,
2259 &must_preallocate, &flags,
2260 &try_tail_call, CALL_FROM_THUNK_P (exp));
2263 must_preallocate = 1;
2265 /* Now make final decision about preallocating stack space. */
2266 must_preallocate = finalize_must_preallocate (must_preallocate,
2270 /* If the structure value address will reference the stack pointer, we
2271 must stabilize it. We don't need to do this if we know that we are
2272 not going to adjust the stack pointer in processing this call. */
2274 if (structure_value_addr
2275 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2276 || reg_mentioned_p (virtual_outgoing_args_rtx,
2277 structure_value_addr))
2279 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2280 structure_value_addr = copy_to_reg (structure_value_addr);
2282 /* Tail calls can make things harder to debug, and we've traditionally
2283 pushed these optimizations into -O2. Don't try if we're already
2284 expanding a call, as that means we're an argument. Don't try if
2285 there are cleanups, as we know there is code to follow the call. */
2287 if (currently_expanding_call++ != 0
2288 || !flag_optimize_sibling_calls
2290 || lookup_expr_eh_region (exp) >= 0
2291 || dbg_cnt (tail_call) == false)
2294 /* Remaining reasons for the tail call optimization to fail. */
2296 #ifdef HAVE_sibcall_epilogue
2297 !HAVE_sibcall_epilogue
2302 /* Doing sibling call optimization needs some work, since
2303 structure_value_addr can be allocated on the stack.
2304 It does not seem worth the effort since few optimizable
2305 sibling calls will return a structure. */
2306 || structure_value_addr != NULL_RTX
2307 #ifdef REG_PARM_STACK_SPACE
2308 /* If the outgoing reg parm stack space changes, we cannot do a sibcall. */
2309 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2310 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2311 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2313 /* Check whether the target is able to optimize the call into a sibcall. */
2315 || !targetm.function_ok_for_sibcall (fndecl, exp)
2316 /* Functions that do not return exactly once may not be sibcall optimized. */
2318 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2319 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2320 /* If the called function is nested in the current one, it might access
2321 some of the caller's arguments, but could clobber them beforehand if
2322 the argument areas are shared. */
2323 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2324 /* If this function requires more stack slots than the current
2325 function, we cannot change it into a sibling call.
2326 crtl->args.pretend_args_size is not part of the
2327 stack allocated by our caller. */
2328 || args_size.constant > (crtl->args.size
2329 - crtl->args.pretend_args_size)
2330 /* If the callee pops its own arguments, then it must pop exactly
2331 the same number of arguments as the current function. */
2332 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2333 != RETURN_POPS_ARGS (current_function_decl,
2334 TREE_TYPE (current_function_decl),
2336 || !lang_hooks.decls.ok_for_sibcall (fndecl))
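/* Illustrative disqualifier (added; not part of the original source): a
   caller such as

     struct big g (void);
     struct big f (void) { return g (); }

   ends up with structure_value_addr != NULL_RTX, so the condition above
   abandons the sibcall attempt and the call is expanded normally.  */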
2339 /* Check if caller and callee disagree in promotion of the function return value. */
2343 enum machine_mode caller_mode, caller_promoted_mode;
2344 enum machine_mode callee_mode, callee_promoted_mode;
2345 int caller_unsignedp, callee_unsignedp;
2346 tree caller_res = DECL_RESULT (current_function_decl);
2348 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2349 caller_mode = caller_promoted_mode = DECL_MODE (caller_res);
2350 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2351 callee_mode = callee_promoted_mode = TYPE_MODE (TREE_TYPE (funtype));
2352 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
2353 caller_promoted_mode
2354 = promote_mode (TREE_TYPE (caller_res), caller_mode,
2355 &caller_unsignedp, 1);
2356 if (targetm.calls.promote_function_return (funtype))
2357 callee_promoted_mode
2358 = promote_mode (TREE_TYPE (funtype), callee_mode,
2359 &callee_unsignedp, 1);
2360 if (caller_mode != VOIDmode
2361 && (caller_promoted_mode != callee_promoted_mode
2362 || ((caller_mode != caller_promoted_mode
2363 || callee_mode != callee_promoted_mode)
2364 && (caller_unsignedp != callee_unsignedp
2365 || GET_MODE_BITSIZE (caller_mode)
2366 < GET_MODE_BITSIZE (callee_mode)))))
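/* Illustrative mismatch (added; not part of the original source): if the
   caller's DECL_RESULT is a signed short promoted to SImode while FUNTYPE
   says the callee returns an unsigned short (also promoted to SImode),
   then caller_unsignedp != callee_unsignedp and the test above rejects the
   sibcall attempt, since the promoted bits would not agree.  */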
2370 /* Ensure current function's preferred stack boundary is at least
2371 what we need. Stack alignment may also increase the preferred stack boundary. */
2373 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2374 crtl->preferred_stack_boundary = preferred_stack_boundary;
2376 preferred_stack_boundary = crtl->preferred_stack_boundary;
2378 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2380 /* We want to make two insn chains; one for a sibling call, the other
2381 for a normal call. We will select one of the two chains after
2382 initial RTL generation is complete. */
2383 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2385 int sibcall_failure = 0;
2386 /* We want to emit any pending stack adjustments before the tail
2387 recursion "call". That way we know any adjustment after the tail
2388 recursion call can be ignored if we indeed use the tail call. */
2390 int save_pending_stack_adjust = 0;
2391 int save_stack_pointer_delta = 0;
2393 rtx before_call, next_arg_reg, after_args;
2397 /* State variables we need to save and restore between iterations. */
2399 save_pending_stack_adjust = pending_stack_adjust;
2400 save_stack_pointer_delta = stack_pointer_delta;
2403 flags &= ~ECF_SIBCALL;
2405 flags |= ECF_SIBCALL;
2407 /* Other state variables that we must reinitialize each time
2408 through the loop (that are not initialized by the loop itself). */
2412 /* Start a new sequence for the normal call case.
2414 From this point on, if the sibling call fails, we want to set
2415 sibcall_failure instead of continuing the loop. */
2418 /* Don't let pending stack adjusts add up to too much.
2419 Also, do all pending adjustments now if there is any chance
2420 this might be a call to alloca or if we are expanding a sibling call sequence.
2422 Also do the adjustments before a throwing call, otherwise
2423 exception handling can fail; PR 19225. */
2424 if (pending_stack_adjust >= 32
2425 || (pending_stack_adjust > 0
2426 && (flags & ECF_MAY_BE_ALLOCA))
2427 || (pending_stack_adjust > 0
2428 && flag_exceptions && !(flags & ECF_NOTHROW))
2430 do_pending_stack_adjust ();
2432 /* Precompute any arguments as needed. */
2434 precompute_arguments (num_actuals, args);
2436 /* Now we are about to start emitting insns that can be deleted
2437 if a libcall is deleted. */
2438 if (pass && (flags & ECF_MALLOC))
2441 if (pass == 0 && crtl->stack_protect_guard)
2442 stack_protect_epilogue ();
2444 adjusted_args_size = args_size;
2445 /* Compute the actual size of the argument block required. The variable
2446 and constant sizes must be combined, the size may have to be rounded,
2447 and there may be a minimum required size. When generating a sibcall
2448 pattern, do not round up, since we'll be re-using whatever space our caller provided. */
2450 unadjusted_args_size
2451 = compute_argument_block_size (reg_parm_stack_space,
2452 &adjusted_args_size,
2455 : preferred_stack_boundary));
2457 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2459 /* The argument block when performing a sibling call is the
2460 incoming argument block. */
2463 argblock = crtl->args.internal_arg_pointer;
2465 #ifdef STACK_GROWS_DOWNWARD
2466 = plus_constant (argblock, crtl->args.pretend_args_size);
2468 = plus_constant (argblock, -crtl->args.pretend_args_size);
2470 stored_args_map = sbitmap_alloc (args_size.constant);
2471 sbitmap_zero (stored_args_map);
2474 /* If we have no actual push instructions, or shouldn't use them,
2475 make space for all args right now. */
2476 else if (adjusted_args_size.var != 0)
2478 if (old_stack_level == 0)
2480 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2481 old_stack_pointer_delta = stack_pointer_delta;
2482 old_pending_adj = pending_stack_adjust;
2483 pending_stack_adjust = 0;
2484 /* stack_arg_under_construction says whether a stack arg is
2485 being constructed at the old stack level. Pushing the stack
2486 gets a clean outgoing argument block. */
2487 old_stack_arg_under_construction = stack_arg_under_construction;
2488 stack_arg_under_construction = 0;
2490 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2494 /* Note that we must go through the motions of allocating an argument
2495 block even if the size is zero because we may be storing args
2496 in the area reserved for register arguments, which may be part of the stack frame. */
2499 int needed = adjusted_args_size.constant;
2501 /* Store the maximum argument space used. It will be pushed by
2502 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
2505 if (needed > crtl->outgoing_args_size)
2506 crtl->outgoing_args_size = needed;
2508 if (must_preallocate)
2510 if (ACCUMULATE_OUTGOING_ARGS)
2512 /* Since the stack pointer will never be pushed, it is
2513 possible for the evaluation of a parm to clobber
2514 something we have already written to the stack.
2515 Since most function calls on RISC machines do not use
2516 the stack, this is uncommon, but must work correctly.
2518 Therefore, we save any area of the stack that was already
2519 written and that we are using. Here we set up to do this
2520 by making a new stack usage map from the old one. The
2521 actual save will be done by store_one_arg.
2523 Another approach might be to try to reorder the argument
2524 evaluations to avoid this conflicting stack usage. */
2526 /* Since we will be writing into the entire argument area,
2527 the map must be allocated for its entire size, not just
2528 the part that is the responsibility of the caller. */
2529 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2530 needed += reg_parm_stack_space;
2532 #ifdef ARGS_GROW_DOWNWARD
2533 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2536 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2539 if (stack_usage_map_buf)
2540 free (stack_usage_map_buf);
2541 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2542 stack_usage_map = stack_usage_map_buf;
2544 if (initial_highest_arg_in_use)
2545 memcpy (stack_usage_map, initial_stack_usage_map,
2546 initial_highest_arg_in_use);
2548 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2549 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2550 (highest_outgoing_arg_in_use
2551 - initial_highest_arg_in_use));
2554 /* The address of the outgoing argument list must not be
2555 copied to a register here, because argblock would be left
2556 pointing to the wrong place after the call to
2557 allocate_dynamic_stack_space below. */
2559 argblock = virtual_outgoing_args_rtx;
2563 if (inhibit_defer_pop == 0)
2565 /* Try to reuse some or all of the pending_stack_adjust
2566 to get this space. */
2568 = (combine_pending_stack_adjustment_and_call
2569 (unadjusted_args_size,
2570 &adjusted_args_size,
2571 preferred_unit_stack_boundary));
2573 /* combine_pending_stack_adjustment_and_call computes
2574 an adjustment before the arguments are allocated.
2575 Account for them and see whether or not the stack
2576 needs to go up or down. */
2577 needed = unadjusted_args_size - needed;
2581 /* We're releasing stack space. */
2582 /* ??? We can avoid any adjustment at all if we're
2583 already aligned. FIXME. */
2584 pending_stack_adjust = -needed;
2585 do_pending_stack_adjust ();
2589 /* We need to allocate space. We'll do that in
2590 push_block below. */
2591 pending_stack_adjust = 0;
2594 /* Special case this because overhead of `push_block' in
2595 this case is non-trivial. */
2597 argblock = virtual_outgoing_args_rtx;
2600 argblock = push_block (GEN_INT (needed), 0, 0);
2601 #ifdef ARGS_GROW_DOWNWARD
2602 argblock = plus_constant (argblock, needed);
2606 /* We only really need to call `copy_to_reg' in the case
2607 where push insns are going to be used to pass ARGBLOCK
2608 to a function call in ARGS. In that case, the stack
2609 pointer changes value from the allocation point to the
2610 call point, and hence the value of
2611 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2612 as well always do it. */
2613 argblock = copy_to_reg (argblock);
2618 if (ACCUMULATE_OUTGOING_ARGS)
2620 /* The save/restore code in store_one_arg handles all
2621 cases except one: a constructor call (including a C
2622 function returning a BLKmode struct) to initialize an argument. */
2624 if (stack_arg_under_construction)
2627 = GEN_INT (adjusted_args_size.constant
2628 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2629 : TREE_TYPE (fndecl))) ? 0
2630 : reg_parm_stack_space));
2631 if (old_stack_level == 0)
2633 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2635 old_stack_pointer_delta = stack_pointer_delta;
2636 old_pending_adj = pending_stack_adjust;
2637 pending_stack_adjust = 0;
2638 /* stack_arg_under_construction says whether a stack
2639 arg is being constructed at the old stack level.
2640 Pushing the stack gets a clean outgoing argument block. */
2642 old_stack_arg_under_construction
2643 = stack_arg_under_construction;
2644 stack_arg_under_construction = 0;
2645 /* Make a new map for the new argument list. */
2646 if (stack_usage_map_buf)
2647 free (stack_usage_map_buf);
2648 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2649 stack_usage_map = stack_usage_map_buf;
2650 highest_outgoing_arg_in_use = 0;
2652 allocate_dynamic_stack_space (push_size, NULL_RTX,
2656 /* If argument evaluation might modify the stack pointer,
2657 copy the address of the argument list to a register. */
2658 for (i = 0; i < num_actuals; i++)
2659 if (args[i].pass_on_stack)
2661 argblock = copy_addr_to_reg (argblock);
2666 compute_argument_addresses (args, argblock, num_actuals);
2668 /* If we push args individually in reverse order, perform stack alignment
2669 before the first push (the last arg). */
2670 if (PUSH_ARGS_REVERSED && argblock == 0
2671 && adjusted_args_size.constant != unadjusted_args_size)
2673 /* When the stack adjustment is pending, we get better code
2674 by combining the adjustments. */
2675 if (pending_stack_adjust
2676 && ! inhibit_defer_pop)
2678 pending_stack_adjust
2679 = (combine_pending_stack_adjustment_and_call
2680 (unadjusted_args_size,
2681 &adjusted_args_size,
2682 preferred_unit_stack_boundary));
2683 do_pending_stack_adjust ();
2685 else if (argblock == 0)
2686 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2687 - unadjusted_args_size));
2689 /* Now that the stack is properly aligned, pops can't safely
2690 be deferred during the evaluation of the arguments. */
2693 funexp = rtx_for_function_call (fndecl, addr);
2695 /* Figure out the register where the value, if any, will come back. */
2697 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2698 && ! structure_value_addr)
2700 if (pcc_struct_value)
2701 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2702 fndecl, NULL, (pass == 0));
2704 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2707 /* If VALREG is a PARALLEL whose first member has a zero
2708 offset, use that. This is for targets such as m68k that
2709 return the same value in multiple places. */
2710 if (GET_CODE (valreg) == PARALLEL)
2712 rtx elem = XVECEXP (valreg, 0, 0);
2713 rtx where = XEXP (elem, 0);
2714 rtx offset = XEXP (elem, 1);
2715 if (offset == const0_rtx
2716 && GET_MODE (where) == GET_MODE (valreg))
2721 /* Precompute all register parameters. It isn't safe to compute anything
2722 once we have started filling any specific hard regs. */
2723 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2725 if (CALL_EXPR_STATIC_CHAIN (exp))
2726 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2728 static_chain_value = 0;
2730 #ifdef REG_PARM_STACK_SPACE
2731 /* Save the fixed argument area if it's part of the caller's frame and
2732 is clobbered by argument setup for this call. */
2733 if (ACCUMULATE_OUTGOING_ARGS && pass)
2734 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2735 &low_to_save, &high_to_save);
2738 /* Now store (and compute if necessary) all non-register parms.
2739 These come before register parms, since they can require block-moves,
2740 which could clobber the registers used for register parms.
2741 Parms which have partial registers are not stored here,
2742 but we do preallocate space here if they want that. */
2744 for (i = 0; i < num_actuals; i++)
2746 if (args[i].reg == 0 || args[i].pass_on_stack)
2748 rtx before_arg = get_last_insn ();
2750 if (store_one_arg (&args[i], argblock, flags,
2751 adjusted_args_size.var != 0,
2752 reg_parm_stack_space)
2754 && check_sibcall_argument_overlap (before_arg,
2756 sibcall_failure = 1;
2759 if (((flags & ECF_CONST)
2760 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2762 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2763 gen_rtx_USE (VOIDmode,
2768 /* If we have a parm that is passed in registers but not in memory
2769 and whose alignment does not permit a direct copy into registers,
2770 make a group of pseudos that correspond to each register that we will later fill. */
2772 if (STRICT_ALIGNMENT)
2773 store_unaligned_arguments_into_pseudos (args, num_actuals);
2775 /* Now store any partially-in-registers parm.
2776 This is the last place a block-move can happen. */
2778 for (i = 0; i < num_actuals; i++)
2779 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2781 rtx before_arg = get_last_insn ();
2783 if (store_one_arg (&args[i], argblock, flags,
2784 adjusted_args_size.var != 0,
2785 reg_parm_stack_space)
2787 && check_sibcall_argument_overlap (before_arg,
2789 sibcall_failure = 1;
2792 /* If we pushed args in forward order, perform stack alignment
2793 after pushing the last arg. */
2794 if (!PUSH_ARGS_REVERSED && argblock == 0)
2795 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2796 - unadjusted_args_size));
2798 /* If register arguments require space on the stack and stack space
2799 was not preallocated, allocate stack space here for arguments
2800 passed in registers. */
2801 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2802 && !ACCUMULATE_OUTGOING_ARGS
2803 && must_preallocate == 0 && reg_parm_stack_space > 0)
2804 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2806 /* Pass the function the address in which to return a structure value. */
2808 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2810 structure_value_addr
2811 = convert_memory_address (Pmode, structure_value_addr);
2812 emit_move_insn (struct_value,
2814 force_operand (structure_value_addr,
2817 if (REG_P (struct_value))
2818 use_reg (&call_fusage, struct_value);
2821 after_args = get_last_insn ();
2822 funexp = prepare_call_address (funexp, static_chain_value,
2823 &call_fusage, reg_parm_seen, pass == 0);
2825 load_register_parameters (args, num_actuals, &call_fusage, flags,
2826 pass == 0, &sibcall_failure);
2828 /* Save a pointer to the last insn before the call, so that we can
2829 later safely search backwards to find the CALL_INSN. */
2830 before_call = get_last_insn ();
2832 /* Set up next argument register. For sibling calls on machines
2833 with register windows this should be the incoming register. */
2834 #ifdef FUNCTION_INCOMING_ARG
2836 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2840 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2843 /* All arguments and registers used for the call must be set up by now. */
2846 /* Stack must be properly aligned now. */
2848 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2850 /* Generate the actual call instruction. */
2851 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2852 adjusted_args_size.constant, struct_value_size,
2853 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2854 flags, & args_so_far);
2856 /* If the call setup or the call itself overlaps with anything
2857 of the argument setup we probably clobbered our call address.
2858 In that case we can't do sibcalls. */
2860 && check_sibcall_argument_overlap (after_args, 0, 0))
2861 sibcall_failure = 1;
2863 /* If a non-BLKmode value is returned at the most significant end
2864 of a register, shift the register right by the appropriate amount
2865 and update VALREG accordingly. BLKmode values are handled by the
2866 group load/store machinery below. */
2867 if (!structure_value_addr
2868 && !pcc_struct_value
2869 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2870 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2872 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2873 sibcall_failure = 1;
2874 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2877 if (pass && (flags & ECF_MALLOC))
2879 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2882 /* The return value from a malloc-like function is a pointer. */
2883 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2884 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2886 emit_move_insn (temp, valreg);
2888 /* The return value from a malloc-like function cannot alias anything else. */
2890 last = get_last_insn ();
2891 add_reg_note (last, REG_NOALIAS, temp);
2893 /* Write out the sequence. */
2894 insns = get_insns ();
2900 /* For calls to `setjmp', etc., inform
2901 function.c:setjmp_warnings that it should complain if
2902 nonvolatile values are live. For functions that cannot
2903 return, inform flow that control does not fall through. */
2905 if ((flags & ECF_NORETURN) || pass == 0)
2907 /* The barrier must be emitted
2908 immediately after the CALL_INSN. Some ports emit more
2909 than just a CALL_INSN above, so we must search for it here. */
2911 rtx last = get_last_insn ();
2912 while (!CALL_P (last))
2914 last = PREV_INSN (last);
2915 /* There was no CALL_INSN? */
2916 gcc_assert (last != before_call);
2919 emit_barrier_after (last);
2921 /* Stack adjustments after a noreturn call are dead code.
2922 However when NO_DEFER_POP is in effect, we must preserve
2923 stack_pointer_delta. */
2924 if (inhibit_defer_pop == 0)
2926 stack_pointer_delta = old_stack_allocated;
2927 pending_stack_adjust = 0;
2931 /* If value type not void, return an rtx for the value. */
2933 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2935 target = const0_rtx;
2936 else if (structure_value_addr)
2938 if (target == 0 || !MEM_P (target))
2941 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2942 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2943 structure_value_addr));
2944 set_mem_attributes (target, exp, 1);
2947 else if (pcc_struct_value)
2949 /* This is the special C++ case where we need to
2950 know what the true target was. We take care to
2951 never use this value more than once in one expression. */
2952 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2953 copy_to_reg (valreg));
2954 set_mem_attributes (target, exp, 1);
2956 /* Handle calls that return values in multiple non-contiguous locations.
2957 The Irix 6 ABI has examples of this. */
2958 else if (GET_CODE (valreg) == PARALLEL)
2962 /* This will only be assigned once, so it can be readonly. */
2963 tree nt = build_qualified_type (TREE_TYPE (exp),
2964 (TYPE_QUALS (TREE_TYPE (exp))
2965 | TYPE_QUAL_CONST));
2967 target = assign_temp (nt, 0, 1, 1);
2970 if (! rtx_equal_p (target, valreg))
2971 emit_group_store (target, valreg, TREE_TYPE (exp),
2972 int_size_in_bytes (TREE_TYPE (exp)));
2974 /* We cannot support sibling calls for this case. */
2975 sibcall_failure = 1;
2978 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2979 && GET_MODE (target) == GET_MODE (valreg))
2981 bool may_overlap = false;
2983 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2984 reg to a plain register. */
2985 if (!REG_P (target) || HARD_REGISTER_P (target))
2986 valreg = avoid_likely_spilled_reg (valreg);
2988 /* If TARGET is a MEM in the argument area, and we have
2989 saved part of the argument area, then we can't store
2990 directly into TARGET as it may get overwritten when we
2991 restore the argument save area below. Don't work too
2992 hard though and simply force TARGET to a register if it
2993 is a MEM; the optimizer is quite likely to sort it out. */
2994 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2995 for (i = 0; i < num_actuals; i++)
2996 if (args[i].save_area)
3003 target = copy_to_reg (valreg);
3006 /* TARGET and VALREG cannot be equal at this point
3007 because the latter would not have
3008 REG_FUNCTION_VALUE_P true, while the former would if
3009 it were referring to the same register.
3011 If they refer to the same register, this move will be
3012 a no-op, except when function inlining is being done. */
3014 emit_move_insn (target, valreg);
3016 /* If we are setting a MEM, this code must be executed.
3017 Since it is emitted after the call insn, sibcall
3018 optimization cannot be performed in that case. */
3020 sibcall_failure = 1;
3023 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3025 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3027 /* We cannot support sibling calls for this case. */
3028 sibcall_failure = 1;
3031 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3033 if (targetm.calls.promote_function_return(funtype))
3035 /* If we promoted this return value, make the proper SUBREG.
3036 TARGET might be const0_rtx here, so be careful. */
3038 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3039 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3041 tree type = TREE_TYPE (exp);
3042 int unsignedp = TYPE_UNSIGNED (type);
3044 enum machine_mode pmode;
3046 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3047 /* If we don't promote as expected, something is wrong. */
3048 gcc_assert (GET_MODE (target) == pmode);
3050 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3051 && (GET_MODE_SIZE (GET_MODE (target))
3052 > GET_MODE_SIZE (TYPE_MODE (type))))
3054 offset = GET_MODE_SIZE (GET_MODE (target))
3055 - GET_MODE_SIZE (TYPE_MODE (type));
3056 if (! BYTES_BIG_ENDIAN)
3057 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3058 else if (! WORDS_BIG_ENDIAN)
3059 offset %= UNITS_PER_WORD;
3061 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3062 SUBREG_PROMOTED_VAR_P (target) = 1;
3063 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
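/* Illustrative example (added; not part of the original source): a function
   returning signed char on a target that promotes small return values to
   SImode leaves GET_MODE (target) == SImode while TYPE_MODE (type) is
   QImode.  The code above then rewrites TARGET as something like

     (subreg:QI (reg:SI <valreg>) <offset>)

   with SUBREG_PROMOTED_VAR_P set, where <offset> is 0 on a little-endian
   target and addresses the low-order byte on a big-endian one.  */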
3067 /* If size of args is variable or this was a constructor call for a stack
3068 argument, restore saved stack-pointer value. */
3070 if (old_stack_level)
3072 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3073 stack_pointer_delta = old_stack_pointer_delta;
3074 pending_stack_adjust = old_pending_adj;
3075 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3076 stack_arg_under_construction = old_stack_arg_under_construction;
3077 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3078 stack_usage_map = initial_stack_usage_map;
3079 sibcall_failure = 1;
3081 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3083 #ifdef REG_PARM_STACK_SPACE
3085 restore_fixed_argument_area (save_area, argblock,
3086 high_to_save, low_to_save);
3089 /* If we saved any argument areas, restore them. */
3090 for (i = 0; i < num_actuals; i++)
3091 if (args[i].save_area)
3093 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3095 = gen_rtx_MEM (save_mode,
3096 memory_address (save_mode,
3097 XEXP (args[i].stack_slot, 0)));
3099 if (save_mode != BLKmode)
3100 emit_move_insn (stack_area, args[i].save_area);
3102 emit_block_move (stack_area, args[i].save_area,
3103 GEN_INT (args[i].locate.size.constant),
3104 BLOCK_OP_CALL_PARM);
3107 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3108 stack_usage_map = initial_stack_usage_map;
3111 /* If this was alloca, record the new stack level for nonlocal gotos.
3112 Check for the handler slots since we might not have a save area
3113 for non-local gotos. */
3115 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3116 update_nonlocal_goto_save_area ();
3118 /* Free up storage we no longer need. */
3119 for (i = 0; i < num_actuals; ++i)
3120 if (args[i].aligned_regs)
3121 free (args[i].aligned_regs);
3123 insns = get_insns ();
3128 tail_call_insns = insns;
3130 /* Restore the pending stack adjustment now that we have
3131 finished generating the sibling call sequence. */
3133 pending_stack_adjust = save_pending_stack_adjust;
3134 stack_pointer_delta = save_stack_pointer_delta;
3136 /* Prepare arg structure for next iteration. */
3137 for (i = 0; i < num_actuals; i++)
3140 args[i].aligned_regs = 0;
3144 sbitmap_free (stored_args_map);
3148 normal_call_insns = insns;
3150 /* Verify that we've deallocated all the stack we used. */
3151 gcc_assert ((flags & ECF_NORETURN)
3152 || (old_stack_allocated
3153 == stack_pointer_delta - pending_stack_adjust));
3156 /* If something prevents making this a sibling call,
3157 zero out the sequence. */
3158 if (sibcall_failure)
3159 tail_call_insns = NULL_RTX;
3164 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3165 the arguments too, as the argument area is now clobbered by the call. */
3166 if (tail_call_insns)
3168 emit_insn (tail_call_insns);
3169 crtl->tail_call_emit = true;
3172 emit_insn (normal_call_insns);
3174 currently_expanding_call--;
3176 if (stack_usage_map_buf)
3177 free (stack_usage_map_buf);
3182 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3183 this function's incoming arguments.
3185 At the start of RTL generation we know the only REG_EQUIV notes
3186 in the rtl chain are those for incoming arguments, so we can look
3187 for REG_EQUIV notes between the start of the function and the
3188 NOTE_INSN_FUNCTION_BEG.
3190 This is (slight) overkill. We could keep track of the highest
3191 argument we clobber and be more selective in removing notes, but it
3192 does not seem to be worth the effort. */
3195 fixup_tail_calls (void)
3199 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3203 /* There are never REG_EQUIV notes for the incoming arguments
3204 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3206 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3209 note = find_reg_note (insn, REG_EQUIV, 0);
3211 remove_note (insn, note);
3212 note = find_reg_note (insn, REG_EQUIV, 0);
3217 /* Traverse a list of TYPES and expand all complex types into their components. */
3220 split_complex_types (tree types)
3224 /* Before allocating memory, check for the common case of no complex types. */
3225 for (p = types; p; p = TREE_CHAIN (p))
3227 tree type = TREE_VALUE (p);
3228 if (TREE_CODE (type) == COMPLEX_TYPE
3229 && targetm.calls.split_complex_arg (type))
3235 types = copy_list (types);
3237 for (p = types; p; p = TREE_CHAIN (p))
3239 tree complex_type = TREE_VALUE (p);
3241 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3242 && targetm.calls.split_complex_arg (complex_type))
3246 /* Rewrite complex type with component type. */
3247 TREE_VALUE (p) = TREE_TYPE (complex_type);
3248 next = TREE_CHAIN (p);
3250 /* Add another component type for the imaginary part. */
3251 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3252 TREE_CHAIN (p) = imag;
3253 TREE_CHAIN (imag) = next;
3255 /* Skip the newly created node. */
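/* Illustrative before/after (added; not part of the original source):
   given a TYPE_ARG_TYPES chain for

     void f (_Complex float z, int n);

   i.e. (_Complex float, int), the rewrite above produces
   (float, float, int): the original node is narrowed to the component type
   and a second node for the imaginary part is spliced in right after it.  */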
3263 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3264 The RETVAL parameter specifies whether the return value needs to be saved;
3265 the other parameters are documented in the emit_library_call function below. */
3268 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3269 enum libcall_type fn_type,
3270 enum machine_mode outmode, int nargs, va_list p)
3272 /* Total size in bytes of all the stack-parms scanned so far. */
3273 struct args_size args_size;
3274 /* Size of arguments before any adjustments (such as rounding). */
3275 struct args_size original_args_size;
3278 /* TODO: choose the correct decl type of orgfun. Sadly this information
3279 isn't present here, so we default to the native calling ABI. */
3280 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3281 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3285 CUMULATIVE_ARGS args_so_far;
3289 enum machine_mode mode;
3292 struct locate_and_pad_arg_data locate;
3296 int old_inhibit_defer_pop = inhibit_defer_pop;
3297 rtx call_fusage = 0;
3300 int pcc_struct_value = 0;
3301 int struct_value_size = 0;
3303 int reg_parm_stack_space = 0;
3306 tree tfom; /* type_for_mode (outmode, 0) */
3308 #ifdef REG_PARM_STACK_SPACE
3309 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
3311 int low_to_save = 0, high_to_save = 0;
3312 rtx save_area = 0; /* Place that it is saved. */
3315 /* Size of the stack reserved for parameter registers. */
3316 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3317 char *initial_stack_usage_map = stack_usage_map;
3318 char *stack_usage_map_buf = NULL;
3320 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3322 #ifdef REG_PARM_STACK_SPACE
3323 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3326 /* By default, library functions can not throw. */
3327 flags = ECF_NOTHROW;
3340 flags |= ECF_NORETURN;
3343 flags = ECF_NORETURN;
3345 case LCT_RETURNS_TWICE:
3346 flags = ECF_RETURNS_TWICE;
3351 /* Ensure current function's preferred stack boundary is at least
3353 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3354 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3356 /* If this kind of value comes back in memory,
3357 decide where in memory it should come back. */
3358 if (outmode != VOIDmode)
3360 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3361 if (aggregate_value_p (tfom, 0))
3363 #ifdef PCC_STATIC_STRUCT_RETURN
3365 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3366 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3367 pcc_struct_value = 1;
3369 value = gen_reg_rtx (outmode);
3370 #else /* not PCC_STATIC_STRUCT_RETURN */
3371 struct_value_size = GET_MODE_SIZE (outmode);
3372 if (value != 0 && MEM_P (value))
3375 mem_value = assign_temp (tfom, 0, 1, 1);
3377 /* This call returns a big structure. */
3378 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3382 tfom = void_type_node;
3384 /* ??? Unfinished: must pass the memory address as an argument. */
3386 /* Copy all the libcall-arguments out of the varargs data
3387 and into a vector ARGVEC.
3389 Compute how to pass each argument. We only support a very small subset
3390 of the full argument passing conventions to limit complexity here since
3391 library functions shouldn't have many args. */
3393 argvec = XALLOCAVEC (struct arg, nargs + 1);
3394 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3396 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3397 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3399 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3402 args_size.constant = 0;
3409 /* If there's a structure value address to be passed,
3410 either pass it in the special place, or pass it as an extra argument. */
3411 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3413 rtx addr = XEXP (mem_value, 0);
3417 /* Make sure it is a reasonable operand for a move or push insn. */
3418 if (!REG_P (addr) && !MEM_P (addr)
3419 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3420 addr = force_operand (addr, NULL_RTX);
3422 argvec[count].value = addr;
3423 argvec[count].mode = Pmode;
3424 argvec[count].partial = 0;
3426 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3427 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3428 NULL_TREE, 1) == 0);
3430 locate_and_pad_parm (Pmode, NULL_TREE,
3431 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3434 argvec[count].reg != 0,
3436 0, NULL_TREE, &args_size, &argvec[count].locate);
3438 if (argvec[count].reg == 0 || argvec[count].partial != 0
3439 || reg_parm_stack_space > 0)
3440 args_size.constant += argvec[count].locate.size.constant;
3442 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3447 for (; count < nargs; count++)
3449 rtx val = va_arg (p, rtx);
3450 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3452 /* We cannot convert the arg value to the mode the library wants here;
3453 must do it earlier where we know the signedness of the arg. */
3454 gcc_assert (mode != BLKmode
3455 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3457 /* Make sure it is a reasonable operand for a move or push insn. */
3458 if (!REG_P (val) && !MEM_P (val)
3459 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3460 val = force_operand (val, NULL_RTX);
3462 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3466 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3468 /* If this was a CONST function, it is now PURE since it now reads memory. */
3470 if (flags & ECF_CONST)
3472 flags &= ~ECF_CONST;
3476 if (MEM_P (val) && !must_copy)
3480 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3482 emit_move_insn (slot, val);
3485 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3486 gen_rtx_USE (VOIDmode, slot),
3489 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3490 gen_rtx_CLOBBER (VOIDmode,
3495 val = force_operand (XEXP (slot, 0), NULL_RTX);
3498 argvec[count].value = val;
3499 argvec[count].mode = mode;
3501 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3503 argvec[count].partial
3504 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3506 locate_and_pad_parm (mode, NULL_TREE,
3507 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3510 argvec[count].reg != 0,
3512 argvec[count].partial,
3513 NULL_TREE, &args_size, &argvec[count].locate);
3515 gcc_assert (!argvec[count].locate.size.var);
3517 if (argvec[count].reg == 0 || argvec[count].partial != 0
3518 || reg_parm_stack_space > 0)
3519 args_size.constant += argvec[count].locate.size.constant;
3521 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3524 /* If this machine requires an external definition for library
3525 functions, write one out. */
3526 assemble_external_libcall (fun);
3528 original_args_size = args_size;
3529 args_size.constant = (((args_size.constant
3530 + stack_pointer_delta
3534 - stack_pointer_delta);
3536 args_size.constant = MAX (args_size.constant,
3537 reg_parm_stack_space);
3539 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3540 args_size.constant -= reg_parm_stack_space;
3542 if (args_size.constant > crtl->outgoing_args_size)
3543 crtl->outgoing_args_size = args_size.constant;
3545 if (ACCUMULATE_OUTGOING_ARGS)
3547 /* Since the stack pointer will never be pushed, it is possible for
3548 the evaluation of a parm to clobber something we have already
3549 written to the stack. Since most function calls on RISC machines
3550 do not use the stack, this is uncommon, but must work correctly.
3552 Therefore, we save any area of the stack that was already written
3553 and that we are using. Here we set up to do this by making a new
3554 stack usage map from the old one.
3556 Another approach might be to try to reorder the argument
3557 evaluations to avoid this conflicting stack usage. */
3559 needed = args_size.constant;
3561 /* Since we will be writing into the entire argument area, the
3562 map must be allocated for its entire size, not just the part that
3563 is the responsibility of the caller. */
3564 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3565 needed += reg_parm_stack_space;
3567 #ifdef ARGS_GROW_DOWNWARD
3568 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3571 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3574 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3575 stack_usage_map = stack_usage_map_buf;
3577 if (initial_highest_arg_in_use)
3578 memcpy (stack_usage_map, initial_stack_usage_map,
3579 initial_highest_arg_in_use);
3581 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3582 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3583 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3586 /* We must be careful to use virtual regs before they're instantiated,
3587 and real regs afterwards. Loop optimization, for example, can create
3588 new libcalls after we've instantiated the virtual regs, and if we
3589 use virtuals anyway, they won't match the rtl patterns. */
3591 if (virtuals_instantiated)
3592 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3594 argblock = virtual_outgoing_args_rtx;
3599 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3602 /* If we push args individually in reverse order, perform stack alignment
3603 before the first push (the last arg). */
3604 if (argblock == 0 && PUSH_ARGS_REVERSED)
3605 anti_adjust_stack (GEN_INT (args_size.constant
3606 - original_args_size.constant));
3608 if (PUSH_ARGS_REVERSED)
3619 #ifdef REG_PARM_STACK_SPACE
3620 if (ACCUMULATE_OUTGOING_ARGS)
3622 /* The argument list is the property of the called routine and it
3623 may clobber it. If the fixed area has been used for previous
3624 parameters, we must save and restore it. */
3625 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3626 &low_to_save, &high_to_save);
3630 /* Push the args that need to be pushed. */
3632 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3633 are to be pushed. */
3634 for (count = 0; count < nargs; count++, argnum += inc)
3636 enum machine_mode mode = argvec[argnum].mode;
3637 rtx val = argvec[argnum].value;
3638 rtx reg = argvec[argnum].reg;
3639 int partial = argvec[argnum].partial;
3640 unsigned int parm_align = argvec[argnum].locate.boundary;
3641 int lower_bound = 0, upper_bound = 0, i;
3643 if (! (reg != 0 && partial == 0))
3645 if (ACCUMULATE_OUTGOING_ARGS)
3647 /* If this is being stored into a pre-allocated, fixed-size,
3648 stack area, save any previous data at that location. */
3650 #ifdef ARGS_GROW_DOWNWARD
3651 /* stack_slot is negative, but we want to index stack_usage_map
3652 with positive values. */
3653 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3654 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3656 lower_bound = argvec[argnum].locate.slot_offset.constant;
3657 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3661 /* Don't worry about things in the fixed argument area;
3662 it has already been saved. */
3663 if (i < reg_parm_stack_space)
3664 i = reg_parm_stack_space;
3665 while (i < upper_bound && stack_usage_map[i] == 0)
3668 if (i < upper_bound)
3670 /* We need to make a save area. */
3672 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3673 enum machine_mode save_mode
3674 = mode_for_size (size, MODE_INT, 1);
3676 = plus_constant (argblock,
3677 argvec[argnum].locate.offset.constant);
3679 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3681 if (save_mode == BLKmode)
3683 argvec[argnum].save_area
3684 = assign_stack_temp (BLKmode,
3685 argvec[argnum].locate.size.constant,
3688 emit_block_move (validize_mem (argvec[argnum].save_area),
3690 GEN_INT (argvec[argnum].locate.size.constant),
3691 BLOCK_OP_CALL_PARM);
3695 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3697 emit_move_insn (argvec[argnum].save_area, stack_area);
3702 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3703 partial, reg, 0, argblock,
3704 GEN_INT (argvec[argnum].locate.offset.constant),
3705 reg_parm_stack_space,
3706 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3708 /* Now mark the segment we just used. */
3709 if (ACCUMULATE_OUTGOING_ARGS)
3710 for (i = lower_bound; i < upper_bound; i++)
3711 stack_usage_map[i] = 1;
3715 if ((flags & ECF_CONST)
3716 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
3720 /* Indicate argument access so that alias.c knows that these values are defined. */
3723 use = plus_constant (argblock,
3724 argvec[argnum].locate.offset.constant);
3726 /* When arguments are pushed, trying to tell alias.c where
3727 exactly this argument is won't work, because the
3728 auto-increment causes confusion. So we merely indicate
3729 that we access something with a known mode somewhere on the stack. */
3731 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3732 gen_rtx_SCRATCH (Pmode));
3733 use = gen_rtx_MEM (argvec[argnum].mode, use);
3734 use = gen_rtx_USE (VOIDmode, use);
3735 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3740 /* If we pushed args in forward order, perform stack alignment
3741 after pushing the last arg. */
3742 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3743 anti_adjust_stack (GEN_INT (args_size.constant
3744 - original_args_size.constant));
3746 if (PUSH_ARGS_REVERSED)
3751 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3753 /* Now load any reg parms into their regs. */
3755 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3756 are to be pushed. */
3757 for (count = 0; count < nargs; count++, argnum += inc)
3759 enum machine_mode mode = argvec[argnum].mode;
3760 rtx val = argvec[argnum].value;
3761 rtx reg = argvec[argnum].reg;
3762 int partial = argvec[argnum].partial;
3764 /* Handle calls that pass values in multiple non-contiguous
3765 locations. The PA64 has examples of this for library calls. */
3766 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3767 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3768 else if (reg != 0 && partial == 0)
3769 emit_move_insn (reg, val);
3774 /* Any regs containing parms remain in use through the call. */
3775 for (count = 0; count < nargs; count++)
3777 rtx reg = argvec[count].reg;
3778 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3779 use_group_regs (&call_fusage, reg);
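/* Plain hard registers are recorded below with use_regs or use_reg so
   dataflow knows they are live at the call; PARTIAL is the number of bytes
   passed in registers for a split argument, hence PARTIAL / UNITS_PER_WORD
   word-sized registers.  */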
3782 int partial = argvec[count].partial;
3786 gcc_assert (partial % UNITS_PER_WORD == 0);
3787 nregs = partial / UNITS_PER_WORD;
3788 use_regs (&call_fusage, REGNO (reg), nregs);
3791 use_reg (&call_fusage, reg);
3795 /* Pass the function the address in which to return a structure value. */
3796 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3798 emit_move_insn (struct_value,
3800 force_operand (XEXP (mem_value, 0),
NULL_RTX));
3802 if (REG_P (struct_value))
3803 use_reg (&call_fusage, struct_value);
3806 /* Don't allow popping to be deferred, since then
3807 cse'ing of library calls could delete a call and leave the pop. */
NO_DEFER_POP;
3809 valreg = (mem_value == 0 && outmode != VOIDmode
3810 ? hard_libcall_value (outmode) : NULL_RTX);
3812 /* Stack must be properly aligned now. */
3813 gcc_assert (!(stack_pointer_delta
3814 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
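/* PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT is the required alignment in
   bytes and is a power of two, so subtracting 1 yields a mask of the
   low-order bits; e.g. for a 128-bit (16-byte) boundary the mask is 15, and
   any nonzero result would mean the outgoing stack is misaligned.  */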
3816 before_call = get_last_insn ();
3818 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3819 will set inhibit_defer_pop to that value. */
3820 /* The return type is needed to decide how many bytes the function pops.
3821 Signedness plays no role in that, so for simplicity, we pretend it's
3822 always signed. We also assume that the list of arguments passed has
3823 no impact, so we pretend it is unknown. */
3825 emit_call_1 (fun, NULL,
3826 get_identifier (XSTR (orgfun, 0)),
3827 build_function_type (tfom, NULL_TREE),
3828 original_args_size.constant, args_size.constant,
struct_value_size,
3830 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
valreg,
3832 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3834 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3835 that it should complain if nonvolatile values are live. For
3836 functions that cannot return, inform flow that control does not
fall through. */
3839 if (flags & ECF_NORETURN)
3841 /* The barrier note must be emitted
3842 immediately after the CALL_INSN. Some ports emit more than
3843 just a CALL_INSN above, so we must search for it here. */
3845 rtx last = get_last_insn ();
3846 while (!CALL_P (last))
{
3848 last = PREV_INSN (last);
3849 /* There was no CALL_INSN? */
3850 gcc_assert (last != before_call);
}
3853 emit_barrier_after (last);
3856 /* Now restore inhibit_defer_pop to its actual original value. */
OK_DEFER_POP;
3861 /* Copy the value to the right place. */
3862 if (outmode != VOIDmode && retval)
3868 if (value != mem_value)
3869 emit_move_insn (value, mem_value);
3871 else if (GET_CODE (valreg) == PARALLEL)
if (value == 0)
3874 value = gen_reg_rtx (outmode);
3875 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3879 /* Convert to the proper mode if PROMOTE_MODE has been active. */
3880 if (GET_MODE (valreg) != outmode)
3882 int unsignedp = TYPE_UNSIGNED (tfom);
3884 gcc_assert (targetm.calls.promote_function_return (tfom));
3885 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
3886 == GET_MODE (valreg));
3888 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3892 emit_move_insn (value, valreg);
3898 if (ACCUMULATE_OUTGOING_ARGS)
3900 #ifdef REG_PARM_STACK_SPACE
if (save_area)
3902 restore_fixed_argument_area (save_area, argblock,
3903 high_to_save, low_to_save);
#endif
3906 /* If we saved any argument areas, restore them. */
3907 for (count = 0; count < nargs; count++)
3908 if (argvec[count].save_area)
3910 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3911 rtx adr = plus_constant (argblock,
3912 argvec[count].locate.offset.constant);
3913 rtx stack_area = gen_rtx_MEM (save_mode,
3914 memory_address (save_mode, adr));
3916 if (save_mode == BLKmode)
3917 emit_block_move (stack_area,
3918 validize_mem (argvec[count].save_area),
3919 GEN_INT (argvec[count].locate.size.constant),
3920 BLOCK_OP_CALL_PARM);
else
3922 emit_move_insn (stack_area, argvec[count].save_area);
3925 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3926 stack_usage_map = initial_stack_usage_map;
3929 if (stack_usage_map_buf)
3930 free (stack_usage_map_buf);

return value;
}

3936 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
3938 for a value of mode OUTMODE,
3939 with NARGS different arguments, passed as alternating rtx values
3940 and machine_modes to convert them to.
3942 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
3943 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
3944 other types of library calls. */
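/* A minimal usage sketch (the "__helper" symbol and the operand rtxes OP0
   and OP1 are hypothetical, not taken from this file): each argument is
   passed as an rtx followed by the machine mode to convert it to.

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__helper"),
                        LCT_NORMAL, VOIDmode, 2, op0, SImode, op1, SImode);  */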
void
3947 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3948 enum machine_mode outmode, int nargs, ...)
{
va_list p;

3952 va_start (p, nargs);
3953 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
va_end (p);
}
3957 /* Like emit_library_call except that an extra argument, VALUE,
3958 comes second and says where to store the result.
3959 (If VALUE is zero, this function chooses a convenient way
3960 to return the value.)
3962 This function returns an rtx for where the value is to be found.
3963 If VALUE is nonzero, VALUE is returned. */
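/* A hedged example (the "__helper2" symbol and the operands are
   hypothetical): passing VALUE == NULL_RTX lets this function pick where the
   SImode result lives, and the returned rtx says where to find it.

     rtx res = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__helper2"),
                                        NULL_RTX, LCT_CONST, SImode,
                                        2, op0, SImode, op1, SImode);  */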
rtx
3966 emit_library_call_value (rtx orgfun, rtx value,
3967 enum libcall_type fn_type,
3968 enum machine_mode outmode, int nargs, ...)
{
rtx result;
va_list p;

3973 va_start (p, nargs);
3974 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
nargs, p);
va_end (p);

return result;
}
3981 /* Store a single argument for a function call
3982 into the register or memory area where it must be passed.
3983 *ARG describes the argument value and where to pass it.
3985 ARGBLOCK is the address of the stack-block for all the arguments,
3986 or 0 on a machine where arguments are pushed individually.
3988 FLAGS is the set of ECF_ flags for this call. ECF_MAY_BE_ALLOCA in FLAGS
3989 says this could be a call to `alloca', so we must be careful about how the stack is used.

3991 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3992 argument stack. This is used with ACCUMULATE_OUTGOING_ARGS to indicate
3993 that we need not worry about saving and restoring the stack.

3995 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved for arguments that are also passed in registers.

3997 Return nonzero if this arg should cause sibcall failure,
zero otherwise. */

static int
4001 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4002 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4004 tree pval = arg->tree_value;
rtx reg = 0;
int partial = 0;
int used = 0;
4008 int i, lower_bound = 0, upper_bound = 0;
4009 int sibcall_failure = 0;
4011 if (TREE_CODE (pval) == ERROR_MARK)
return 1;

4014 /* Push a new temporary level for any temporaries we make for
this argument. */
push_temp_slots ();
4018 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4020 /* If this is being stored into a pre-allocated, fixed-size stack area,
4021 save any previous data at that location. */
4022 if (argblock && ! variable_size && arg->stack)
4024 #ifdef ARGS_GROW_DOWNWARD
4025 /* stack_slot is negative, but we want to index stack_usage_map
4026 with positive values. */
4027 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4028 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
else
upper_bound = 0;

4032 lower_bound = upper_bound - arg->locate.size.constant;
#else
4034 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4035 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
else
lower_bound = 0;

4039 upper_bound = lower_bound + arg->locate.size.constant;
#endif

i = lower_bound;
4043 /* Don't worry about things in the fixed argument area;
4044 it has already been saved. */
4045 if (i < reg_parm_stack_space)
4046 i = reg_parm_stack_space;
4047 while (i < upper_bound && stack_usage_map[i] == 0)
i++;
4050 if (i < upper_bound)
4052 /* We need to make a save area. */
4053 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4054 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4055 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4056 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4058 if (save_mode == BLKmode)
4060 tree ot = TREE_TYPE (arg->tree_value);
4061 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4062 | TYPE_QUAL_CONST));
4064 arg->save_area = assign_temp (nt, 0, 1, 1);
4065 preserve_temp_slots (arg->save_area);
4066 emit_block_move (validize_mem (arg->save_area), stack_area,
4067 GEN_INT (arg->locate.size.constant),
4068 BLOCK_OP_CALL_PARM);
else
4072 arg->save_area = gen_reg_rtx (save_mode);
4073 emit_move_insn (arg->save_area, stack_area);
4079 /* If this isn't going to be placed on both the stack and in registers,
4080 set up the register and number of words. */
4081 if (! arg->pass_on_stack)
4083 if (flags & ECF_SIBCALL)
4084 reg = arg->tail_call_reg;
else
reg = arg->reg;
4087 partial = arg->partial;
4090 /* Being passed entirely in a register. We shouldn't be called in
this case. */
4092 gcc_assert (reg == 0 || partial != 0);
4094 /* If this arg needs special alignment, don't load the registers
here. */
4096 if (arg->n_aligned_regs != 0)
reg = 0;
4099 /* If this is being passed partially in a register, we can't evaluate
4100 it directly into its stack slot. Otherwise, we can. */
4101 if (arg->value == 0)
4103 /* stack_arg_under_construction is nonzero if a function argument is
4104 being evaluated directly into the outgoing argument list and
4105 expand_call must take special action to preserve the argument list
4106 if it is called recursively.
4108 For scalar function arguments stack_usage_map is sufficient to
4109 determine which stack slots must be saved and restored. Scalar
4110 arguments in general have pass_on_stack == 0.
4112 If this argument is initialized by a function which takes the
4113 address of the argument (a C++ constructor or a C function
4114 returning a BLKmode structure), then stack_usage_map is
4115 insufficient and expand_call must push the stack around the
4116 function call. Such arguments have pass_on_stack == 1.
4118 Note that it is always safe to set stack_arg_under_construction,
4119 but this generates suboptimal code if set when not needed. */
4121 if (arg->pass_on_stack)
4122 stack_arg_under_construction++;
4124 arg->value = expand_expr (pval,
(partial
4126 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4127 ? NULL_RTX : arg->stack,
4128 VOIDmode, EXPAND_STACK_PARM);
4130 /* If the mode doesn't agree (because we are promoting the object, or for
4131 any other reason), convert the mode. */
4133 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4134 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4135 arg->value, arg->unsignedp);
4137 if (arg->pass_on_stack)
4138 stack_arg_under_construction--;
4141 /* Check for overlap with already clobbered argument area. */
4142 if ((flags & ECF_SIBCALL)
4143 && MEM_P (arg->value)
4144 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4145 arg->locate.size.constant))
4146 sibcall_failure = 1;
4148 /* Don't allow anything left on stack from computation
4149 of argument to alloca. */
4150 if (flags & ECF_MAY_BE_ALLOCA)
4151 do_pending_stack_adjust ();
4153 if (arg->value == arg->stack)
4154 /* If the value is already in the stack slot, we are done. */
;
4156 else if (arg->mode != BLKmode)
4159 unsigned int parm_align;
4161 /* Argument is a scalar, not entirely passed in registers.
4162 (If part is passed in registers, arg->partial says how much
4163 and emit_push_insn will take care of putting it there.)
4165 Push it, and if its size is less than the
4166 amount of space allocated to it,
4167 also bump stack pointer by the additional space.
4168 Note that in C the default argument promotions
4169 will prevent such mismatches. */
4171 size = GET_MODE_SIZE (arg->mode);
4172 /* Compute how much space the push instruction will push.
4173 On many machines, pushing a byte will advance the stack
4174 pointer by a halfword. */
4175 #ifdef PUSH_ROUNDING
4176 size = PUSH_ROUNDING (size);
#endif
used = size;
4180 /* Compute how much space the argument should get:
4181 round up to a multiple of the alignment for arguments. */
4182 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4183 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4184 / (PARM_BOUNDARY / BITS_PER_UNIT))
4185 * (PARM_BOUNDARY / BITS_PER_UNIT));
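/* For example, with PARM_BOUNDARY of 32 bits (4 bytes), a 1-byte argument
   gets USED == 4 and a 5-byte argument gets USED == 8.  */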
4187 /* Compute the alignment of the pushed argument. */
4188 parm_align = arg->locate.boundary;
4189 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4191 int pad = used - size;
4194 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4195 parm_align = MIN (parm_align, pad_align);
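/* PAD & -PAD isolates the lowest set bit of PAD, i.e. the largest power of
   two that divides the padding in bytes; scaled by BITS_PER_UNIT it gives the
   strongest alignment the padded-down value itself can be assumed to have.  */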
4199 /* This isn't already where we want it on the stack, so put it there.
4200 This can either be done with push or copy insns. */
4201 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4202 parm_align, partial, reg, used - size, argblock,
4203 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4204 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4206 /* Unless this is a partially-in-register argument, the argument is now
4209 arg->value = arg->stack;
4213 /* BLKmode, at least partly to be pushed. */
4215 unsigned int parm_align;
4219 /* Pushing a nonscalar.
4220 If part is passed in registers, PARTIAL says how much
4221 and emit_push_insn will take care of putting it there. */
4223 /* Round its size up to a multiple
4224 of the allocation unit for arguments. */
4226 if (arg->locate.size.var != 0)
4229 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4233 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4234 for BLKmode is careful to avoid it. */
4235 excess = (arg->locate.size.constant
4236 - int_size_in_bytes (TREE_TYPE (pval))
+ partial);
4238 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4239 NULL_RTX, TYPE_MODE (sizetype),
EXPAND_NORMAL);
4243 parm_align = arg->locate.boundary;
4245 /* When an argument is padded down, the block is aligned to
4246 PARM_BOUNDARY, but the actual argument isn't. */
4247 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4249 if (arg->locate.size.var)
4250 parm_align = BITS_PER_UNIT;
4253 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4254 parm_align = MIN (parm_align, excess_align);
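/* The check below matters for sibling calls: if ARG->VALUE already lives in
   the incoming argument area and its bytes overlap the outgoing slot at
   ARGBLOCK + ARG->LOCATE.OFFSET, pushing it in place could overwrite the
   source before it is fully copied, so we give up on the sibcall
   (sibcall_failure) instead.  */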
4258 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4260 /* emit_push_insn might not work properly if arg->value and
4261 argblock + arg->locate.offset areas overlap. */
rtx x = arg->value;
int i = 0;
4265 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4266 || (GET_CODE (XEXP (x, 0)) == PLUS
4267 && XEXP (XEXP (x, 0), 0) ==
4268 crtl->args.internal_arg_pointer
4269 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4271 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4272 i = INTVAL (XEXP (XEXP (x, 0), 1));
4274 /* expand_call should ensure this. */
4275 gcc_assert (!arg->locate.offset.var
4276 && arg->locate.size.var == 0
4277 && CONST_INT_P (size_rtx));
4279 if (arg->locate.offset.constant > i)
{
4281 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4282 sibcall_failure = 1;
}
4284 else if (arg->locate.offset.constant < i)
{
4286 /* Use arg->locate.size.constant instead of size_rtx
4287 because we only care about the part of the argument
on the stack. */
4289 if (i < (arg->locate.offset.constant
4290 + arg->locate.size.constant))
4291 sibcall_failure = 1;
}
else
{
4295 /* Even though they appear to be at the same location,
4296 if part of the outgoing argument is in registers,
4297 they aren't really at the same location. Check for
4298 this by making sure that the incoming size is the
4299 same as the outgoing size. */
4300 if (arg->locate.size.constant != INTVAL (size_rtx))
4301 sibcall_failure = 1;
}
4306 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4307 parm_align, partial, reg, excess, argblock,
4308 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4309 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4311 /* Unless this is a partially-in-register argument, the argument is now
4314 ??? Unlike the case above, in which we want the actual
4315 address of the data, so that we can load it directly into a
4316 register, here we want the address of the stack slot, so that
4317 it's properly aligned for word-by-word copying or something
4318 like that. It's not clear that this is always correct. */
4320 arg->value = arg->stack_slot;
4323 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4325 tree type = TREE_TYPE (arg->tree_value);
arg->parallel_value
4327 = emit_group_load_into_temps (arg->reg, arg->value, type,
4328 int_size_in_bytes (type));
4331 /* Mark all slots this store used. */
4332 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4333 && argblock && ! variable_size && arg->stack)
4334 for (i = lower_bound; i < upper_bound; i++)
4335 stack_usage_map[i] = 1;
4337 /* Once we have pushed something, pops can't safely
4338 be deferred during the rest of the arguments. */
NO_DEFER_POP;
4341 /* Free any temporary slots made in processing this argument. Show
4342 that we might have taken the address of something and pushed that
as an operand. */
4344 preserve_temp_slots (NULL_RTX);
free_temp_slots ();
pop_temp_slots ();
4348 return sibcall_failure;
4351 /* Nonzero if we do not know how to pass TYPE solely in registers. */
bool
4354 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type)
{
if (!type)
return false;
4360 /* If the type has variable size... */
4361 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
return true;
4364 /* If the type is marked as addressable (it is required
4365 to be constructed into the stack)... */
4366 if (TREE_ADDRESSABLE (type))
return true;

return false;
}
4372 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4373 takes trailing padding of a structure into account. */
4374 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
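/* The extra test at the end of this function rejects BLKmode values whose
   size is not a multiple of PARM_BOUNDARY when the target pads them on the
   side (upward if BYTES_BIG_ENDIAN, downward otherwise) that a plain register
   copy would leave at the wrong end of the register.  */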
bool
4377 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
if (!type)
return false;
4382 /* If the type has variable size... */
4383 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
return true;
4386 /* If the type is marked as addressable (it is required
4387 to be constructed into the stack)... */
4388 if (TREE_ADDRESSABLE (type))
return true;

4391 /* If the padding and mode of the type are such that a copy into
4392 a register would put it into the wrong part of the register. */
if (mode == BLKmode
4394 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4395 && (FUNCTION_ARG_PADDING (mode, type)
4396 == (BYTES_BIG_ENDIAN ? upward : downward)))