/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "langhooks.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     pieces.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
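
/* A minimal illustrative sketch, not part of the original source: how a
   byte-per-slot usage map like STACK_USAGE_MAP can be queried before a
   range of outgoing-argument space is reused.  The function name and
   parameters are invented for this example.  */
static int
stack_range_is_free_sketch (const char *usage_map, int map_size,
                            int offset, int len)
{
  int i;

  for (i = offset; i < offset + len && i < map_size; i++)
    if (usage_map[i] != 0)
      return 0;   /* Some byte in the range is already in use.  */
  return 1;
}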
/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been stored
   into the stack.  This bitmap is used to prevent sibling call optimization
   if the function tries to use its parent's incoming argument slots when
   they have already been overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit
     for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
    }

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }
  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
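
/* A minimal sketch, not part of the original source, of the bookkeeping
   done in emit_call_1 above: whether the callee pops N_POPPED bytes
   itself or the caller pops (or defers popping) the remainder, the
   caller's running stack-pointer delta ends up reduced by the whole
   rounded argument block.  All names here are invented for the
   illustration.  */
static long
stack_delta_after_call_sketch (long stack_pointer_delta,
                               long rounded_stack_size, long n_popped)
{
  /* Bytes already popped by the callee on return.  */
  stack_pointer_delta -= n_popped;
  /* Bytes popped (now or later) by the caller.  */
  stack_pointer_delta -= rounded_stack_size - n_popped;
  return stack_pointer_delta;
}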
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         believe them to be.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}
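
/* A minimal sketch, not part of the original source, of the prefix
   stripping done in special_function_p above: skip a leading "_", "__"
   or "__x" before comparing against the magic names.  The function name
   is invented for this example.  */
static const char *
strip_leading_underscores_sketch (const char *name)
{
  if (name[0] != '_')
    return name;
  if (name[1] == '_' && name[2] == 'x')
    return name + 3;          /* "__xsetjmp" -> "setjmp" */
  if (name[1] == '_')
    return name + 2;          /* "__setjmp" -> "setjmp" */
  return name + 1;            /* "_setjmp" -> "setjmp" */
}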
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */
bool
alloca_call_p (const_tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
          & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;
  const_tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */
        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the call insn.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */
        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          args[i].n_aligned_regs
            = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}
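
/* A minimal sketch, not part of the original source, of the
   ENDIAN_CORRECTION computed above: on a target where small BLKmode
   values are padded downward (big-endian), a value smaller than a word
   leaves its high-order bytes empty, so the bitfield store is offset by
   the number of pad bits.  E.g. with 32-bit words, a 3-byte structure
   gives 32 - 24 = 8.  The 8-bit unit size is an assumption.  */
static int
endian_correction_sketch (int pad_downward, int bits_per_word, int bytes)
{
  const int bits_per_unit = 8;  /* assumption: 8-bit units */

  if (pad_downward && bytes * bits_per_unit < bits_per_word)
    return bits_per_word - bytes * bits_per_unit;
  return 0;
}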
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, if any.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            arg = save_expr (arg);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j += inc;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j += inc;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
              *ecf_flags &= ~ECF_LIBCALL_BLOCK;
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

              *ecf_flags &= ~(ECF_LIBCALL_BLOCK);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
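
/* A minimal sketch, not part of the original source, of the rounding
   used in compute_argument_block_size above: the argument block is
   sized so that SIZE plus the current stack-pointer delta together
   reach the next multiple of the preferred boundary (in bytes).  For
   instance, size 20 with delta 8 and a 16-byte boundary yields 24,
   since 8 + 24 = 32 is 16-byte aligned.  The names are invented for
   this example.  */
static int
round_arg_block_sketch (int size, int stack_pointer_delta, int boundary)
{
  return ((size + stack_pointer_delta + boundary - 1)
          / boundary * boundary) - stack_pointer_delta;
}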
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such cases by saving the outgoing stack arguments, but it results in
     worse code)  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      if ((flags & ECF_LIBCALL_BLOCK) == 0
          && TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
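
/* A minimal sketch, not part of the original source, of the heuristic
   used at the end of finalize_must_preallocate above: if the arguments
   that would need a temporary copy amount to at least half of the
   argument block, preallocating the whole block is considered cheaper
   than pushing.  The function name is invented for this example.  */
static int
prefer_preallocation_sketch (int copy_to_evaluate_size, int args_block_size)
{
  return copy_to_evaluate_size * 2 >= args_block_size
         && args_block_size > 0;
}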
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (GET_CODE (offset) == CONST_INT)
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
            }
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}
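
/* A minimal sketch, not part of the original source, of the alignment
   trick used in compute_argument_addresses above: OR-ing the byte
   offset (converted to bits) with the slot boundary and keeping the
   lowest set bit yields the largest power of two dividing both, i.e.
   the provable alignment of the address.  The function name is
   invented for this example.  */
static unsigned int
known_alignment_sketch (unsigned int offset_bits,
                        unsigned int boundary_bits)
{
  unsigned int align = offset_bits | boundary_bits;

  return align & -align;   /* isolate the lowest set bit */
}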
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();   /* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}
/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == crtl->args.internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
           && XEXP (addr, 0) == crtl->args.internal_arg_pointer
           && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
               || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
    return true;
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return true;
    }

  return false;
}
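
/* A minimal sketch, not part of the original source, of the overlap
   test above: a load of SIZE bytes at bit offset I overlaps the
   tail-call argument area iff any of the corresponding bits in the
   stored-arguments bitmap is already set.  The plain byte-array bitmap
   and the function name are assumptions made for this example.  */
static int
overlaps_stored_args_sketch (const unsigned char *stored_bits,
                             unsigned int n_bits,
                             unsigned int i, unsigned int size)
{
  unsigned int k;

  for (k = 0; k < size; k++)
    if (i + k < n_bits
        && ((stored_bits[(i + k) / 8] >> ((i + k) % 8)) & 1))
      return 1;
  return 0;
}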
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  E.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
              if (is_sibcall
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                                                           size))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
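
/* A minimal sketch, not part of the original source, checking the
   invariant the computation above maintains: after popping ADJUSTMENT
   bytes, pushing the arguments leaves the stack aligned to the
   preferred boundary.  The names are invented for this example.  */
static int
adjustment_keeps_alignment_sketch (long stack_pointer_delta,
                                   long unadjusted_args_size,
                                   long adjustment, long boundary)
{
  return ((stack_pointer_delta - adjustment + unadjusted_args_size)
          % boundary) == 0;
}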
/* Scan expression X to check that it does not dereference any argument
   slots already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
/* Scan the sequence after INSN to check that it does not dereference any
   argument slots already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for
   ARG to the stored_args_map bitmap afterwards (when ARG is a register
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
   INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
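
/* A minimal sketch, not part of the original source, of the marking
   loop above: record every byte of ARG's stack slot in a bitmap so
   later loads can be checked against it (the companion of
   overlaps_stored_args_sketch earlier).  The plain byte-array bitmap
   is an assumption made for this example.  */
static void
mark_stored_range_sketch (unsigned char *stored_bits, int low, int size)
{
  int b;

  for (b = low; b < low + size; b++)
    stored_bits[b / 8] |= 1 << (b % 8);
}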
/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new, x);
      return new;
    }
  return x;
}
1887 /* Generate all the code for a CALL_EXPR exp
1888 and return an rtx for its value.
1889 Store the value in TARGET (specified as an rtx) if convenient.
1890 If the value is stored in TARGET then TARGET is returned.
1891 If IGNORE is nonzero, then we ignore the value of the function call. */
1894 expand_call (tree exp, rtx target, int ignore)
1896 /* Nonzero if we are currently expanding a call. */
1897 static int currently_expanding_call = 0;
1899 /* RTX for the function to be called. */
1901 /* Sequence of insns to perform a normal "call". */
1902 rtx normal_call_insns = NULL_RTX;
1903 /* Sequence of insns to perform a tail "call". */
1904 rtx tail_call_insns = NULL_RTX;
1905 /* Data type of the function. */
1907 tree type_arg_types;
1908 /* Declaration of the function being called,
1909 or 0 if the function is computed (not known by name). */
1911 /* The type of the function being called. */
1913 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1916 /* Register in which non-BLKmode value will be returned,
1917 or 0 if no value or if value is BLKmode. */
1919 /* Address where we should return a BLKmode value;
1920 0 if value not BLKmode. */
1921 rtx structure_value_addr = 0;
1922 /* Nonzero if that address is being passed by treating it as
1923 an extra, implicit first parameter. Otherwise,
1924 it is passed by being copied directly into struct_value_rtx. */
1925 int structure_value_addr_parm = 0;
1926 /* Holds the value of implicit argument for the struct value. */
1927 tree structure_value_addr_value = NULL_TREE;
1928 /* Size of aggregate value wanted, or zero if none wanted
1929 or if we are using the non-reentrant PCC calling convention
1930 or expecting the value in registers. */
1931 HOST_WIDE_INT struct_value_size = 0;
1932 /* Nonzero if called function returns an aggregate in memory PCC style,
1933 by returning the address of where to find it. */
1934 int pcc_struct_value = 0;
1935 rtx struct_value = 0;
1937 /* Number of actual parameters in this call, including struct value addr. */
1939 /* Number of named args. Args after this are anonymous ones
1940 and they must all go on the stack. */
1942 /* Number of complex actual arguments that need to be split. */
1943 int num_complex_actuals = 0;
1945 /* Vector of information about each argument.
1946 Arguments are numbered in the order they will be pushed,
1947 not the order they are written. */
1948 struct arg_data *args;
1950 /* Total size in bytes of all the stack-parms scanned so far. */
1951 struct args_size args_size;
1952 struct args_size adjusted_args_size;
1953 /* Size of arguments before any adjustments (such as rounding). */
1954 int unadjusted_args_size;
1955 /* Data on reg parms scanned so far. */
1956 CUMULATIVE_ARGS args_so_far;
1957 /* Nonzero if a reg parm has been scanned. */
1959 /* Nonzero if this is an indirect function call. */
1961 /* Nonzero if we must avoid push-insns in the args for this call.
1962 If stack space is allocated for register parameters, but not by the
1963 caller, then it is preallocated in the fixed part of the stack frame.
1964 So the entire argument block must then be preallocated (i.e., we
1965 ignore PUSH_ROUNDING in that case). */
1967 int must_preallocate = !PUSH_ARGS;
1969 /* Size of the stack reserved for parameter registers. */
1970 int reg_parm_stack_space = 0;
1972 /* Address of space preallocated for stack parms
1973 (on machines that lack push insns), or 0 if space not preallocated. */
1976 /* Mask of ECF_ flags. */
1978 #ifdef REG_PARM_STACK_SPACE
1979 /* Define the boundary of the register parm stack space that needs to be
1980 saved, if any. */
1981 int low_to_save, high_to_save;
1982 rtx save_area = 0; /* Place that it is saved */
1983 #endif
1985 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1986 char *initial_stack_usage_map = stack_usage_map;
1987 char *stack_usage_map_buf = NULL;
1989 int old_stack_allocated;
1991 /* State variables to track stack modifications. */
1992 rtx old_stack_level = 0;
1993 int old_stack_arg_under_construction = 0;
1994 int old_pending_adj = 0;
1995 int old_inhibit_defer_pop = inhibit_defer_pop;
1997 /* Some stack pointer alterations we make are performed via
1998 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1999 which we then also need to save/restore along the way. */
2000 int old_stack_pointer_delta = 0;
2003 tree p = CALL_EXPR_FN (exp);
2004 tree addr = CALL_EXPR_FN (exp);
2006 /* The alignment of the stack, in bits. */
2007 unsigned HOST_WIDE_INT preferred_stack_boundary;
2008 /* The alignment of the stack, in bytes. */
2009 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2010 /* The static chain value to use for this call. */
2011 rtx static_chain_value;
2012 /* See if this is a "nothrow" function call. */
2013 if (TREE_NOTHROW (exp))
2014 flags |= ECF_NOTHROW;
2016 /* See if we can find a DECL-node for the actual function, and get the
2017 function attributes (flags) from the function decl or type node. */
2018 fndecl = get_callee_fndecl (exp);
2019 if (fndecl)
2021 fntype = TREE_TYPE (fndecl);
2022 flags |= flags_from_decl_or_type (fndecl);
2024 else
2026 fntype = TREE_TYPE (TREE_TYPE (p));
2027 flags |= flags_from_decl_or_type (fntype);
2030 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2032 /* Warn if this value is an aggregate type,
2033 regardless of which calling convention we are using for it. */
2034 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2035 warning (OPT_Waggregate_return, "function call has aggregate value");
2037 /* If the result of a non-looping pure or const function call is
2038 ignored (or void), and none of its arguments are volatile, we can
2039 avoid expanding the call and just evaluate the arguments for
2040 side effects. */
2041 if ((flags & (ECF_CONST | ECF_PURE))
2042 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2043 && (ignore || target == const0_rtx
2044 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2046 bool volatilep = false;
2048 call_expr_arg_iterator iter;
2050 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2051 if (TREE_THIS_VOLATILE (arg))
2053 volatilep = true;
2054 break;
2057 if (! volatilep)
2059 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2060 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2061 return const0_rtx;
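/* For illustration: given "(void) pure_fn (x++);", where pure_fn is a
   hypothetical pure, non-looping function, the call itself is never
   emitted, but the loop above still expands "x++" for its side effect,
   as C semantics require.  */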
2065 #ifdef REG_PARM_STACK_SPACE
2066 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2067 #endif
2069 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl)))
2070 && reg_parm_stack_space > 0 && PUSH_ARGS)
2071 must_preallocate = 1;
2073 /* Set up a place to return a structure. */
2075 /* Cater to broken compilers. */
2076 if (aggregate_value_p (exp, fndecl))
2078 /* This call returns a big structure. */
2079 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE
2080 | ECF_LIBCALL_BLOCK);
2082 #ifdef PCC_STATIC_STRUCT_RETURN
2084 pcc_struct_value = 1;
2086 #else /* not PCC_STATIC_STRUCT_RETURN */
2088 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2090 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2091 structure_value_addr = XEXP (target, 0);
2094 /* For variable-sized objects, we must be called with a target
2095 specified. If we were to allocate space on the stack here,
2096 we would have no way of knowing when to free it. */
2097 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2099 mark_temp_addr_taken (d);
2100 structure_value_addr = XEXP (d, 0);
2104 #endif /* not PCC_STATIC_STRUCT_RETURN */
2107 /* Figure out the amount to which the stack should be aligned. */
2108 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2111 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2112 /* Without automatic stack alignment, we can't increase preferred
2113 stack boundary. With automatic stack alignment, it is
2114 unnecessary since unless we can guarantee that all callers will
2115 align the outgoing stack properly, callee has to align its
2116 stack anyway. */
2117 if (i
2118 && i->preferred_incoming_stack_boundary
2119 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2120 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2123 /* Operand 0 is a pointer-to-function; get the type of the function. */
2124 funtype = TREE_TYPE (addr);
2125 gcc_assert (POINTER_TYPE_P (funtype));
2126 funtype = TREE_TYPE (funtype);
2128 /* Count whether there are actual complex arguments that need to be split
2129 into their real and imaginary parts. Munge the type_arg_types
2130 appropriately here as well. */
2131 if (targetm.calls.split_complex_arg)
2133 call_expr_arg_iterator iter;
2135 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2137 tree type = TREE_TYPE (arg);
2138 if (type && TREE_CODE (type) == COMPLEX_TYPE
2139 && targetm.calls.split_complex_arg (type))
2140 num_complex_actuals++;
2142 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2145 type_arg_types = TYPE_ARG_TYPES (funtype);
2147 if (flags & ECF_MAY_BE_ALLOCA)
2148 cfun->calls_alloca = 1;
2150 /* If struct_value_rtx is 0, it means pass the address
2151 as if it were an extra parameter. Put the argument expression
2152 in structure_value_addr_value. */
2153 if (structure_value_addr && struct_value == 0)
2155 /* If structure_value_addr is a REG other than
2156 virtual_outgoing_args_rtx, we can always use it. If it
2157 is not a REG, we must always copy it into a register.
2158 If it is virtual_outgoing_args_rtx, we must copy it to another
2159 register in some cases. */
2160 rtx temp = (!REG_P (structure_value_addr)
2161 || (ACCUMULATE_OUTGOING_ARGS
2162 && stack_arg_under_construction
2163 && structure_value_addr == virtual_outgoing_args_rtx)
2164 ? copy_addr_to_reg (convert_memory_address
2165 (Pmode, structure_value_addr))
2166 : structure_value_addr);
2168 structure_value_addr_value =
2169 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2170 structure_value_addr_parm = 1;
2173 /* Count the arguments and set NUM_ACTUALS. */
2174 num_actuals =
2175 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2177 /* Compute number of named args.
2178 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2180 if (type_arg_types != 0)
2181 n_named_args
2182 = (list_length (type_arg_types)
2183 /* Count the struct value address, if it is passed as a parm. */
2184 + structure_value_addr_parm);
2185 else
2186 /* If we know nothing, treat all args as named. */
2187 n_named_args = num_actuals;
2189 /* Start updating where the next arg would go.
2191 On some machines (such as the PA) indirect calls have a different
2192 calling convention than normal calls. The fourth argument in
2193 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2194 or not. */
2195 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2197 /* Now possibly adjust the number of named args.
2198 Normally, don't include the last named arg if anonymous args follow.
2199 We do include the last named arg if
2200 targetm.calls.strict_argument_naming() returns nonzero.
2201 (If no anonymous args follow, the result of list_length is actually
2202 one too large. This is harmless.)
2204 If targetm.calls.pretend_outgoing_varargs_named() returns
2205 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2206 this machine will be able to place unnamed args that were passed
2207 in registers into the stack. So treat all args as named. This
2208 allows the insns emitted for a specific argument list to be
2209 independent of the function declaration.
2211 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2212 we do not have any reliable way to pass unnamed args in
2213 registers, so we must force them into memory. */
2215 if (type_arg_types != 0
2216 && targetm.calls.strict_argument_naming (&args_so_far))
2217 ;
2218 else if (type_arg_types != 0
2219 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2220 /* Don't include the last named arg. */
2221 --n_named_args;
2222 else
2223 /* Treat all args as named. */
2224 n_named_args = num_actuals;
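/* A worked example: for "int printf (const char *, ...)" called with three
   actual arguments, list_length (type_arg_types) is 1, so only the format
   string is named and the remaining two arguments are anonymous.  For a
   fully prototyped "int f (int, int)", the trailing void terminator makes
   list_length return 3, one too large, which is harmless because no
   anonymous arguments follow.  */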
2226 /* Make a vector to hold all the information about each arg. */
2227 args = alloca (num_actuals * sizeof (struct arg_data));
2228 memset (args, 0, num_actuals * sizeof (struct arg_data));
2230 /* Build up entries in the ARGS array, compute the size of the
2231 arguments into ARGS_SIZE, etc. */
2232 initialize_argument_information (num_actuals, args, &args_size,
2233 n_named_args, exp,
2234 structure_value_addr_value, fndecl,
2235 &args_so_far, reg_parm_stack_space,
2236 &old_stack_level, &old_pending_adj,
2237 &must_preallocate, &flags,
2238 &try_tail_call, CALL_FROM_THUNK_P (exp));
2242 /* If this function requires a variable-sized argument list, don't
2243 try to make a cse'able block for this call. We may be able to
2244 do this eventually, but it is too complicated to keep track of
2245 what insns go in the cse'able block and which don't. */
2246 if (args_size.var)
2247 flags &= ~ECF_LIBCALL_BLOCK;
2248 must_preallocate = 1;
2251 /* Now make final decision about preallocating stack space. */
2252 must_preallocate = finalize_must_preallocate (must_preallocate,
2253 num_actuals, args,
2254 &args_size);
2256 /* If the structure value address will reference the stack pointer, we
2257 must stabilize it. We don't need to do this if we know that we are
2258 not going to adjust the stack pointer in processing this call. */
2260 if (structure_value_addr
2261 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2262 || reg_mentioned_p (virtual_outgoing_args_rtx,
2263 structure_value_addr))
2264 && (args_size.var
2265 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2266 structure_value_addr = copy_to_reg (structure_value_addr);
2268 /* Tail calls can make things harder to debug, and we've traditionally
2269 pushed these optimizations into -O2. Don't try if we're already
2270 expanding a call, as that means we're an argument. Don't try if
2271 there are cleanups, as we know there's code to follow the call. */
2273 if (currently_expanding_call++ != 0
2274 || !flag_optimize_sibling_calls
2275 || args_size.var
2276 || lookup_stmt_eh_region (exp) >= 0
2277 || dbg_cnt (tail_call) == false)
2278 try_tail_call = 0;
2280 /* Remaining reasons why tail call optimization may fail. */
2282 #ifdef HAVE_sibcall_epilogue
2283 !HAVE_sibcall_epilogue
2284 #else
2285 1
2286 #endif
2288 /* Doing sibling call optimization needs some work, since
2289 structure_value_addr can be allocated on the stack.
2290 It does not seem worth the effort since few optimizable
2291 sibling calls will return a structure. */
2292 || structure_value_addr != NULL_RTX
2293 /* Check whether the target is able to optimize the call
2294 into a sibcall. */
2295 || !targetm.function_ok_for_sibcall (fndecl, exp)
2296 /* Functions that do not return exactly once may not be sibcall
2297 optimized. */
2298 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2299 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2300 /* If the called function is nested in the current one, it might access
2301 some of the caller's arguments, but could clobber them beforehand if
2302 the argument areas are shared. */
2303 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2304 /* If this function requires more stack slots than the current
2305 function, we cannot change it into a sibling call.
2306 crtl->args.pretend_args_size is not part of the
2307 stack allocated by our caller. */
2308 || args_size.constant > (crtl->args.size
2309 - crtl->args.pretend_args_size)
2310 /* If the callee pops its own arguments, then it must pop exactly
2311 the same number of arguments as the current function. */
2312 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2313 != RETURN_POPS_ARGS (current_function_decl,
2314 TREE_TYPE (current_function_decl),
2316 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2317 try_tail_call = 0;
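/* A concrete case of the stack-size test above: if the current function
   received 8 bytes of incoming arguments and the callee needs 24 bytes of
   argument space, args_size.constant (24) exceeds crtl->args.size minus
   crtl->args.pretend_args_size (8), so the caller's incoming argument area
   cannot be reused and try_tail_call is cleared.  */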
2319 /* Ensure current function's preferred stack boundary is at least
2320 what we need. We don't have to increase alignment for recursive
2321 functions. */
2322 if (crtl->preferred_stack_boundary < preferred_stack_boundary
2323 && fndecl != current_function_decl)
2324 crtl->preferred_stack_boundary = preferred_stack_boundary;
2326 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2328 /* We want to make two insn chains; one for a sibling call, the other
2329 for a normal call. We will select one of the two chains after
2330 initial RTL generation is complete. */
2331 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2333 int sibcall_failure = 0;
2334 /* We want to emit any pending stack adjustments before the tail
2335 recursion "call". That way we know any adjustment after the tail
2336 recursion call can be ignored if we indeed use the tail
2337 call. */
2338 int save_pending_stack_adjust = 0;
2339 int save_stack_pointer_delta = 0;
2341 rtx before_call, next_arg_reg, after_args;
2345 /* State variables we need to save and restore between
2346 iterations. */
2347 save_pending_stack_adjust = pending_stack_adjust;
2348 save_stack_pointer_delta = stack_pointer_delta;
2350 if (pass)
2351 flags &= ~ECF_SIBCALL;
2352 else
2353 flags |= ECF_SIBCALL;
2355 /* Other state variables that we must reinitialize each time
2356 through the loop (that are not initialized by the loop itself). */
2357 argblock = 0;
2358 call_fusage = 0;
2360 /* Start a new sequence for the normal call case.
2362 From this point on, if the sibling call fails, we want to set
2363 sibcall_failure instead of continuing the loop. */
2364 start_sequence ();
2366 /* Don't let pending stack adjusts add up to too much.
2367 Also, do all pending adjustments now if there is any chance
2368 this might be a call to alloca or if we are expanding a sibling
2369 call sequence.
2370 Also do the adjustments before a throwing call, otherwise
2371 exception handling can fail; PR 19225. */
2372 if (pending_stack_adjust >= 32
2373 || (pending_stack_adjust > 0
2374 && (flags & ECF_MAY_BE_ALLOCA))
2375 || (pending_stack_adjust > 0
2376 && flag_exceptions && !(flags & ECF_NOTHROW))
2377 || pass == 0)
2378 do_pending_stack_adjust ();
2380 /* When calling a const function, we must pop the stack args right away,
2381 so that the pop is deleted or moved with the call. */
2382 if (pass && (flags & ECF_LIBCALL_BLOCK))
2383 NO_DEFER_POP;
2385 /* Precompute any arguments as needed. */
2386 if (pass)
2387 precompute_arguments (flags, num_actuals, args);
2389 /* Now we are about to start emitting insns that can be deleted
2390 if a libcall is deleted. */
2391 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2392 start_sequence ();
2394 if (pass == 0 && crtl->stack_protect_guard)
2395 stack_protect_epilogue ();
2397 adjusted_args_size = args_size;
2398 /* Compute the actual size of the argument block required. The variable
2399 and constant sizes must be combined, the size may have to be rounded,
2400 and there may be a minimum required size. When generating a sibcall
2401 pattern, do not round up, since we'll be re-using whatever space our
2402 caller provided. */
2403 unadjusted_args_size
2404 = compute_argument_block_size (reg_parm_stack_space,
2405 &adjusted_args_size,
2407 (pass == 0 ? 0
2408 : preferred_stack_boundary));
2410 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2412 /* The argument block when performing a sibling call is the
2413 incoming argument block. */
2414 if (pass == 0)
2416 argblock = virtual_incoming_args_rtx;
2417 argblock
2418 #ifdef STACK_GROWS_DOWNWARD
2419 = plus_constant (argblock, crtl->args.pretend_args_size);
2420 #else
2421 = plus_constant (argblock, -crtl->args.pretend_args_size);
2422 #endif
2423 stored_args_map = sbitmap_alloc (args_size.constant);
2424 sbitmap_zero (stored_args_map);
2427 /* If we have no actual push instructions, or shouldn't use them,
2428 make space for all args right now. */
2429 else if (adjusted_args_size.var != 0)
2431 if (old_stack_level == 0)
2433 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2434 old_stack_pointer_delta = stack_pointer_delta;
2435 old_pending_adj = pending_stack_adjust;
2436 pending_stack_adjust = 0;
2437 /* stack_arg_under_construction says whether a stack arg is
2438 being constructed at the old stack level. Pushing the stack
2439 gets a clean outgoing argument block. */
2440 old_stack_arg_under_construction = stack_arg_under_construction;
2441 stack_arg_under_construction = 0;
2443 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2447 /* Note that we must go through the motions of allocating an argument
2448 block even if the size is zero because we may be storing args
2449 in the area reserved for register arguments, which may be part of
2450 the stack frame. */
2452 int needed = adjusted_args_size.constant;
2454 /* Store the maximum argument space used. It will be pushed by
2455 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2456 checking). */
2458 if (needed > crtl->outgoing_args_size)
2459 crtl->outgoing_args_size = needed;
2461 if (must_preallocate)
2463 if (ACCUMULATE_OUTGOING_ARGS)
2465 /* Since the stack pointer will never be pushed, it is
2466 possible for the evaluation of a parm to clobber
2467 something we have already written to the stack.
2468 Since most function calls on RISC machines do not use
2469 the stack, this is uncommon, but must work correctly.
2471 Therefore, we save any area of the stack that was already
2472 written and that we are using. Here we set up to do this
2473 by making a new stack usage map from the old one. The
2474 actual save will be done by store_one_arg.
2476 Another approach might be to try to reorder the argument
2477 evaluations to avoid this conflicting stack usage. */
2479 /* Since we will be writing into the entire argument area,
2480 the map must be allocated for its entire size, not just
2481 the part that is the responsibility of the caller. */
2482 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
2483 needed += reg_parm_stack_space;
2485 #ifdef ARGS_GROW_DOWNWARD
2486 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2487 needed + 1);
2488 #else
2489 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2490 needed);
2491 #endif
2492 if (stack_usage_map_buf)
2493 free (stack_usage_map_buf);
2494 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2495 stack_usage_map = stack_usage_map_buf;
2497 if (initial_highest_arg_in_use)
2498 memcpy (stack_usage_map, initial_stack_usage_map,
2499 initial_highest_arg_in_use);
2501 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2502 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2503 (highest_outgoing_arg_in_use
2504 - initial_highest_arg_in_use));
2507 /* The address of the outgoing argument list must not be
2508 copied to a register here, because argblock would be left
2509 pointing to the wrong place after the call to
2510 allocate_dynamic_stack_space below. */
2512 argblock = virtual_outgoing_args_rtx;
2516 if (inhibit_defer_pop == 0)
2518 /* Try to reuse some or all of the pending_stack_adjust
2519 to get this space. */
2520 needed
2521 = (combine_pending_stack_adjustment_and_call
2522 (unadjusted_args_size,
2523 &adjusted_args_size,
2524 preferred_unit_stack_boundary));
2526 /* combine_pending_stack_adjustment_and_call computes
2527 an adjustment before the arguments are allocated.
2528 Account for them and see whether or not the stack
2529 needs to go up or down. */
2530 needed = unadjusted_args_size - needed;
2534 /* We're releasing stack space. */
2535 /* ??? We can avoid any adjustment at all if we're
2536 already aligned. FIXME. */
2537 pending_stack_adjust = -needed;
2538 do_pending_stack_adjust ();
2542 /* We need to allocate space. We'll do that in
2543 push_block below. */
2544 pending_stack_adjust = 0;
2547 /* Special case this because overhead of `push_block' in
2548 this case is non-trivial. */
2549 if (needed == 0)
2550 argblock = virtual_outgoing_args_rtx;
2551 else
2553 argblock = push_block (GEN_INT (needed), 0, 0);
2554 #ifdef ARGS_GROW_DOWNWARD
2555 argblock = plus_constant (argblock, needed);
2556 #endif
2559 /* We only really need to call `copy_to_reg' in the case
2560 where push insns are going to be used to pass ARGBLOCK
2561 to a function call in ARGS. In that case, the stack
2562 pointer changes value from the allocation point to the
2563 call point, and hence the value of
2564 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2565 as well always do it. */
2566 argblock = copy_to_reg (argblock);
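/* For illustration of the reuse above: if 32 bytes of pending stack pops
   are outstanding and this call needs 16 bytes of argument space, the
   combination pops only the 16 surplus bytes and reuses the remaining
   space for the arguments, instead of popping all 32 and then pushing 16
   again.  */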
2571 if (ACCUMULATE_OUTGOING_ARGS)
2573 /* The save/restore code in store_one_arg handles all
2574 cases except one: a constructor call (including a C
2575 function returning a BLKmode struct) to initialize
2576 an argument. */
2577 if (stack_arg_under_construction)
2579 rtx push_size
2580 = GEN_INT (adjusted_args_size.constant
2581 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL
2582 : TREE_TYPE (fndecl))) ? 0
2583 : reg_parm_stack_space));
2584 if (old_stack_level == 0)
2586 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2587 NULL_RTX);
2588 old_stack_pointer_delta = stack_pointer_delta;
2589 old_pending_adj = pending_stack_adjust;
2590 pending_stack_adjust = 0;
2591 /* stack_arg_under_construction says whether a stack
2592 arg is being constructed at the old stack level.
2593 Pushing the stack gets a clean outgoing argument
2595 old_stack_arg_under_construction
2596 = stack_arg_under_construction;
2597 stack_arg_under_construction = 0;
2598 /* Make a new map for the new argument list. */
2599 if (stack_usage_map_buf)
2600 free (stack_usage_map_buf);
2601 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2602 stack_usage_map = stack_usage_map_buf;
2603 highest_outgoing_arg_in_use = 0;
2605 allocate_dynamic_stack_space (push_size, NULL_RTX,
2606 BITS_PER_UNIT);
2609 /* If argument evaluation might modify the stack pointer,
2610 copy the address of the argument list to a register. */
2611 for (i = 0; i < num_actuals; i++)
2612 if (args[i].pass_on_stack)
2614 argblock = copy_addr_to_reg (argblock);
2615 break;
2619 compute_argument_addresses (args, argblock, num_actuals);
2621 /* If we push args individually in reverse order, perform stack alignment
2622 before the first push (the last arg). */
2623 if (PUSH_ARGS_REVERSED && argblock == 0
2624 && adjusted_args_size.constant != unadjusted_args_size)
2626 /* When the stack adjustment is pending, we get better code
2627 by combining the adjustments. */
2628 if (pending_stack_adjust
2629 && ! (flags & ECF_LIBCALL_BLOCK)
2630 && ! inhibit_defer_pop)
2632 pending_stack_adjust
2633 = (combine_pending_stack_adjustment_and_call
2634 (unadjusted_args_size,
2635 &adjusted_args_size,
2636 preferred_unit_stack_boundary));
2637 do_pending_stack_adjust ();
2639 else if (argblock == 0)
2640 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2641 - unadjusted_args_size));
2643 /* Now that the stack is properly aligned, pops can't safely
2644 be deferred during the evaluation of the arguments. */
2645 NO_DEFER_POP;
2647 funexp = rtx_for_function_call (fndecl, addr);
2649 /* Figure out the register where the value, if any, will come back. */
2651 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2652 && ! structure_value_addr)
2654 if (pcc_struct_value)
2655 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2656 fndecl, NULL, (pass == 0));
2658 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2659 (pass == 0));
2661 /* If VALREG is a PARALLEL whose first member has a zero
2662 offset, use that. This is for targets such as m68k that
2663 return the same value in multiple places. */
2664 if (GET_CODE (valreg) == PARALLEL)
2666 rtx elem = XVECEXP (valreg, 0, 0);
2667 rtx where = XEXP (elem, 0);
2668 rtx offset = XEXP (elem, 1);
2669 if (offset == const0_rtx
2670 && GET_MODE (where) == GET_MODE (valreg))
2671 valreg = where;
2675 /* Precompute all register parameters. It isn't safe to compute anything
2676 once we have started filling any specific hard regs. */
2677 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2679 if (CALL_EXPR_STATIC_CHAIN (exp))
2680 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2682 static_chain_value = 0;
2684 #ifdef REG_PARM_STACK_SPACE
2685 /* Save the fixed argument area if it's part of the caller's frame and
2686 is clobbered by argument setup for this call. */
2687 if (ACCUMULATE_OUTGOING_ARGS && pass)
2688 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2689 &low_to_save, &high_to_save);
2692 /* Now store (and compute if necessary) all non-register parms.
2693 These come before register parms, since they can require block-moves,
2694 which could clobber the registers used for register parms.
2695 Parms which have partial registers are not stored here,
2696 but we do preallocate space here if they want that. */
2698 for (i = 0; i < num_actuals; i++)
2699 if (args[i].reg == 0 || args[i].pass_on_stack)
2701 rtx before_arg = get_last_insn ();
2703 if (store_one_arg (&args[i], argblock, flags,
2704 adjusted_args_size.var != 0,
2705 reg_parm_stack_space)
2706 || (pass == 0
2707 && check_sibcall_argument_overlap (before_arg,
2708 &args[i], 1)))
2709 sibcall_failure = 1;
2711 if (flags & ECF_CONST
2712 && args[i].stack
2713 && args[i].value == args[i].stack)
2714 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2715 gen_rtx_USE (VOIDmode,
2716 args[i].stack),
2717 call_fusage);
2720 /* If we have a parm that is passed in registers but not in memory
2721 and whose alignment does not permit a direct copy into registers,
2722 make a group of pseudos that correspond to each register that we
2723 will later fill. */
2724 if (STRICT_ALIGNMENT)
2725 store_unaligned_arguments_into_pseudos (args, num_actuals);
2727 /* Now store any partially-in-registers parm.
2728 This is the last place a block-move can happen. */
2730 for (i = 0; i < num_actuals; i++)
2731 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2733 rtx before_arg = get_last_insn ();
2735 if (store_one_arg (&args[i], argblock, flags,
2736 adjusted_args_size.var != 0,
2737 reg_parm_stack_space)
2738 || (pass == 0
2739 && check_sibcall_argument_overlap (before_arg,
2740 &args[i], 1)))
2741 sibcall_failure = 1;
2744 /* If we pushed args in forward order, perform stack alignment
2745 after pushing the last arg. */
2746 if (!PUSH_ARGS_REVERSED && argblock == 0)
2747 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2748 - unadjusted_args_size));
2750 /* If register arguments require space on the stack and stack space
2751 was not preallocated, allocate stack space here for arguments
2752 passed in registers. */
2753 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl)))
2754 && !ACCUMULATE_OUTGOING_ARGS
2755 && must_preallocate == 0 && reg_parm_stack_space > 0)
2756 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2758 /* Pass the function the address in which to return a
2759 structure value. */
2760 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2762 structure_value_addr
2763 = convert_memory_address (Pmode, structure_value_addr);
2764 emit_move_insn (struct_value,
2765 force_reg (Pmode,
2766 force_operand (structure_value_addr,
2767 NULL_RTX)));
2769 if (REG_P (struct_value))
2770 use_reg (&call_fusage, struct_value);
2773 after_args = get_last_insn ();
2774 funexp = prepare_call_address (funexp, static_chain_value,
2775 &call_fusage, reg_parm_seen, pass == 0);
2777 load_register_parameters (args, num_actuals, &call_fusage, flags,
2778 pass == 0, &sibcall_failure);
2780 /* Save a pointer to the last insn before the call, so that we can
2781 later safely search backwards to find the CALL_INSN. */
2782 before_call = get_last_insn ();
2784 /* Set up next argument register. For sibling calls on machines
2785 with register windows this should be the incoming register. */
2786 #ifdef FUNCTION_INCOMING_ARG
2787 if (pass == 0)
2788 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2789 void_type_node, 1);
2790 else
2791 #endif
2792 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2793 void_type_node, 1);
2795 /* All arguments and registers used for the call must be set up by
2796 now! */
2798 /* Stack must be properly aligned now. */
2799 gcc_assert (!pass
2800 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2802 /* Generate the actual call instruction. */
2803 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2804 adjusted_args_size.constant, struct_value_size,
2805 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2806 flags, & args_so_far);
2808 /* If the call setup or the call itself overlaps with anything
2809 of the argument setup we probably clobbered our call address.
2810 In that case we can't do sibcalls. */
2811 if (pass == 0
2812 && check_sibcall_argument_overlap (after_args, 0, 0))
2813 sibcall_failure = 1;
2815 /* If a non-BLKmode value is returned at the most significant end
2816 of a register, shift the register right by the appropriate amount
2817 and update VALREG accordingly. BLKmode values are handled by the
2818 group load/store machinery below. */
2819 if (!structure_value_addr
2820 && !pcc_struct_value
2821 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2822 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2824 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2825 sibcall_failure = 1;
2826 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2829 /* If call is cse'able, make appropriate pair of reg-notes around it.
2830 Test valreg so we don't crash; may safely ignore `const'
2831 if return type is void. Disable for PARALLEL return values, because
2832 we have no way to move such values into a pseudo register. */
2833 if (pass && (flags & ECF_LIBCALL_BLOCK))
2837 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2839 insns = get_insns ();
2841 /* Expansion of block moves possibly introduced a loop that may
2842 not appear inside libcall block. */
2843 for (insn = insns; insn; insn = NEXT_INSN (insn))
2855 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2857 /* Mark the return value as a pointer if needed. */
2858 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2859 mark_reg_pointer (temp,
2860 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2863 if (flag_unsafe_math_optimizations
2864 && fndecl
2865 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2866 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2867 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2868 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2869 note = gen_rtx_fmt_e (SQRT,
2870 GET_MODE (temp),
2871 args[0].initial_value);
2874 /* Construct an "equal form" for the value which
2875 mentions all the arguments in order as well as
2876 the function name. */
2877 for (i = 0; i < num_actuals; i++)
2878 note = gen_rtx_EXPR_LIST (VOIDmode,
2879 args[i].initial_value, note);
2880 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2882 emit_libcall_block (insns, temp, valreg, note);
2887 else if (pass && (flags & ECF_MALLOC))
2889 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2892 /* The return value from a malloc-like function is a pointer. */
2893 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2894 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2896 emit_move_insn (temp, valreg);
2898 /* The return value from a malloc-like function cannot alias
2899 anything else. */
2900 last = get_last_insn ();
2901 REG_NOTES (last) =
2902 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2904 /* Write out the sequence. */
2905 insns = get_insns ();
2911 /* For calls to `setjmp', etc., inform
2912 function.c:setjmp_warnings that it should complain if
2913 nonvolatile values are live. For functions that cannot
2914 return, inform flow that control does not fall through. */
2916 if ((flags & ECF_NORETURN) || pass == 0)
2918 /* The barrier must be emitted
2919 immediately after the CALL_INSN. Some ports emit more
2920 than just a CALL_INSN above, so we must search for it here. */
2922 rtx last = get_last_insn ();
2923 while (!CALL_P (last))
2925 last = PREV_INSN (last);
2926 /* There was no CALL_INSN? */
2927 gcc_assert (last != before_call);
2930 emit_barrier_after (last);
2932 /* Stack adjustments after a noreturn call are dead code.
2933 However when NO_DEFER_POP is in effect, we must preserve
2934 stack_pointer_delta. */
2935 if (inhibit_defer_pop == 0)
2937 stack_pointer_delta = old_stack_allocated;
2938 pending_stack_adjust = 0;
2942 /* If value type not void, return an rtx for the value. */
2944 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2945 || ignore)
2946 target = const0_rtx;
2947 else if (structure_value_addr)
2949 if (target == 0 || !MEM_P (target))
2951 target
2952 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2953 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2954 structure_value_addr));
2955 set_mem_attributes (target, exp, 1);
2958 else if (pcc_struct_value)
2960 /* This is the special C++ case where we need to
2961 know what the true target was. We take care to
2962 never use this value more than once in one expression. */
2963 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2964 copy_to_reg (valreg));
2965 set_mem_attributes (target, exp, 1);
2967 /* Handle calls that return values in multiple non-contiguous locations.
2968 The Irix 6 ABI has examples of this. */
2969 else if (GET_CODE (valreg) == PARALLEL)
2971 if (target == 0)
2973 /* This will only be assigned once, so it can be readonly. */
2974 tree nt = build_qualified_type (TREE_TYPE (exp),
2975 (TYPE_QUALS (TREE_TYPE (exp))
2976 | TYPE_QUAL_CONST));
2978 target = assign_temp (nt, 0, 1, 1);
2981 if (! rtx_equal_p (target, valreg))
2982 emit_group_store (target, valreg, TREE_TYPE (exp),
2983 int_size_in_bytes (TREE_TYPE (exp)));
2985 /* We cannot support sibling calls for this case. */
2986 sibcall_failure = 1;
2988 else if (target
2989 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2990 && GET_MODE (target) == GET_MODE (valreg))
2992 bool may_overlap = false;
2994 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2995 reg to a plain register. */
2996 if (!REG_P (target) || HARD_REGISTER_P (target))
2997 valreg = avoid_likely_spilled_reg (valreg);
2999 /* If TARGET is a MEM in the argument area, and we have
3000 saved part of the argument area, then we can't store
3001 directly into TARGET as it may get overwritten when we
3002 restore the argument save area below. Don't work too
3003 hard though and simply force TARGET to a register if it
3004 is a MEM; the optimizer is quite likely to sort it out. */
3005 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3006 for (i = 0; i < num_actuals; i++)
3007 if (args[i].save_area)
3009 may_overlap = true;
3010 break;
3013 if (may_overlap)
3014 target = copy_to_reg (valreg);
3017 /* TARGET and VALREG cannot be equal at this point
3018 because the latter would not have
3019 REG_FUNCTION_VALUE_P true, while the former would if
3020 it were referring to the same register.
3022 If they refer to the same register, this move will be
3023 a no-op, except when function inlining is being
3024 done. */
3025 emit_move_insn (target, valreg);
3027 /* If we are setting a MEM, this code must be executed.
3028 Since it is emitted after the call insn, sibcall
3029 optimization cannot be performed in that case. */
3030 if (MEM_P (target))
3031 sibcall_failure = 1;
3034 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3036 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3038 /* We cannot support sibling calls for this case. */
3039 sibcall_failure = 1;
3042 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3044 if (targetm.calls.promote_function_return(funtype))
3046 /* If we promoted this return value, make the proper SUBREG.
3047 TARGET might be const0_rtx here, so be careful. */
3048 if (REG_P (target)
3049 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3050 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3052 tree type = TREE_TYPE (exp);
3053 int unsignedp = TYPE_UNSIGNED (type);
3054 int offset = 0;
3055 enum machine_mode pmode;
3057 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3058 /* If we don't promote as expected, something is wrong. */
3059 gcc_assert (GET_MODE (target) == pmode);
3061 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3062 && (GET_MODE_SIZE (GET_MODE (target))
3063 > GET_MODE_SIZE (TYPE_MODE (type))))
3065 offset = GET_MODE_SIZE (GET_MODE (target))
3066 - GET_MODE_SIZE (TYPE_MODE (type));
3067 if (! BYTES_BIG_ENDIAN)
3068 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3069 else if (! WORDS_BIG_ENDIAN)
3070 offset %= UNITS_PER_WORD;
3072 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3073 SUBREG_PROMOTED_VAR_P (target) = 1;
3074 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
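/* A worked example of the offset computation above: on a 64-bit target
   that is both word and byte big-endian, an SImode value promoted to a
   DImode register gives offset = 8 - 4 = 4; neither endianness test
   changes it, so the SUBREG selects the low-order half, which on
   big-endian lives at byte offset 4.  */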
3078 /* If size of args is variable or this was a constructor call for a stack
3079 argument, restore saved stack-pointer value. */
3081 if (old_stack_level)
3083 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3084 stack_pointer_delta = old_stack_pointer_delta;
3085 pending_stack_adjust = old_pending_adj;
3086 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3087 stack_arg_under_construction = old_stack_arg_under_construction;
3088 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3089 stack_usage_map = initial_stack_usage_map;
3090 sibcall_failure = 1;
3092 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3094 #ifdef REG_PARM_STACK_SPACE
3095 if (save_area)
3096 restore_fixed_argument_area (save_area, argblock,
3097 high_to_save, low_to_save);
3098 #endif
3100 /* If we saved any argument areas, restore them. */
3101 for (i = 0; i < num_actuals; i++)
3102 if (args[i].save_area)
3104 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3105 rtx stack_area
3106 = gen_rtx_MEM (save_mode,
3107 memory_address (save_mode,
3108 XEXP (args[i].stack_slot, 0)));
3110 if (save_mode != BLKmode)
3111 emit_move_insn (stack_area, args[i].save_area);
3113 emit_block_move (stack_area, args[i].save_area,
3114 GEN_INT (args[i].locate.size.constant),
3115 BLOCK_OP_CALL_PARM);
3118 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3119 stack_usage_map = initial_stack_usage_map;
3122 /* If this was alloca, record the new stack level for nonlocal gotos.
3123 Check for the handler slots since we might not have a save area
3124 for non-local gotos. */
3126 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3127 update_nonlocal_goto_save_area ();
3129 /* Free up storage we no longer need. */
3130 for (i = 0; i < num_actuals; ++i)
3131 if (args[i].aligned_regs)
3132 free (args[i].aligned_regs);
3134 insns = get_insns ();
3139 tail_call_insns = insns;
3141 /* Restore the pending stack adjustment now that we have
3142 finished generating the sibling call sequence. */
3144 pending_stack_adjust = save_pending_stack_adjust;
3145 stack_pointer_delta = save_stack_pointer_delta;
3147 /* Prepare arg structure for next iteration. */
3148 for (i = 0; i < num_actuals; i++)
3151 args[i].aligned_regs = 0;
3155 sbitmap_free (stored_args_map);
3159 normal_call_insns = insns;
3161 /* Verify that we've deallocated all the stack we used. */
3162 gcc_assert ((flags & ECF_NORETURN)
3163 || (old_stack_allocated
3164 == stack_pointer_delta - pending_stack_adjust));
3167 /* If something prevents making this a sibling call,
3168 zero out the sequence. */
3169 if (sibcall_failure)
3170 tail_call_insns = NULL_RTX;
3175 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3176 arguments too, as argument area is now clobbered by the call. */
3177 if (tail_call_insns)
3179 emit_insn (tail_call_insns);
3180 crtl->tail_call_emit = true;
3183 emit_insn (normal_call_insns);
3185 currently_expanding_call--;
3187 if (stack_usage_map_buf)
3188 free (stack_usage_map_buf);
3193 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3194 this function's incoming arguments.
3196 At the start of RTL generation we know the only REG_EQUIV notes
3197 in the rtl chain are those for incoming arguments, so we can look
3198 for REG_EQUIV notes between the start of the function and the
3199 NOTE_INSN_FUNCTION_BEG.
3201 This is (slight) overkill. We could keep track of the highest
3202 argument we clobber and be more selective in removing notes, but it
3203 does not seem to be worth the effort. */
3205 void
3206 fixup_tail_calls (void)
3210 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3214 /* There are never REG_EQUIV notes for the incoming arguments
3215 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3216 if (NOTE_P (insn)
3217 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3218 break;
3220 note = find_reg_note (insn, REG_EQUIV, 0);
3221 if (note)
3222 remove_note (insn, note);
3223 note = find_reg_note (insn, REG_EQUIV, 0);
3224 gcc_assert (!note);
3228 /* Traverse a list of TYPES and expand all complex types into their
3229 components. */
3230 static tree
3231 split_complex_types (tree types)
3235 /* Before allocating memory, check for the common case of no complex. */
3236 for (p = types; p; p = TREE_CHAIN (p))
3238 tree type = TREE_VALUE (p);
3239 if (TREE_CODE (type) == COMPLEX_TYPE
3240 && targetm.calls.split_complex_arg (type))
3241 goto found;
3243 return types;
3245 found:
3246 types = copy_list (types);
3248 for (p = types; p; p = TREE_CHAIN (p))
3250 tree complex_type = TREE_VALUE (p);
3252 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3253 && targetm.calls.split_complex_arg (complex_type))
3257 /* Rewrite complex type with component type. */
3258 TREE_VALUE (p) = TREE_TYPE (complex_type);
3259 next = TREE_CHAIN (p);
3261 /* Add another component type for the imaginary part. */
3262 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3263 TREE_CHAIN (p) = imag;
3264 TREE_CHAIN (imag) = next;
3266 /* Skip the newly created node. */
3267 p = TREE_CHAIN (p);
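/* For example, a parameter list (complex double, int) for which the target
   splits complex arguments is rewritten in place to (double, double, int):
   the COMPLEX_TYPE node is replaced by its component type and a second
   copy is chained in for the imaginary part.  */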
3274 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3275 The RETVAL parameter specifies whether return value needs to be saved, other
3276 parameters are documented in the emit_library_call function below. */
3278 static rtx
3279 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3280 enum libcall_type fn_type,
3281 enum machine_mode outmode, int nargs, va_list p)
3283 /* Total size in bytes of all the stack-parms scanned so far. */
3284 struct args_size args_size;
3285 /* Size of arguments before any adjustments (such as rounding). */
3286 struct args_size original_args_size;
3289 /* TODO: choose the correct decl type of orgfun. Sadly this information
3290 isn't present here, so we default to the native calling ABI here. */
3291 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI? */
3295 CUMULATIVE_ARGS args_so_far;
3299 enum machine_mode mode;
3302 struct locate_and_pad_arg_data locate;
3306 int old_inhibit_defer_pop = inhibit_defer_pop;
3307 rtx call_fusage = 0;
3310 int pcc_struct_value = 0;
3311 int struct_value_size = 0;
3313 int reg_parm_stack_space = 0;
3316 tree tfom; /* type_for_mode (outmode, 0) */
3318 #ifdef REG_PARM_STACK_SPACE
3319 /* Define the boundary of the register parm stack space that needs to be
3320 saved, if any. */
3321 int low_to_save, high_to_save;
3322 rtx save_area = 0; /* Place that it is saved. */
3323 #endif
3325 /* Size of the stack reserved for parameter registers. */
3326 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3327 char *initial_stack_usage_map = stack_usage_map;
3328 char *stack_usage_map_buf = NULL;
3330 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3332 #ifdef REG_PARM_STACK_SPACE
3333 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3334 #endif
3336 /* By default, library functions can not throw. */
3337 flags = ECF_NOTHROW;
3349 case LCT_CONST_MAKE_BLOCK:
3350 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3351 break;
3352 case LCT_PURE_MAKE_BLOCK:
3353 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3354 break;
3355 case LCT_NORETURN:
3356 flags |= ECF_NORETURN;
3357 break;
3358 case LCT_THROW:
3359 flags = ECF_NORETURN;
3360 break;
3361 case LCT_RETURNS_TWICE:
3362 flags = ECF_RETURNS_TWICE;
3367 /* Ensure current function's preferred stack boundary is at least
3368 what we need. */
3369 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3370 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3372 /* If this kind of value comes back in memory,
3373 decide where in memory it should come back. */
3374 if (outmode != VOIDmode)
3376 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3377 if (aggregate_value_p (tfom, 0))
3379 #ifdef PCC_STATIC_STRUCT_RETURN
3380 rtx pointer_reg
3381 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3382 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3383 pcc_struct_value = 1;
3384 if (value == 0)
3385 value = gen_reg_rtx (outmode);
3386 #else /* not PCC_STATIC_STRUCT_RETURN */
3387 struct_value_size = GET_MODE_SIZE (outmode);
3388 if (value != 0 && MEM_P (value))
3389 mem_value = value;
3390 else
3391 mem_value = assign_temp (tfom, 0, 1, 1);
3393 /* This call returns a big structure. */
3394 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE
3395 | ECF_LIBCALL_BLOCK);
3399 tfom = void_type_node;
3401 /* ??? Unfinished: must pass the memory address as an argument. */
3403 /* Copy all the libcall-arguments out of the varargs data
3404 and into a vector ARGVEC.
3406 Compute how to pass each argument. We only support a very small subset
3407 of the full argument passing conventions to limit complexity here since
3408 library functions shouldn't have many args. */
3410 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3411 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3413 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3414 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3415 #else
3416 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3417 #endif
3419 args_size.constant = 0;
3424 /* Now we are about to start emitting insns that can be deleted
3425 if a libcall is deleted. */
3426 if (flags & ECF_LIBCALL_BLOCK)
3431 /* If there's a structure value address to be passed,
3432 either pass it in the special place, or pass it as an extra argument. */
3433 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3435 rtx addr = XEXP (mem_value, 0);
3439 /* Make sure it is a reasonable operand for a move or push insn. */
3440 if (!REG_P (addr) && !MEM_P (addr)
3441 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3442 addr = force_operand (addr, NULL_RTX);
3444 argvec[count].value = addr;
3445 argvec[count].mode = Pmode;
3446 argvec[count].partial = 0;
3448 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3449 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3450 NULL_TREE, 1) == 0);
3452 locate_and_pad_parm (Pmode, NULL_TREE,
3453 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3454 1,
3455 #else
3456 argvec[count].reg != 0,
3457 #endif
3458 0, NULL_TREE, &args_size, &argvec[count].locate);
3460 if (argvec[count].reg == 0 || argvec[count].partial != 0
3461 || reg_parm_stack_space > 0)
3462 args_size.constant += argvec[count].locate.size.constant;
3464 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3469 for (; count < nargs; count++)
3471 rtx val = va_arg (p, rtx);
3472 enum machine_mode mode = va_arg (p, enum machine_mode);
3474 /* We cannot convert the arg value to the mode the library wants here;
3475 must do it earlier where we know the signedness of the arg. */
3476 gcc_assert (mode != BLKmode
3477 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3479 /* Make sure it is a reasonable operand for a move or push insn. */
3480 if (!REG_P (val) && !MEM_P (val)
3481 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3482 val = force_operand (val, NULL_RTX);
3484 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3486 rtx slot;
3487 int must_copy
3488 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3490 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3491 functions, so we have to pretend this isn't such a function. */
3492 if (flags & ECF_LIBCALL_BLOCK)
3494 rtx insns = get_insns ();
3499 /* If this was a CONST function, it is now PURE since it now
3500 reads memory. */
3501 if (flags & ECF_CONST)
3503 flags &= ~ECF_CONST;
3504 flags |= ECF_PURE;
3507 if (MEM_P (val) && !must_copy)
3508 slot = val;
3509 else
3511 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3512 0, 1, 1);
3513 emit_move_insn (slot, val);
3516 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3517 gen_rtx_USE (VOIDmode, slot),
3518 call_fusage);
3519 if (must_copy)
3520 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3521 gen_rtx_CLOBBER (VOIDmode,
3522 slot),
3523 call_fusage);
3525 mode = Pmode;
3526 val = force_operand (XEXP (slot, 0), NULL_RTX);
3529 argvec[count].value = val;
3530 argvec[count].mode = mode;
3532 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3534 argvec[count].partial
3535 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3537 locate_and_pad_parm (mode, NULL_TREE,
3538 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3541 argvec[count].reg != 0,
3543 argvec[count].partial,
3544 NULL_TREE, &args_size, &argvec[count].locate);
3546 gcc_assert (!argvec[count].locate.size.var);
3548 if (argvec[count].reg == 0 || argvec[count].partial != 0
3549 || reg_parm_stack_space > 0)
3550 args_size.constant += argvec[count].locate.size.constant;
3552 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3555 /* If this machine requires an external definition for library
3556 functions, write one out. */
3557 assemble_external_libcall (fun);
3559 original_args_size = args_size;
3560 args_size.constant = (((args_size.constant
3561 + stack_pointer_delta
3562 + STACK_BYTES - 1)
3563 / STACK_BYTES
3564 * STACK_BYTES)
3565 - stack_pointer_delta);
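/* A worked example of the rounding above, assuming STACK_BYTES == 16:
   with args_size.constant == 20 and stack_pointer_delta == 8,
   (20 + 8 + 15) / 16 * 16 - 8 == 24, so 24 bytes are allocated and the
   combined total of 32 leaves the stack pointer on a 16-byte boundary.  */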
3567 args_size.constant = MAX (args_size.constant,
3568 reg_parm_stack_space);
3570 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
3571 args_size.constant -= reg_parm_stack_space;
3573 if (args_size.constant > crtl->outgoing_args_size)
3574 crtl->outgoing_args_size = args_size.constant;
3576 if (ACCUMULATE_OUTGOING_ARGS)
3578 /* Since the stack pointer will never be pushed, it is possible for
3579 the evaluation of a parm to clobber something we have already
3580 written to the stack. Since most function calls on RISC machines
3581 do not use the stack, this is uncommon, but must work correctly.
3583 Therefore, we save any area of the stack that was already written
3584 and that we are using. Here we set up to do this by making a new
3585 stack usage map from the old one.
3587 Another approach might be to try to reorder the argument
3588 evaluations to avoid this conflicting stack usage. */
3590 needed = args_size.constant;
3592 /* Since we will be writing into the entire argument area, the
3593 map must be allocated for its entire size, not just the part that
3594 is the responsibility of the caller. */
3595 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? NULL_TREE : TREE_TYPE (fndecl))))
3596 needed += reg_parm_stack_space;
3598 #ifdef ARGS_GROW_DOWNWARD
3599 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3600 needed + 1);
3601 #else
3602 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3603 needed);
3604 #endif
3605 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3606 stack_usage_map = stack_usage_map_buf;
3608 if (initial_highest_arg_in_use)
3609 memcpy (stack_usage_map, initial_stack_usage_map,
3610 initial_highest_arg_in_use);
3612 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3613 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3614 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3617 /* We must be careful to use virtual regs before they're instantiated,
3618 and real regs afterwards. Loop optimization, for example, can create
3619 new libcalls after we've instantiated the virtual regs, and if we
3620 use virtuals anyway, they won't match the rtl patterns. */
3622 if (virtuals_instantiated)
3623 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3624 else
3625 argblock = virtual_outgoing_args_rtx;
3630 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3633 /* If we push args individually in reverse order, perform stack alignment
3634 before the first push (the last arg). */
3635 if (argblock == 0 && PUSH_ARGS_REVERSED)
3636 anti_adjust_stack (GEN_INT (args_size.constant
3637 - original_args_size.constant));
3639 if (PUSH_ARGS_REVERSED)
3641 inc = -1;
3642 argnum = nargs - 1;
3644 else
3646 inc = 1;
3647 argnum = 0;
3650 #ifdef REG_PARM_STACK_SPACE
3651 if (ACCUMULATE_OUTGOING_ARGS)
3653 /* The argument list is the property of the called routine and it
3654 may clobber it. If the fixed area has been used for previous
3655 parameters, we must save and restore it. */
3656 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3657 &low_to_save, &high_to_save);
3659 #endif
3661 /* Push the args that need to be pushed. */
3663 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3664 are to be pushed. */
3665 for (count = 0; count < nargs; count++, argnum += inc)
3667 enum machine_mode mode = argvec[argnum].mode;
3668 rtx val = argvec[argnum].value;
3669 rtx reg = argvec[argnum].reg;
3670 int partial = argvec[argnum].partial;
3671 int lower_bound = 0, upper_bound = 0, i;
3673 if (! (reg != 0 && partial == 0))
3675 if (ACCUMULATE_OUTGOING_ARGS)
3677 /* If this is being stored into a pre-allocated, fixed-size,
3678 stack area, save any previous data at that location. */
3680 #ifdef ARGS_GROW_DOWNWARD
3681 /* stack_slot is negative, but we want to index stack_usage_map
3682 with positive values. */
3683 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3684 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3685 #else
3686 lower_bound = argvec[argnum].locate.offset.constant;
3687 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3688 #endif
3690 i = lower_bound;
3691 /* Don't worry about things in the fixed argument area;
3692 it has already been saved. */
3693 if (i < reg_parm_stack_space)
3694 i = reg_parm_stack_space;
3695 while (i < upper_bound && stack_usage_map[i] == 0)
3696 i++;
3698 if (i < upper_bound)
3700 /* We need to make a save area. */
3701 unsigned int size
3702 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3703 enum machine_mode save_mode
3704 = mode_for_size (size, MODE_INT, 1);
3705 rtx adr
3706 = plus_constant (argblock,
3707 argvec[argnum].locate.offset.constant);
3708 rtx stack_area
3709 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3711 if (save_mode == BLKmode)
3713 argvec[argnum].save_area
3714 = assign_stack_temp (BLKmode,
3715 argvec[argnum].locate.size.constant,
3716 1);
3718 emit_block_move (validize_mem (argvec[argnum].save_area),
3719 stack_area,
3720 GEN_INT (argvec[argnum].locate.size.constant),
3721 BLOCK_OP_CALL_PARM);
3725 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3727 emit_move_insn (argvec[argnum].save_area, stack_area);
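/* For illustration, on a typical 32-bit target: a 4-byte argument slot
   gives size == 32 bits, so mode_for_size picks SImode and the old
   contents are saved with a single register move; a 12-byte slot yields
   BLKmode instead and falls back to the emit_block_move path above.  */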
3732 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3733 partial, reg, 0, argblock,
3734 GEN_INT (argvec[argnum].locate.offset.constant),
3735 reg_parm_stack_space,
3736 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3738 /* Now mark the segment we just used. */
3739 if (ACCUMULATE_OUTGOING_ARGS)
3740 for (i = lower_bound; i < upper_bound; i++)
3741 stack_usage_map[i] = 1;
3745 if (flags & ECF_CONST)
3747 rtx use;
3749 /* Indicate argument access so that alias.c knows that these
3750 values are live. */
3751 if (argblock)
3752 use = plus_constant (argblock,
3753 argvec[argnum].locate.offset.constant);
3754 else
3755 /* When arguments are pushed, trying to tell alias.c where
3756 exactly this argument is won't work, because the
3757 auto-increment causes confusion. So we merely indicate
3758 that we access something with a known mode somewhere on
3759 the stack. */
3760 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3761 gen_rtx_SCRATCH (Pmode));
3762 use = gen_rtx_MEM (argvec[argnum].mode, use);
3763 use = gen_rtx_USE (VOIDmode, use);
3764 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }
  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        {
          int partial = argvec[count].partial;
          if (partial)
            {
              int nregs;

              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
              use_regs (&call_fusage, REGNO (reg), nregs);
            }
          else
            use_reg (&call_fusage, reg);
        }
    }
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }
  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
        {
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
        }
      else
        {
          rtx note = 0;
          rtx temp;
          int i;

          if (GET_CODE (valreg) == PARALLEL)
            {
              temp = gen_reg_rtx (outmode);
              emit_group_store (temp, valreg, NULL_TREE,
                                GET_MODE_SIZE (outmode));
              valreg = temp;
            }

          temp = gen_reg_rtx (GET_MODE (valreg));

          /* Construct an "equal form" for the value which mentions all the
             arguments in order as well as the function name.  */
          for (i = 0; i < nargs; i++)
            note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
          note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

          insns = get_insns ();
          end_sequence ();
          emit_libcall_block (insns, temp, valreg, note);

          valreg = temp;
        }
    }
  pop_temp_slots ();
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else
        {
          /* Convert to the proper mode if PROMOTE_MODE has been active.  */
          if (GET_MODE (valreg) != outmode)
            {
              int unsignedp = TYPE_UNSIGNED (tfom);

              gcc_assert (targetm.calls.promote_function_return (tfom));
              gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
                          == GET_MODE (valreg));

              valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
            }

          if (value != 0)
            emit_move_insn (value, valreg);
          else
            value = valreg;
        }
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or other LCT_ value for other types of library calls.  */
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
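/* A purely illustrative sketch (not a call site in this file): to emit
   a call "__example_hook (addr)" to some hypothetical runtime routine
   that returns nothing, a caller could write

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_hook"),
                        LCT_NORMAL, VOIDmode, 1, addr, Pmode);

   Note the varargs convention: one (rtx value, machine mode) pair per
   argument, NARGS pairs in all.  */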
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */
rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
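/* Again illustrative only: a 64-bit division routed through the libgcc
   routine "__divdi3" (here assumed to take and return DImode values)
   might be emitted as

     rtx quotient
       = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__divdi3"),
                                  NULL_RTX, LCT_CONST, DImode, 2,
                                  op0, DImode, op1, DImode);

   where OP0 and OP1 are hypothetical DImode operands.  Passing NULL_RTX
   for VALUE lets this function pick the result location (normally the
   hard libcall value register); the returned rtx says where the result
   actually lives.  */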
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a mask of ECF_* flags; ECF_MAY_BE_ALLOCA set says this could
   be a call to `alloca', so we must be careful about how the stack is
   used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  Under ACCUMULATE_OUTGOING_ARGS it indicates that we
   need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments that are also passed in registers.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 1;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;
          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
         mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }
  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          int pad = used - size;
          if (pad)
            {
              /* PAD & -PAD isolates the lowest set bit of PAD, so this
                 computes the largest power-of-two alignment (in bits)
                 that the padding amount is guaranteed to preserve.  */
              unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, pad_align);
            }
        }
      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      parm_align, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }
      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              /* As above, EXCESS & -EXCESS isolates the lowest set bit,
                 giving the alignment that the excess padding preserves.  */
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     crtl->args.internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0
                          && GET_CODE (size_rtx) == CONST_INT);
              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  /* Use arg->locate.size.constant instead of size_rtx
                     because we only care about the part of the argument
                     on the stack.  */
                  if (i < (arg->locate.offset.constant
                           + arg->locate.size.constant))
                    sibcall_failure = 1;
                }
              else
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (arg->locate.size.constant != INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
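/* For reference, expand_call (earlier in this file) drives this routine
   roughly as

     if (store_one_arg (&args[i], argblock, flags,
                        adjusted_args_size.var != 0,
                        reg_parm_stack_space))
       sibcall_failure = 1;

   so a nonzero return merely vetoes the sibling-call version of the
   call; the normal call path is unaffected.  */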
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type is such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
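/* A worked example of the final test above: with a 32-bit PARM_BOUNDARY,
   a 3-byte BLKmode structure leaves one byte of padding, and on a target
   whose padding direction would place the data bytes at the opposite end
   of a register from where the callee expects to find them, the structure
   must be passed on the stack instead.  */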