/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "langhooks.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
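
/* As an illustration (hypothetical target values, not anything defined
   here): with PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8,
   STACK_BYTES is 128 / 8 == 16, i.e. the stack is kept aligned to
   16 bytes.  */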
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
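
/* For example (values chosen for illustration only): after a 4-byte
   argument has been stored at outgoing offset 8, stack_usage_map[8]
   through stack_usage_map[11] are nonzero and
   highest_outgoing_arg_in_use is at least 12, so a nested call
   expanded while computing another argument knows those bytes must
   not be clobbered, or must be saved and restored.  */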
/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location has already been overwritten with a
   tail call argument.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;
/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif
  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);
  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();
    }
  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;
  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }
  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */
static int
special_function_p (tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }
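
      /* For instance (illustration only): for a declaration named
         "__xsetjmp", TNAME ends up pointing at "setjmp", so the
         comparisons below treat it like the unprefixed name.  */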
      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
          & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_CONST | ECF_LIBCALL_BLOCK;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST);
    }

  return flags;
}
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */
static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */
        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */
        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;
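
        /* Illustration (hypothetical numbers): saving 8 bytes starting
           at LOW == 4 with SAVE_MODE == DImode, and assuming the MIN
           above yields 8, gives a mask of 8 - 1 == 7 and 4 & 7 == 4,
           which is nonzero; the save cannot be done with one aligned
           DImode move and falls back to BLKmode.  */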
#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */
static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
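
        /* Worked example (illustrative): a 3-byte structure on a
           big-endian target with 32-bit words gets
           endian_correction == 32 - 3 * 8 == 8, so the 24 data bits
           are stored starting 8 bits in from the most significant
           end of the word.  */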
        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }
  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            arg = save_expr (arg);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j += inc;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j += inc;
      }
  }
  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;
      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */
      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

              if (callee_copies)
                *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
              else
                *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value
                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }
      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      args[i].partial
        = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                           argpos < n_named_args);
      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;
      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */
static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (!OUTGOING_REG_PARM_STACK_SPACE)
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
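
      /* Worked example (illustrative values): with
         args_size->constant == 10, stack_pointer_delta == 4 and a
         16-byte boundary, (10 + 4 + 15) / 16 * 16 - 4 == 12, so
         pushing 12 bytes of arguments leaves the stack pointer
         16-byte aligned again (4 + 12 == 16).  */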
      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (!OUTGOING_REG_PARM_STACK_SPACE)
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */
static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */
static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */
static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
                                            MODE_INT, 1);
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (GET_CODE (offset) == CONST_INT)
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
            }
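
          /* Example (illustrative values): a constant offset of 4 bytes
             with BOUNDARY == 64 gives align == 32 | 64 == 96, and
             96 & -96 picks off the lowest set bit, leaving a provable
             alignment of 32 bits.  */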
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */
static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}
/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == current_function_internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
           && XEXP (addr, 0) == current_function_internal_arg_pointer
           && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0) == current_function_internal_arg_pointer
               || XEXP (addr, 1) == current_function_internal_arg_pointer))
    return true;
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return true;
    }

  return false;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */
static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */
          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  e.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }
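
          /* Illustration (hypothetical): a 1-byte (QImode) value in a
             4-byte word register gets shift == (4 - 1) * 8 == 24 bits,
             moving the byte up to the most significant end of the
             register.  */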
          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */
          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
              if (is_sibcall
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                                                           size))
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    x, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */
static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
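
/* Worked example (illustrative values): with stack_pointer_delta == 0,
   unadjusted_args_size == 4, pending_stack_adjust == 20 and a 16-byte
   boundary: unadjusted_alignment starts at 4, becomes 4 - (20 % 16) == 0,
   and adjustment stays 20; args_size->constant is then 20 - 20 + 4 == 4.
   Popping 20 bytes now and pushing 4 bytes of arguments leaves the
   stack 16-byte aligned at the call.  */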
/* Scan expression X to see whether it dereferences any argument slots
   we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  Return nonzero if X dereferences such an
   argument slot, zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
/* Scan the sequence after INSN to see whether it dereferences any argument
   slots we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, add stack
   slots for ARG to the stored_args_map bitmap afterwards (when ARG is a
   register, MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
   sequence after INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
  if (shift == 0)
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}
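
/* For instance (illustrative): an SImode value held at the most
   significant end of a 64-bit register gives shift == 64 - 32 == 32,
   so a single 32-bit shift moves it into the canonical position.  */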
/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */
rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;
  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */
  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
1939 #ifdef REG_PARM_STACK_SPACE
1940 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
1942 int low_to_save, high_to_save;
1943 rtx save_area = 0; /* Place that it is saved */
1946 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1947 char *initial_stack_usage_map = stack_usage_map;
1948 char *stack_usage_map_buf = NULL;
1950 int old_stack_allocated;
1952 /* State variables to track stack modifications. */
1953 rtx old_stack_level = 0;
1954 int old_stack_arg_under_construction = 0;
1955 int old_pending_adj = 0;
1956 int old_inhibit_defer_pop = inhibit_defer_pop;
1958 /* Some stack pointer alterations we make are performed via
1959 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1960 which we then also need to save/restore along the way. */
1961 int old_stack_pointer_delta = 0;
1964 tree p = CALL_EXPR_FN (exp);
1965 tree addr = CALL_EXPR_FN (exp);
1967 /* The alignment of the stack, in bits. */
1968 unsigned HOST_WIDE_INT preferred_stack_boundary;
1969 /* The alignment of the stack, in bytes. */
1970 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1971 /* The static chain value to use for this call. */
1972 rtx static_chain_value;
1973 /* See if this is a "nothrow" function call. */
1974 if (TREE_NOTHROW (exp))
1975 flags |= ECF_NOTHROW;
1977 /* See if we can find a DECL-node for the actual function, and get the
1978 function attributes (flags) from the function decl or type node. */
1979 fndecl = get_callee_fndecl (exp);
1982 fntype = TREE_TYPE (fndecl);
1983 flags |= flags_from_decl_or_type (fndecl);
1987 fntype = TREE_TYPE (TREE_TYPE (p));
1988 flags |= flags_from_decl_or_type (fntype);
1991 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1993 /* Warn if this value is an aggregate type,
1994 regardless of which calling convention we are using for it. */
1995 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1996 warning (OPT_Waggregate_return, "function call has aggregate value");
1998 /* If the result of a pure or const function call is ignored (or void),
1999 and none of its arguments are volatile, we can avoid expanding the
2000 call and just evaluate the arguments for side-effects. */
2001 if ((flags & (ECF_CONST | ECF_PURE))
2002 && (ignore || target == const0_rtx
2003 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2005 bool volatilep = false;
2007 call_expr_arg_iterator iter;
2009 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2010 if (TREE_THIS_VOLATILE (arg))
2018 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2019 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
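/* Editorial example using a hypothetical declaration (not in GCC):

     extern int plen (const char *s) __attribute__ ((pure));
     ...
     plen (buf);      (result unused)

   takes this path: no call is emitted and BUF is evaluated only for
   its side effects, which is safe because a pure callee has none of
   its own. */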
2024 #ifdef REG_PARM_STACK_SPACE
2025 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2028 if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
2029 must_preallocate = 1;
2031 /* Set up a place to return a structure. */
2033 /* Cater to broken compilers. */
2034 if (aggregate_value_p (exp, fndecl))
2036 /* This call returns a big structure. */
2037 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2039 #ifdef PCC_STATIC_STRUCT_RETURN
2041 pcc_struct_value = 1;
2043 #else /* not PCC_STATIC_STRUCT_RETURN */
2045 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2047 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2048 structure_value_addr = XEXP (target, 0);
2051 /* For variable-sized objects, we must be called with a target
2052 specified. If we were to allocate space on the stack here,
2053 we would have no way of knowing when to free it. */
2054 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2056 mark_temp_addr_taken (d);
2057 structure_value_addr = XEXP (d, 0);
2061 #endif /* not PCC_STATIC_STRUCT_RETURN */
2064 /* Figure out the amount to which the stack should be aligned. */
2065 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2068 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2069 if (i && i->preferred_incoming_stack_boundary)
2070 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2073 /* Operand 0 is a pointer-to-function; get the type of the function. */
2074 funtype = TREE_TYPE (addr);
2075 gcc_assert (POINTER_TYPE_P (funtype));
2076 funtype = TREE_TYPE (funtype);
2078 /* Count whether there are actual complex arguments that need to be split
2079 into their real and imaginary parts. Munge the type_arg_types
2080 appropriately here as well. */
2081 if (targetm.calls.split_complex_arg)
2083 call_expr_arg_iterator iter;
2085 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2087 tree type = TREE_TYPE (arg);
2088 if (type && TREE_CODE (type) == COMPLEX_TYPE
2089 && targetm.calls.split_complex_arg (type))
2090 num_complex_actuals++;
2092 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2095 type_arg_types = TYPE_ARG_TYPES (funtype);
2097 if (flags & ECF_MAY_BE_ALLOCA)
2098 current_function_calls_alloca = 1;
2100 /* If struct_value_rtx is 0, it means pass the address
2101 as if it were an extra parameter. Put the argument expression
2102 in structure_value_addr_value. */
2103 if (structure_value_addr && struct_value == 0)
2105 /* If structure_value_addr is a REG other than
2106 virtual_outgoing_args_rtx, we can always use it. If it
2107 is not a REG, we must always copy it into a register.
2108 If it is virtual_outgoing_args_rtx, we must copy it to another
2109 register in some cases. */
2110 rtx temp = (!REG_P (structure_value_addr)
2111 || (ACCUMULATE_OUTGOING_ARGS
2112 && stack_arg_under_construction
2113 && structure_value_addr == virtual_outgoing_args_rtx)
2114 ? copy_addr_to_reg (convert_memory_address
2115 (Pmode, structure_value_addr))
2116 : structure_value_addr);
2118 structure_value_addr_value =
2119 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2120 structure_value_addr_parm = 1;
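/* Editorial illustration with hypothetical types: for

     struct big { int a[8]; };
     struct big f (void);

   on a target whose struct_value_rtx hook yields 0, the caller's
   return slot address becomes a hidden first argument, so the call
   effectively executes f (&<retval>); that hidden argument is the
   value built here. */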
2123 /* Count the arguments and set NUM_ACTUALS. */
2125 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2127 /* Compute number of named args.
2128 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2130 if (type_arg_types != 0)
2132 = (list_length (type_arg_types)
2133 /* Count the struct value address, if it is passed as a parm. */
2134 + structure_value_addr_parm);
2136 /* If we know nothing, treat all args as named. */
2137 n_named_args = num_actuals;
2139 /* Start updating where the next arg would go.
2141 On some machines (such as the PA) indirect calls have a different
2142 calling convention than normal calls. The fourth argument in
2143 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not. */
2145 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2147 /* Now possibly adjust the number of named args.
2148 Normally, don't include the last named arg if anonymous args follow.
2149 We do include the last named arg if
2150 targetm.calls.strict_argument_naming() returns nonzero.
2151 (If no anonymous args follow, the result of list_length is actually
2152 one too large. This is harmless.)
2154 If targetm.calls.pretend_outgoing_varargs_named() returns
2155 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2156 this machine will be able to place unnamed args that were passed
2157 in registers into the stack. So treat all args as named. This
2158 allows the insns emitted for a specific argument list to be
2159 independent of the function declaration.
2161 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2162 we do not have any reliable way to pass unnamed args in
2163 registers, so we must force them into memory. */
2165 if (type_arg_types != 0
2166 && targetm.calls.strict_argument_naming (&args_so_far))
2168 else if (type_arg_types != 0
2169 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2170 /* Don't include the last named arg. */
2173 /* Treat all args as named. */
2174 n_named_args = num_actuals;
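/* Editorial example: for a variadic callee such as

     int printf (const char *fmt, ...);

   called with three actual arguments, TYPE_ARG_TYPES lists only the
   named FMT parameter, so n_named_args starts at 1; the other two
   actuals are anonymous and, as described above, may have to go on
   the stack depending on the target hooks. */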
2176 /* Make a vector to hold all the information about each arg. */
2177 args = alloca (num_actuals * sizeof (struct arg_data));
2178 memset (args, 0, num_actuals * sizeof (struct arg_data));
2180 /* Build up entries in the ARGS array, compute the size of the
2181 arguments into ARGS_SIZE, etc. */
2182 initialize_argument_information (num_actuals, args, &args_size,
2184 structure_value_addr_value, fndecl,
2185 &args_so_far, reg_parm_stack_space,
2186 &old_stack_level, &old_pending_adj,
2187 &must_preallocate, &flags,
2188 &try_tail_call, CALL_FROM_THUNK_P (exp));
2192 /* If this function requires a variable-sized argument list, don't
2193 try to make a cse'able block for this call. We may be able to
2194 do this eventually, but it is too complicated to keep track of
2195 what insns go in the cse'able block and which don't. */
2197 flags &= ~ECF_LIBCALL_BLOCK;
2198 must_preallocate = 1;
2201 /* Now make final decision about preallocating stack space. */
2202 must_preallocate = finalize_must_preallocate (must_preallocate,
2206 /* If the structure value address will reference the stack pointer, we
2207 must stabilize it. We don't need to do this if we know that we are
2208 not going to adjust the stack pointer in processing this call. */
2210 if (structure_value_addr
2211 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2212 || reg_mentioned_p (virtual_outgoing_args_rtx,
2213 structure_value_addr))
2215 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2216 structure_value_addr = copy_to_reg (structure_value_addr);
2218 /* Tail calls can make things harder to debug, and we've traditionally
2219 pushed these optimizations into -O2. Don't try if we're already
2220 expanding a call, as that means we're an argument. Don't try if
2221 there are cleanups, as we know there's code to follow the call. */
2223 if (currently_expanding_call++ != 0
2224 || !flag_optimize_sibling_calls
2226 || lookup_stmt_eh_region (exp) >= 0
2227 || dbg_cnt (tail_call) == false)
2230 /* Remaining reasons for tail call optimization to fail. */
2232 #ifdef HAVE_sibcall_epilogue
2233 !HAVE_sibcall_epilogue
2238 /* Doing sibling call optimization needs some work, since
2239 structure_value_addr can be allocated on the stack.
2240 It does not seem worth the effort since few optimizable
2241 sibling calls will return a structure. */
2242 || structure_value_addr != NULL_RTX
2243 /* Check whether the target is able to optimize the call into a sibcall. */
2245 || !targetm.function_ok_for_sibcall (fndecl, exp)
2246 /* Functions that do not return exactly once may not be sibcall optimized. */
2248 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2249 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2250 /* If the called function is nested in the current one, it might access
2251 some of the caller's arguments, but could clobber them beforehand if
2252 the argument areas are shared. */
2253 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2254 /* If this function requires more stack slots than the current
2255 function, we cannot change it into a sibling call.
2256 current_function_pretend_args_size is not part of the
2257 stack allocated by our caller. */
2258 || args_size.constant > (current_function_args_size
2259 - current_function_pretend_args_size)
2260 /* If the callee pops its own arguments, then it must pop exactly
2261 the same number of arguments as the current function. */
2262 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2263 != RETURN_POPS_ARGS (current_function_decl,
2264 TREE_TYPE (current_function_decl),
2265 current_function_args_size))
2266 || !lang_hooks.decls.ok_for_sibcall (fndecl))
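/* Editorial note with hypothetical code: a tail-position call such as
   "return g (x);" with a scalar result can survive the test above,
   while the same call with G returning a large structure fails the
   structure_value_addr check, since the callee would store through an
   address that may sit in this function's soon-dead frame. */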
2269 /* Ensure current function's preferred stack boundary is at least
2270 what we need. We don't have to increase alignment for recursive functions. */
2272 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2273 && fndecl != current_function_decl)
2274 cfun->preferred_stack_boundary = preferred_stack_boundary;
2275 if (fndecl == current_function_decl)
2276 cfun->recursive_call_emit = true;
2278 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2280 /* We want to make two insn chains; one for a sibling call, the other
2281 for a normal call. We will select one of the two chains after
2282 initial RTL generation is complete. */
2283 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2285 int sibcall_failure = 0;
2286 /* We want to emit any pending stack adjustments before the tail
2287 recursion "call". That way we know any adjustment after the tail
2288 recursion call can be ignored if we indeed use the tail call sequence. */
2290 int save_pending_stack_adjust = 0;
2291 int save_stack_pointer_delta = 0;
2293 rtx before_call, next_arg_reg;
2297 /* State variables we need to save and restore between iterations. */
2299 save_pending_stack_adjust = pending_stack_adjust;
2300 save_stack_pointer_delta = stack_pointer_delta;
2303 flags &= ~ECF_SIBCALL;
2305 flags |= ECF_SIBCALL;
2307 /* Other state variables that we must reinitialize each time
2308 through the loop (that are not initialized by the loop itself). */
2312 /* Start a new sequence for the normal call case.
2314 From this point on, if the sibling call fails, we want to set
2315 sibcall_failure instead of continuing the loop. */
2318 /* Don't let pending stack adjusts add up to too much.
2319 Also, do all pending adjustments now if there is any chance
2320 this might be a call to alloca or if we are expanding a sibling
2321 call sequence or if we are calling a function that is to return
2322 with stack pointer depressed.
2323 Also do the adjustments before a throwing call, otherwise
2324 exception handling can fail; PR 19225. */
2325 if (pending_stack_adjust >= 32
2326 || (pending_stack_adjust > 0
2327 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2328 || (pending_stack_adjust > 0
2329 && flag_exceptions && !(flags & ECF_NOTHROW))
2331 do_pending_stack_adjust ();
2333 /* When calling a const function, we must pop the stack args right away,
2334 so that the pop is deleted or moved with the call. */
2335 if (pass && (flags & ECF_LIBCALL_BLOCK))
2338 /* Precompute any arguments as needed. */
2340 precompute_arguments (flags, num_actuals, args);
2342 /* Now we are about to start emitting insns that can be deleted
2343 if a libcall is deleted. */
2344 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2347 if (pass == 0 && cfun->stack_protect_guard)
2348 stack_protect_epilogue ();
2350 adjusted_args_size = args_size;
2351 /* Compute the actual size of the argument block required. The variable
2352 and constant sizes must be combined, the size may have to be rounded,
2353 and there may be a minimum required size. When generating a sibcall
2354 pattern, do not round up, since we'll be re-using whatever space our caller provided. */
2356 unadjusted_args_size
2357 = compute_argument_block_size (reg_parm_stack_space,
2358 &adjusted_args_size,
2360 : preferred_stack_boundary));
2362 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2364 /* The argument block when performing a sibling call is the
2365 incoming argument block. */
2368 argblock = virtual_incoming_args_rtx;
2370 #ifdef STACK_GROWS_DOWNWARD
2371 = plus_constant (argblock, current_function_pretend_args_size);
2373 = plus_constant (argblock, -current_function_pretend_args_size);
2375 stored_args_map = sbitmap_alloc (args_size.constant);
2376 sbitmap_zero (stored_args_map);
2379 /* If we have no actual push instructions, or shouldn't use them,
2380 make space for all args right now. */
2381 else if (adjusted_args_size.var != 0)
2383 if (old_stack_level == 0)
2385 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2386 old_stack_pointer_delta = stack_pointer_delta;
2387 old_pending_adj = pending_stack_adjust;
2388 pending_stack_adjust = 0;
2389 /* stack_arg_under_construction says whether a stack arg is
2390 being constructed at the old stack level. Pushing the stack
2391 gets a clean outgoing argument block. */
2392 old_stack_arg_under_construction = stack_arg_under_construction;
2393 stack_arg_under_construction = 0;
2395 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2399 /* Note that we must go through the motions of allocating an argument
2400 block even if the size is zero because we may be storing args
2401 in the area reserved for register arguments, which may be part of the stack frame. */
2404 int needed = adjusted_args_size.constant;
2406 /* Store the maximum argument space used. It will be pushed by
2407 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
2410 if (needed > current_function_outgoing_args_size)
2411 current_function_outgoing_args_size = needed;
2413 if (must_preallocate)
2415 if (ACCUMULATE_OUTGOING_ARGS)
2417 /* Since the stack pointer will never be pushed, it is
2418 possible for the evaluation of a parm to clobber
2419 something we have already written to the stack.
2420 Since most function calls on RISC machines do not use
2421 the stack, this is uncommon, but must work correctly.
2423 Therefore, we save any area of the stack that was already
2424 written and that we are using. Here we set up to do this
2425 by making a new stack usage map from the old one. The
2426 actual save will be done by store_one_arg.
2428 Another approach might be to try to reorder the argument
2429 evaluations to avoid this conflicting stack usage. */
2431 /* Since we will be writing into the entire argument area,
2432 the map must be allocated for its entire size, not just
2433 the part that is the responsibility of the caller. */
2434 if (!OUTGOING_REG_PARM_STACK_SPACE)
2435 needed += reg_parm_stack_space;
2437 #ifdef ARGS_GROW_DOWNWARD
2438 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2441 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2444 if (stack_usage_map_buf)
2445 free (stack_usage_map_buf);
2446 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2447 stack_usage_map = stack_usage_map_buf;
2449 if (initial_highest_arg_in_use)
2450 memcpy (stack_usage_map, initial_stack_usage_map,
2451 initial_highest_arg_in_use);
2453 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2454 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2455 (highest_outgoing_arg_in_use
2456 - initial_highest_arg_in_use));
2459 /* The address of the outgoing argument list must not be
2460 copied to a register here, because argblock would be left
2461 pointing to the wrong place after the call to
2462 allocate_dynamic_stack_space below. */
2464 argblock = virtual_outgoing_args_rtx;
2468 if (inhibit_defer_pop == 0)
2470 /* Try to reuse some or all of the pending_stack_adjust
2471 to get this space. */
2473 = (combine_pending_stack_adjustment_and_call
2474 (unadjusted_args_size,
2475 &adjusted_args_size,
2476 preferred_unit_stack_boundary));
2478 /* combine_pending_stack_adjustment_and_call computes
2479 an adjustment before the arguments are allocated.
2480 Account for them and see whether or not the stack
2481 needs to go up or down. */
2482 needed = unadjusted_args_size - needed;
2486 /* We're releasing stack space. */
2487 /* ??? We can avoid any adjustment at all if we're
2488 already aligned. FIXME. */
2489 pending_stack_adjust = -needed;
2490 do_pending_stack_adjust ();
2494 /* We need to allocate space. We'll do that in
2495 push_block below. */
2496 pending_stack_adjust = 0;
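/* Editorial arithmetic with assumed numbers: if the helper returns an
   adjustment of 24 bytes while unadjusted_args_size is 16, then
   needed = 16 - 24 = -8 and the 8 surplus bytes are released through
   do_pending_stack_adjust; with unadjusted_args_size == 32 the result
   is +8, which is left for push_block below to allocate. */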
2499 /* Special case this because overhead of `push_block' in
2500 this case is non-trivial. */
2502 argblock = virtual_outgoing_args_rtx;
2505 argblock = push_block (GEN_INT (needed), 0, 0);
2506 #ifdef ARGS_GROW_DOWNWARD
2507 argblock = plus_constant (argblock, needed);
2511 /* We only really need to call `copy_to_reg' in the case
2512 where push insns are going to be used to pass ARGBLOCK
2513 to a function call in ARGS. In that case, the stack
2514 pointer changes value from the allocation point to the
2515 call point, and hence the value of
2516 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2517 as well always do it. */
2518 argblock = copy_to_reg (argblock);
2523 if (ACCUMULATE_OUTGOING_ARGS)
2525 /* The save/restore code in store_one_arg handles all
2526 cases except one: a constructor call (including a C
2527 function returning a BLKmode struct) to initialize an argument. */
2529 if (stack_arg_under_construction)
2532 = GEN_INT (adjusted_args_size.constant
2533 + (OUTGOING_REG_PARM_STACK_SPACE ? 0
2534 : reg_parm_stack_space));
2535 if (old_stack_level == 0)
2537 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2539 old_stack_pointer_delta = stack_pointer_delta;
2540 old_pending_adj = pending_stack_adjust;
2541 pending_stack_adjust = 0;
2542 /* stack_arg_under_construction says whether a stack
2543 arg is being constructed at the old stack level.
2544 Pushing the stack gets a clean outgoing argument
2546 old_stack_arg_under_construction
2547 = stack_arg_under_construction;
2548 stack_arg_under_construction = 0;
2549 /* Make a new map for the new argument list. */
2550 if (stack_usage_map_buf)
2551 free (stack_usage_map_buf);
2552 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2553 stack_usage_map = stack_usage_map_buf;
2554 highest_outgoing_arg_in_use = 0;
2556 allocate_dynamic_stack_space (push_size, NULL_RTX,
2560 /* If argument evaluation might modify the stack pointer,
2561 copy the address of the argument list to a register. */
2562 for (i = 0; i < num_actuals; i++)
2563 if (args[i].pass_on_stack)
2565 argblock = copy_addr_to_reg (argblock);
2570 compute_argument_addresses (args, argblock, num_actuals);
2572 /* If we push args individually in reverse order, perform stack alignment
2573 before the first push (the last arg). */
2574 if (PUSH_ARGS_REVERSED && argblock == 0
2575 && adjusted_args_size.constant != unadjusted_args_size)
2577 /* When the stack adjustment is pending, we get better code
2578 by combining the adjustments. */
2579 if (pending_stack_adjust
2580 && ! (flags & ECF_LIBCALL_BLOCK)
2581 && ! inhibit_defer_pop)
2583 pending_stack_adjust
2584 = (combine_pending_stack_adjustment_and_call
2585 (unadjusted_args_size,
2586 &adjusted_args_size,
2587 preferred_unit_stack_boundary));
2588 do_pending_stack_adjust ();
2590 else if (argblock == 0)
2591 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2592 - unadjusted_args_size));
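/* Editorial example with assumed numbers: given a 16-byte preferred
   boundary and 20 bytes of arguments rounded up to
   adjusted_args_size.constant == 32, the anti_adjust_stack above
   lowers the stack by the 12 padding bytes before the first push (the
   last argument), so the pushes end exactly on the boundary. */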
2594 /* Now that the stack is properly aligned, pops can't safely
2595 be deferred during the evaluation of the arguments. */
2598 funexp = rtx_for_function_call (fndecl, addr);
2600 /* Figure out the register where the value, if any, will come back. */
2602 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2603 && ! structure_value_addr)
2605 if (pcc_struct_value)
2606 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2607 fndecl, NULL, (pass == 0));
2609 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2612 /* If VALREG is a PARALLEL whose first member has a zero
2613 offset, use that. This is for targets such as m68k that
2614 return the same value in multiple places. */
2615 if (GET_CODE (valreg) == PARALLEL)
2617 rtx elem = XVECEXP (valreg, 0, 0);
2618 rtx where = XEXP (elem, 0);
2619 rtx offset = XEXP (elem, 1);
2620 if (offset == const0_rtx
2621 && GET_MODE (where) == GET_MODE (valreg))
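/* Editorial illustration, target details assumed: m68k-style ports
   return a pointer in both %d0 and %a0, roughly

     (parallel [(expr_list (reg:SI %d0) (const_int 0))
                (expr_list (reg:SI %a0) (const_int 0))])

   and the zero-offset, same-mode first entry lets %d0 stand in for
   the whole PARALLEL. */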
2626 /* Precompute all register parameters. It isn't safe to compute anything
2627 once we have started filling any specific hard regs. */
2628 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2630 if (CALL_EXPR_STATIC_CHAIN (exp))
2631 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2633 static_chain_value = 0;
2635 #ifdef REG_PARM_STACK_SPACE
2636 /* Save the fixed argument area if it's part of the caller's frame and
2637 is clobbered by argument setup for this call. */
2638 if (ACCUMULATE_OUTGOING_ARGS && pass)
2639 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2640 &low_to_save, &high_to_save);
2643 /* Now store (and compute if necessary) all non-register parms.
2644 These come before register parms, since they can require block-moves,
2645 which could clobber the registers used for register parms.
2646 Parms which have partial registers are not stored here,
2647 but we do preallocate space here if they want that. */
2649 for (i = 0; i < num_actuals; i++)
2650 if (args[i].reg == 0 || args[i].pass_on_stack)
2652 rtx before_arg = get_last_insn ();
2654 if (store_one_arg (&args[i], argblock, flags,
2655 adjusted_args_size.var != 0,
2656 reg_parm_stack_space)
2658 && check_sibcall_argument_overlap (before_arg,
2660 sibcall_failure = 1;
2662 if (flags & ECF_CONST
2664 && args[i].value == args[i].stack)
2665 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2666 gen_rtx_USE (VOIDmode,
2671 /* If we have a parm that is passed in registers but not in memory
2672 and whose alignment does not permit a direct copy into registers,
2673 make a group of pseudos that correspond to each register that we will later fill. */
2675 if (STRICT_ALIGNMENT)
2676 store_unaligned_arguments_into_pseudos (args, num_actuals);
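/* Editorial sketch with an assumed layout: on a STRICT_ALIGNMENT
   target, a 12-byte argument that is only 2-byte aligned cannot be
   copied into three word registers directly; the helper above loads
   it piecewise into args[i].aligned_regs, and load_register_parameters
   later moves those pseudos into the real argument registers. */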
2678 /* Now store any partially-in-registers parm.
2679 This is the last place a block-move can happen. */
2681 for (i = 0; i < num_actuals; i++)
2682 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2684 rtx before_arg = get_last_insn ();
2686 if (store_one_arg (&args[i], argblock, flags,
2687 adjusted_args_size.var != 0,
2688 reg_parm_stack_space)
2690 && check_sibcall_argument_overlap (before_arg,
2692 sibcall_failure = 1;
2695 /* If we pushed args in forward order, perform stack alignment
2696 after pushing the last arg. */
2697 if (!PUSH_ARGS_REVERSED && argblock == 0)
2698 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2699 - unadjusted_args_size));
2701 /* If register arguments require space on the stack and stack space
2702 was not preallocated, allocate stack space here for arguments
2703 passed in registers. */
2704 if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
2705 && must_preallocate == 0 && reg_parm_stack_space > 0)
2706 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2708 /* Pass the function the address in which to return a structure value. */
2710 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2712 structure_value_addr
2713 = convert_memory_address (Pmode, structure_value_addr);
2714 emit_move_insn (struct_value,
2716 force_operand (structure_value_addr,
2719 if (REG_P (struct_value))
2720 use_reg (&call_fusage, struct_value);
2723 funexp = prepare_call_address (funexp, static_chain_value,
2724 &call_fusage, reg_parm_seen, pass == 0);
2726 load_register_parameters (args, num_actuals, &call_fusage, flags,
2727 pass == 0, &sibcall_failure);
2729 /* Save a pointer to the last insn before the call, so that we can
2730 later safely search backwards to find the CALL_INSN. */
2731 before_call = get_last_insn ();
2733 /* Set up next argument register. For sibling calls on machines
2734 with register windows this should be the incoming register. */
2735 #ifdef FUNCTION_INCOMING_ARG
2737 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2741 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2744 /* All arguments and registers used for the call must be set up by now! */
2747 /* Stack must be properly aligned now. */
2749 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2751 /* Generate the actual call instruction. */
2752 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2753 adjusted_args_size.constant, struct_value_size,
2754 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2755 flags, & args_so_far);
2757 /* If a non-BLKmode value is returned at the most significant end
2758 of a register, shift the register right by the appropriate amount
2759 and update VALREG accordingly. BLKmode values are handled by the
2760 group load/store machinery below. */
2761 if (!structure_value_addr
2762 && !pcc_struct_value
2763 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2764 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2766 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2767 sibcall_failure = 1;
2768 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
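/* Editorial example with assumed modes: an HImode result returned in
   the most significant end of an SImode register is shifted right by
   32 - 16 = 16 bits by shift_return_value, after which VALREG is
   recreated in HImode on the same hard register. */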
2771 /* If call is cse'able, make appropriate pair of reg-notes around it.
2772 Test valreg so we don't crash; may safely ignore `const'
2773 if return type is void. Disable for PARALLEL return values, because
2774 we have no way to move such values into a pseudo register. */
2775 if (pass && (flags & ECF_LIBCALL_BLOCK))
2779 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2781 insns = get_insns ();
2783 /* Expansion of block moves possibly introduced a loop that may
2784 not appear inside a libcall block. */
2785 for (insn = insns; insn; insn = NEXT_INSN (insn))
2797 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2799 /* Mark the return value as a pointer if needed. */
2800 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2801 mark_reg_pointer (temp,
2802 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2805 if (flag_unsafe_math_optimizations
2807 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2808 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2809 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2810 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2811 note = gen_rtx_fmt_e (SQRT,
2813 args[0].initial_value);
2816 /* Construct an "equal form" for the value which
2817 mentions all the arguments in order as well as
2818 the function name. */
2819 for (i = 0; i < num_actuals; i++)
2820 note = gen_rtx_EXPR_LIST (VOIDmode,
2821 args[i].initial_value, note);
2822 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2824 if (flags & ECF_PURE)
2825 note = gen_rtx_EXPR_LIST (VOIDmode,
2826 gen_rtx_USE (VOIDmode,
2827 gen_rtx_MEM (BLKmode,
2828 gen_rtx_SCRATCH (VOIDmode))),
2831 emit_libcall_block (insns, temp, valreg, note);
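/* Editorial note on the note's shape: the loop above produces a
   chained EXPR_LIST headed by the function address,

     (expr_list funexp
       (expr_list arg[n-1] ... (expr_list arg[0] nil)))

   which emit_libcall_block attaches as a REG_EQUAL note so that CSE
   can recognize two identical const calls as equal. */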
2836 else if (pass && (flags & ECF_MALLOC))
2838 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2841 /* The return value from a malloc-like function is a pointer. */
2842 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2843 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2845 emit_move_insn (temp, valreg);
2847 /* The return value from a malloc-like function cannot alias anything else. */
2849 last = get_last_insn ();
2851 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2853 /* Write out the sequence. */
2854 insns = get_insns ();
2860 /* For calls to `setjmp', etc., inform
2861 function.c:setjmp_warnings that it should complain if
2862 nonvolatile values are live. For functions that cannot
2863 return, inform flow that control does not fall through. */
2865 if ((flags & ECF_NORETURN) || pass == 0)
2867 /* The barrier must be emitted
2868 immediately after the CALL_INSN. Some ports emit more
2869 than just a CALL_INSN above, so we must search for it here. */
2871 rtx last = get_last_insn ();
2872 while (!CALL_P (last))
2874 last = PREV_INSN (last);
2875 /* There was no CALL_INSN? */
2876 gcc_assert (last != before_call);
2879 emit_barrier_after (last);
2881 /* Stack adjustments after a noreturn call are dead code.
2882 However when NO_DEFER_POP is in effect, we must preserve
2883 stack_pointer_delta. */
2884 if (inhibit_defer_pop == 0)
2886 stack_pointer_delta = old_stack_allocated;
2887 pending_stack_adjust = 0;
2891 /* If value type not void, return an rtx for the value. */
2893 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2895 target = const0_rtx;
2896 else if (structure_value_addr)
2898 if (target == 0 || !MEM_P (target))
2901 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2902 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2903 structure_value_addr));
2904 set_mem_attributes (target, exp, 1);
2907 else if (pcc_struct_value)
2909 /* This is the special C++ case where we need to
2910 know what the true target was. We take care to
2911 never use this value more than once in one expression. */
2912 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2913 copy_to_reg (valreg));
2914 set_mem_attributes (target, exp, 1);
2916 /* Handle calls that return values in multiple non-contiguous locations.
2917 The Irix 6 ABI has examples of this. */
2918 else if (GET_CODE (valreg) == PARALLEL)
2922 /* This will only be assigned once, so it can be readonly. */
2923 tree nt = build_qualified_type (TREE_TYPE (exp),
2924 (TYPE_QUALS (TREE_TYPE (exp))
2925 | TYPE_QUAL_CONST));
2927 target = assign_temp (nt, 0, 1, 1);
2930 if (! rtx_equal_p (target, valreg))
2931 emit_group_store (target, valreg, TREE_TYPE (exp),
2932 int_size_in_bytes (TREE_TYPE (exp)));
2934 /* We cannot support sibling calls for this case. */
2935 sibcall_failure = 1;
2938 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2939 && GET_MODE (target) == GET_MODE (valreg))
2941 bool may_overlap = false;
2943 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2944 reg to a plain register. */
2946 && HARD_REGISTER_P (valreg)
2947 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
2948 && !(REG_P (target) && !HARD_REGISTER_P (target)))
2949 valreg = copy_to_reg (valreg);
2951 /* If TARGET is a MEM in the argument area, and we have
2952 saved part of the argument area, then we can't store
2953 directly into TARGET as it may get overwritten when we
2954 restore the argument save area below. Don't work too
2955 hard though and simply force TARGET to a register if it
2956 is a MEM; the optimizer is quite likely to sort it out. */
2957 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2958 for (i = 0; i < num_actuals; i++)
2959 if (args[i].save_area)
2966 target = copy_to_reg (valreg);
2969 /* TARGET and VALREG cannot be equal at this point
2970 because the latter would not have
2971 REG_FUNCTION_VALUE_P true, while the former would if
2972 it were referring to the same register.
2974 If they refer to the same register, this move will be
2975 a no-op, except when function inlining is being done. */
2977 emit_move_insn (target, valreg);
2979 /* If we are setting a MEM, this code must be executed.
2980 Since it is emitted after the call insn, sibcall
2981 optimization cannot be performed in that case. */
2983 sibcall_failure = 1;
2986 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2988 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2990 /* We cannot support sibling calls for this case. */
2991 sibcall_failure = 1;
2994 target = copy_to_reg (valreg);
2996 if (targetm.calls.promote_function_return (funtype))
2998 /* If we promoted this return value, make the proper SUBREG.
2999 TARGET might be const0_rtx here, so be careful. */
3001 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3002 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3004 tree type = TREE_TYPE (exp);
3005 int unsignedp = TYPE_UNSIGNED (type);
3007 enum machine_mode pmode;
3009 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3010 /* If we don't promote as expected, something is wrong. */
3011 gcc_assert (GET_MODE (target) == pmode);
3013 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3014 && (GET_MODE_SIZE (GET_MODE (target))
3015 > GET_MODE_SIZE (TYPE_MODE (type))))
3017 offset = GET_MODE_SIZE (GET_MODE (target))
3018 - GET_MODE_SIZE (TYPE_MODE (type));
3019 if (! BYTES_BIG_ENDIAN)
3020 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3021 else if (! WORDS_BIG_ENDIAN)
3022 offset %= UNITS_PER_WORD;
3024 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3025 SUBREG_PROMOTED_VAR_P (target) = 1;
3026 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
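/* Editorial arithmetic, assuming 4-byte words: a QImode value
   promoted to SImode gives offset = 4 - 1 = 3. On a fully big-endian
   target (BYTES_ and WORDS_BIG_ENDIAN both set) neither correction
   above fires and 3 is exactly the byte address of the QImode datum
   within the promoted register; the two inner branches handle the
   mixed-endian layouts. */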
3030 /* If the size of args is variable or this was a constructor call for a stack
3031 argument, restore saved stack-pointer value. */
3033 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3035 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3036 stack_pointer_delta = old_stack_pointer_delta;
3037 pending_stack_adjust = old_pending_adj;
3038 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3039 stack_arg_under_construction = old_stack_arg_under_construction;
3040 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3041 stack_usage_map = initial_stack_usage_map;
3042 sibcall_failure = 1;
3044 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3046 #ifdef REG_PARM_STACK_SPACE
3048 restore_fixed_argument_area (save_area, argblock,
3049 high_to_save, low_to_save);
3052 /* If we saved any argument areas, restore them. */
3053 for (i = 0; i < num_actuals; i++)
3054 if (args[i].save_area)
3056 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3058 = gen_rtx_MEM (save_mode,
3059 memory_address (save_mode,
3060 XEXP (args[i].stack_slot, 0)));
3062 if (save_mode != BLKmode)
3063 emit_move_insn (stack_area, args[i].save_area);
3065 emit_block_move (stack_area, args[i].save_area,
3066 GEN_INT (args[i].locate.size.constant),
3067 BLOCK_OP_CALL_PARM);
3070 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3071 stack_usage_map = initial_stack_usage_map;
3074 /* If this was alloca, record the new stack level for nonlocal gotos.
3075 Check for the handler slots since we might not have a save area
3076 for non-local gotos. */
3078 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3079 update_nonlocal_goto_save_area ();
3081 /* Free up storage we no longer need. */
3082 for (i = 0; i < num_actuals; ++i)
3083 if (args[i].aligned_regs)
3084 free (args[i].aligned_regs);
3086 insns = get_insns ();
3091 tail_call_insns = insns;
3093 /* Restore the pending stack adjustment now that we have
3094 finished generating the sibling call sequence. */
3096 pending_stack_adjust = save_pending_stack_adjust;
3097 stack_pointer_delta = save_stack_pointer_delta;
3099 /* Prepare arg structure for next iteration. */
3100 for (i = 0; i < num_actuals; i++)
3103 args[i].aligned_regs = 0;
3107 sbitmap_free (stored_args_map);
3111 normal_call_insns = insns;
3113 /* Verify that we've deallocated all the stack we used. */
3114 gcc_assert ((flags & ECF_NORETURN)
3115 || (old_stack_allocated
3116 == stack_pointer_delta - pending_stack_adjust));
3119 /* If something prevents making this a sibling call,
3120 zero out the sequence. */
3121 if (sibcall_failure)
3122 tail_call_insns = NULL_RTX;
3127 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3128 arguments too, as the argument area is now clobbered by the call. */
3129 if (tail_call_insns)
3131 emit_insn (tail_call_insns);
3132 cfun->tail_call_emit = true;
3135 emit_insn (normal_call_insns);
3137 currently_expanding_call--;
3139 /* If this function returns with the stack pointer depressed, ensure
3140 this block saves and restores the stack pointer, show it was
3141 changed, and adjust for any outgoing arg space. */
3142 if (flags & ECF_SP_DEPRESSED)
3144 clear_pending_stack_adjust ();
3145 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3146 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3149 if (stack_usage_map_buf)
3150 free (stack_usage_map_buf);
3155 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3156 this function's incoming arguments.
3158 At the start of RTL generation we know the only REG_EQUIV notes
3159 in the rtl chain are those for incoming arguments, so we can look
3160 for REG_EQUIV notes between the start of the function and the
3161 NOTE_INSN_FUNCTION_BEG.
3163 This is (slight) overkill. We could keep track of the highest
3164 argument we clobber and be more selective in removing notes, but it
3165 does not seem to be worth the effort. */
3168 fixup_tail_calls (void)
3172 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3176 /* There are never REG_EQUIV notes for the incoming arguments
3177 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3179 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3182 note = find_reg_note (insn, REG_EQUIV, 0);
3184 remove_note (insn, note);
3185 note = find_reg_note (insn, REG_EQUIV, 0);
3190 /* Traverse a list of TYPES and expand all complex types into their components. */
3193 split_complex_types (tree types)
3197 /* Before allocating memory, check for the common case of no complex. */
3198 for (p = types; p; p = TREE_CHAIN (p))
3200 tree type = TREE_VALUE (p);
3201 if (TREE_CODE (type) == COMPLEX_TYPE
3202 && targetm.calls.split_complex_arg (type))
3208 types = copy_list (types);
3210 for (p = types; p; p = TREE_CHAIN (p))
3212 tree complex_type = TREE_VALUE (p);
3214 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3215 && targetm.calls.split_complex_arg (complex_type))
3219 /* Rewrite complex type with component type. */
3220 TREE_VALUE (p) = TREE_TYPE (complex_type);
3221 next = TREE_CHAIN (p);
3223 /* Add another component type for the imaginary part. */
3224 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3225 TREE_CHAIN (p) = imag;
3226 TREE_CHAIN (imag) = next;
3228 /* Skip the newly created node. */
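/* Editorial example: an argument list of (complex double, int) is
   rewritten in place to (double, double, int); the COMPLEX_TYPE entry
   is overwritten with its component type and a second entry for the
   imaginary part is spliced in behind it before the walk resumes. */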
3236 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3237 The RETVAL parameter specifies whether the return value needs to be saved; the other
3238 parameters are documented in the emit_library_call function below. */
3241 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3242 enum libcall_type fn_type,
3243 enum machine_mode outmode, int nargs, va_list p)
3245 /* Total size in bytes of all the stack-parms scanned so far. */
3246 struct args_size args_size;
3247 /* Size of arguments before any adjustments (such as rounding). */
3248 struct args_size original_args_size;
3254 CUMULATIVE_ARGS args_so_far;
3258 enum machine_mode mode;
3261 struct locate_and_pad_arg_data locate;
3265 int old_inhibit_defer_pop = inhibit_defer_pop;
3266 rtx call_fusage = 0;
3269 int pcc_struct_value = 0;
3270 int struct_value_size = 0;
3272 int reg_parm_stack_space = 0;
3275 tree tfom; /* type_for_mode (outmode, 0) */
3277 #ifdef REG_PARM_STACK_SPACE
3278 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
3280 int low_to_save, high_to_save;
3281 rtx save_area = 0; /* Place that it is saved. */
3284 /* Initial extent of the outgoing stack usage map, saved so it can be restored after the call. */
3285 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3286 char *initial_stack_usage_map = stack_usage_map;
3287 char *stack_usage_map_buf = NULL;
3289 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3291 #ifdef REG_PARM_STACK_SPACE
3292 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3295 /* By default, library functions can not throw. */
3296 flags = ECF_NOTHROW;
3308 case LCT_CONST_MAKE_BLOCK:
3309 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3311 case LCT_PURE_MAKE_BLOCK:
3312 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3315 flags |= ECF_NORETURN;
3318 flags = ECF_NORETURN;
3320 case LCT_RETURNS_TWICE:
3321 flags = ECF_RETURNS_TWICE;
3326 /* Ensure current function's preferred stack boundary is at least what we need. */
3328 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3329 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3331 /* If this kind of value comes back in memory,
3332 decide where in memory it should come back. */
3333 if (outmode != VOIDmode)
3335 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3336 if (aggregate_value_p (tfom, 0))
3338 #ifdef PCC_STATIC_STRUCT_RETURN
3340 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3341 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3342 pcc_struct_value = 1;
3344 value = gen_reg_rtx (outmode);
3345 #else /* not PCC_STATIC_STRUCT_RETURN */
3346 struct_value_size = GET_MODE_SIZE (outmode);
3347 if (value != 0 && MEM_P (value))
3350 mem_value = assign_temp (tfom, 0, 1, 1);
3352 /* This call returns a big structure. */
3353 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3357 tfom = void_type_node;
3359 /* ??? Unfinished: must pass the memory address as an argument. */
3361 /* Copy all the libcall-arguments out of the varargs data
3362 and into a vector ARGVEC.
3364 Compute how to pass each argument. We only support a very small subset
3365 of the full argument passing conventions to limit complexity here since
3366 library functions shouldn't have many args. */
3368 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3369 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3371 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3372 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3374 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3377 args_size.constant = 0;
3382 /* Now we are about to start emitting insns that can be deleted
3383 if a libcall is deleted. */
3384 if (flags & ECF_LIBCALL_BLOCK)
3389 /* If there's a structure value address to be passed,
3390 either pass it in the special place, or pass it as an extra argument. */
3391 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3393 rtx addr = XEXP (mem_value, 0);
3397 /* Make sure it is a reasonable operand for a move or push insn. */
3398 if (!REG_P (addr) && !MEM_P (addr)
3399 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3400 addr = force_operand (addr, NULL_RTX);
3402 argvec[count].value = addr;
3403 argvec[count].mode = Pmode;
3404 argvec[count].partial = 0;
3406 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3407 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3408 NULL_TREE, 1) == 0);
3410 locate_and_pad_parm (Pmode, NULL_TREE,
3411 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3414 argvec[count].reg != 0,
3416 0, NULL_TREE, &args_size, &argvec[count].locate);
3418 if (argvec[count].reg == 0 || argvec[count].partial != 0
3419 || reg_parm_stack_space > 0)
3420 args_size.constant += argvec[count].locate.size.constant;
3422 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3427 for (; count < nargs; count++)
3429 rtx val = va_arg (p, rtx);
3430 enum machine_mode mode = va_arg (p, enum machine_mode);
3432 /* We cannot convert the arg value to the mode the library wants here;
3433 must do it earlier where we know the signedness of the arg. */
3434 gcc_assert (mode != BLKmode
3435 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3437 /* Make sure it is a reasonable operand for a move or push insn. */
3438 if (!REG_P (val) && !MEM_P (val)
3439 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3440 val = force_operand (val, NULL_RTX);
3442 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3446 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3448 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3449 functions, so we have to pretend this isn't such a function. */
3450 if (flags & ECF_LIBCALL_BLOCK)
3452 rtx insns = get_insns ();
3456 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3458 /* If this was a CONST function, it is now PURE since
3459 it now reads memory. */
3460 if (flags & ECF_CONST)
3462 flags &= ~ECF_CONST;
3466 if (MEM_P (val) && !must_copy)
3470 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3472 emit_move_insn (slot, val);
3475 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3476 gen_rtx_USE (VOIDmode, slot),
3479 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3480 gen_rtx_CLOBBER (VOIDmode,
3485 val = force_operand (XEXP (slot, 0), NULL_RTX);
3488 argvec[count].value = val;
3489 argvec[count].mode = mode;
3491 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3493 argvec[count].partial
3494 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3496 locate_and_pad_parm (mode, NULL_TREE,
3497 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3500 argvec[count].reg != 0,
3502 argvec[count].partial,
3503 NULL_TREE, &args_size, &argvec[count].locate);
3505 gcc_assert (!argvec[count].locate.size.var);
3507 if (argvec[count].reg == 0 || argvec[count].partial != 0
3508 || reg_parm_stack_space > 0)
3509 args_size.constant += argvec[count].locate.size.constant;
3511 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3514 /* If this machine requires an external definition for library
3515 functions, write one out. */
3516 assemble_external_libcall (fun);
3518 original_args_size = args_size;
3519 args_size.constant = (((args_size.constant
3520 + stack_pointer_delta
3524 - stack_pointer_delta);
3526 args_size.constant = MAX (args_size.constant,
3527 reg_parm_stack_space);
3529 if (!OUTGOING_REG_PARM_STACK_SPACE)
3530 args_size.constant -= reg_parm_stack_space;
3532 if (args_size.constant > current_function_outgoing_args_size)
3533 current_function_outgoing_args_size = args_size.constant;
3535 if (ACCUMULATE_OUTGOING_ARGS)
3537 /* Since the stack pointer will never be pushed, it is possible for
3538 the evaluation of a parm to clobber something we have already
3539 written to the stack. Since most function calls on RISC machines
3540 do not use the stack, this is uncommon, but must work correctly.
3542 Therefore, we save any area of the stack that was already written
3543 and that we are using. Here we set up to do this by making a new
3544 stack usage map from the old one.
3546 Another approach might be to try to reorder the argument
3547 evaluations to avoid this conflicting stack usage. */
3549 needed = args_size.constant;
3551 /* Since we will be writing into the entire argument area, the
3552 map must be allocated for its entire size, not just the part that
3553 is the responsibility of the caller. */
3554 if (!OUTGOING_REG_PARM_STACK_SPACE)
3555 needed += reg_parm_stack_space;
3557 #ifdef ARGS_GROW_DOWNWARD
3558 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3561 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3564 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3565 stack_usage_map = stack_usage_map_buf;
3567 if (initial_highest_arg_in_use)
3568 memcpy (stack_usage_map, initial_stack_usage_map,
3569 initial_highest_arg_in_use);
3571 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3572 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3573 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3576 /* We must be careful to use virtual regs before they're instantiated,
3577 and real regs afterwards. Loop optimization, for example, can create
3578 new libcalls after we've instantiated the virtual regs, and if we
3579 use virtuals anyway, they won't match the rtl patterns. */
3581 if (virtuals_instantiated)
3582 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3584 argblock = virtual_outgoing_args_rtx;
3589 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3592 /* If we push args individually in reverse order, perform stack alignment
3593 before the first push (the last arg). */
3594 if (argblock == 0 && PUSH_ARGS_REVERSED)
3595 anti_adjust_stack (GEN_INT (args_size.constant
3596 - original_args_size.constant));
3598 if (PUSH_ARGS_REVERSED)
3609 #ifdef REG_PARM_STACK_SPACE
3610 if (ACCUMULATE_OUTGOING_ARGS)
3612 /* The argument list is the property of the called routine and it
3613 may clobber it. If the fixed area has been used for previous
3614 parameters, we must save and restore it. */
3615 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3616 &low_to_save, &high_to_save);
3620 /* Push the args that need to be pushed. */
3622 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3623 are to be pushed. */
3624 for (count = 0; count < nargs; count++, argnum += inc)
3626 enum machine_mode mode = argvec[argnum].mode;
3627 rtx val = argvec[argnum].value;
3628 rtx reg = argvec[argnum].reg;
3629 int partial = argvec[argnum].partial;
3630 int lower_bound = 0, upper_bound = 0, i;
3632 if (! (reg != 0 && partial == 0))
3634 if (ACCUMULATE_OUTGOING_ARGS)
3636 /* If this is being stored into a pre-allocated, fixed-size
3637 stack area, save any previous data at that location. */
3639 #ifdef ARGS_GROW_DOWNWARD
3640 /* stack_slot is negative, but we want to index stack_usage_map
3641 with positive values. */
3642 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3643 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3645 lower_bound = argvec[argnum].locate.offset.constant;
3646 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
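/* Editorial arithmetic with assumed values: growing upward, an
   argument with locate.offset.constant == 8 and size 4 occupies
   stack_usage_map[8..11]; growing downward, offset == -12 and size 4
   give upper_bound = 13 and lower_bound = 9, i.e. entries 9..12 of
   the positively indexed map. */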
3650 /* Don't worry about things in the fixed argument area;
3651 it has already been saved. */
3652 if (i < reg_parm_stack_space)
3653 i = reg_parm_stack_space;
3654 while (i < upper_bound && stack_usage_map[i] == 0)
3657 if (i < upper_bound)
3659 /* We need to make a save area. */
3661 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3662 enum machine_mode save_mode
3663 = mode_for_size (size, MODE_INT, 1);
3665 = plus_constant (argblock,
3666 argvec[argnum].locate.offset.constant);
3668 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3670 if (save_mode == BLKmode)
3672 argvec[argnum].save_area
3673 = assign_stack_temp (BLKmode,
3674 argvec[argnum].locate.size.constant,
3677 emit_block_move (validize_mem (argvec[argnum].save_area),
3679 GEN_INT (argvec[argnum].locate.size.constant),
3680 BLOCK_OP_CALL_PARM);
3684 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3686 emit_move_insn (argvec[argnum].save_area, stack_area);
3691 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3692 partial, reg, 0, argblock,
3693 GEN_INT (argvec[argnum].locate.offset.constant),
3694 reg_parm_stack_space,
3695 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3697 /* Now mark the segment we just used. */
3698 if (ACCUMULATE_OUTGOING_ARGS)
3699 for (i = lower_bound; i < upper_bound; i++)
3700 stack_usage_map[i] = 1;
3704 if (flags & ECF_CONST)
3708 /* Indicate argument access so that alias.c knows that these
3711 use = plus_constant (argblock,
3712 argvec[argnum].locate.offset.constant);
3714 /* When arguments are pushed, trying to tell alias.c where
3715 exactly this argument is won't work, because the
3716 auto-increment causes confusion. So we merely indicate
3717 that we access something with a known mode somewhere on
3719 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3720 gen_rtx_SCRATCH (Pmode));
3721 use = gen_rtx_MEM (argvec[argnum].mode, use);
3722 use = gen_rtx_USE (VOIDmode, use);
3723 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3728 /* If we pushed args in forward order, perform stack alignment
3729 after pushing the last arg. */
3730 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3731 anti_adjust_stack (GEN_INT (args_size.constant
3732 - original_args_size.constant));
3734 if (PUSH_ARGS_REVERSED)
3739 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3741 /* Now load any reg parms into their regs. */
3743 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3744 are to be pushed. */
3745 for (count = 0; count < nargs; count++, argnum += inc)
3747 enum machine_mode mode = argvec[argnum].mode;
3748 rtx val = argvec[argnum].value;
3749 rtx reg = argvec[argnum].reg;
3750 int partial = argvec[argnum].partial;
3752 /* Handle calls that pass values in multiple non-contiguous
3753 locations. The PA64 has examples of this for library calls. */
3754 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3755 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3756 else if (reg != 0 && partial == 0)
3757 emit_move_insn (reg, val);
3762 /* Any regs containing parms remain in use through the call. */
3763 for (count = 0; count < nargs; count++)
3765 rtx reg = argvec[count].reg;
3766 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3767 use_group_regs (&call_fusage, reg);
3770 int partial = argvec[count].partial;
3774 gcc_assert (partial % UNITS_PER_WORD == 0);
3775 nregs = partial / UNITS_PER_WORD;
3776 use_regs (&call_fusage, REGNO (reg), nregs);
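/* Editorial example, assuming 4-byte words: an argument with
   partial == 8 has its first two words in registers, so nregs = 8 / 4
   = 2 and exactly those two hard registers are recorded in
   CALL_FUSAGE; the rest of the argument was pushed earlier. */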
3779 use_reg (&call_fusage, reg);
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
		      force_reg (Pmode,
				 force_operand (XEXP (mem_value, 0),
						NULL_RTX)));
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  valreg = (mem_value == 0 && outmode != VOIDmode
	    ? hard_libcall_value (outmode) : NULL_RTX);
  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
		& (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
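  /* (Worked example, not in the original source: with a 128-bit
     PREFERRED_STACK_BOUNDARY the mask is 128/8 - 1 = 15, so the assertion
     demands a 16-byte-aligned stack pointer delta at the call.)  */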
  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
	 immediately after the CALL_INSN.  Some ports emit more than
	 just a CALL_INSN above, so we must search for it here.  */
      rtx last = get_last_insn ();
      while (!CALL_P (last))
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  gcc_assert (last != before_call);
	}

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
	{
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	}
      else
	{
	  rtx note = 0;
	  rtx temp;
	  int i;

	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      temp = gen_reg_rtx (outmode);
	      emit_group_store (temp, valreg, NULL_TREE,
				GET_MODE_SIZE (outmode));
	      valreg = temp;
	    }

	  temp = gen_reg_rtx (GET_MODE (valreg));

	  /* Construct an "equal form" for the value which mentions all the
	     arguments in order as well as the function name.  */
	  for (i = 0; i < nargs; i++)
	    note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
	  note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
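	  /* (Illustrative note, not in the original source: for a
	     two-argument libcall the list built above has the shape
		 (expr_list FUN (expr_list ARG1 (expr_list ARG0 nil)))
	     and is used by emit_libcall_block below as the equivalent
	     form recorded for the result.)  */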
	  insns = get_insns ();
	  end_sequence ();

	  if (flags & ECF_PURE)
	    note = gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_USE (VOIDmode,
						   gen_rtx_MEM (BLKmode,
								gen_rtx_SCRATCH (VOIDmode))),
				      note);

	  emit_libcall_block (insns, temp, valreg, note);

	  valreg = temp;
	}
    }
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else
	{
	  /* Convert to the proper mode if PROMOTE_MODE has been active.  */
	  if (GET_MODE (valreg) != outmode)
	    {
	      int unsignedp = TYPE_UNSIGNED (tfom);

	      gcc_assert (targetm.calls.promote_function_return (tfom));
	      gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
			  == GET_MODE (valreg));

	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
	    }

	  if (value != 0)
	    emit_move_insn (value, valreg);
	  else
	    value = valreg;
	}
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem (argvec[count].save_area),
			       GEN_INT (argvec[count].locate.size.constant),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  if (stack_usage_map_buf)
    free (stack_usage_map_buf);

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or another LCT_ value for other types of library calls.  */
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
		   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
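/* (Illustrative sketch, not part of the original source: a caller passes
   each argument rtx followed by the mode to convert it to, e.g.

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
			addr, Pmode,
			const0_rtx, TYPE_MODE (integer_type_node),
			len, TYPE_MODE (sizetype));

   where ADDR and LEN stand for hypothetical rtx operands of the given
   modes.)  */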
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
			 enum libcall_type fn_type,
			 enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
				      nargs, p);
  va_end (p);

  return result;
}
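/* (Illustrative sketch, not part of the original source: expanders commonly
   let this routine pick the result location by passing NULL_RTX, e.g.

     rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
					SImode, 2, op0, SImode, op1, SImode);

   where LIBFUNC, OP0 and OP1 are hypothetical; RES then holds the SImode
   result of the call.)  */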
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack; if ACCUMULATE_OUTGOING_ARGS, this indicates that we
   need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
#ifdef ARGS_GROW_DOWNWARD
	  /* stack_slot is negative, but we want to index stack_usage_map
	     with positive values.  */
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	  else
	    upper_bound = 0;

	  lower_bound = upper_bound - arg->locate.size.constant;
#else
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	  else
	    lower_bound = 0;

	  upper_bound = lower_bound + arg->locate.size.constant;
#endif

	  i = lower_bound;
	  /* Don't worry about things in the fixed argument area;
	     it has already been saved.  */
	  if (i < reg_parm_stack_space)
	    i = reg_parm_stack_space;
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;

	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
	      enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  tree ot = TREE_TYPE (arg->tree_value);
		  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
						       | TYPE_QUAL_CONST));

		  arg->save_area = assign_temp (nt, 0, 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (arg->save_area), stack_area,
				   GEN_INT (arg->locate.size.constant),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
	 mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }
  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
					       arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));
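      /* (Worked example, not in the original source: with PARM_BOUNDARY == 32
	 a 2-byte scalar yields used = ((2 + 3) / 4) * 4 = 4, so the argument
	 gets a full 4-byte slot.)  */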
      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  int pad = used - size;
	  if (pad)
	    {
	      unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, pad_align);
	    }
	}
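      /* (Worked example, not in the original source: PAD & -PAD isolates
	 the lowest set bit of the padding, so a 2-byte pad gives
	 2 * BITS_PER_UNIT = 16 bits as the alignment the downward-padded
	 data is known to have.)  */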
      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      parm_align, partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == current_function_internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     current_function_internal_arg_pointer
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
	    {
	      if (XEXP (x, 0) != current_function_internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && GET_CODE (size_rtx) == CONST_INT);

	      if (arg->locate.offset.constant > i)
		{
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}
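      /* (Worked example, not in the original source: an outgoing slot at
	 offset 4 with an incoming argument at offset 0 of size 8 satisfies
	 0 < 4 < 0 + 8 above, so the areas overlap and the sibcall is
	 abandoned.)  */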
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }
  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
			     tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
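/* (Worked example, not in the original source: on a big-endian target with
   PARM_BOUNDARY == 32, a 6-byte BLKmode struct padded upward has
   6 % 4 != 0 and its data would land in the wrong part of a register,
   so must_pass_in_stack_var_size_or_pad returns true.)  */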