1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
35 #include "diagnostic-core.h"
40 #include "langhooks.h"
46 #include "tree-flow.h"
48 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
49 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
51 /* Data structure and subroutines used within expand_call. */
55 /* Tree node for this argument. */
57 /* Mode for value; TYPE_MODE unless promoted. */
58 enum machine_mode mode;
59 /* Current RTL value for argument, or 0 if it isn't precomputed. */
61 /* Initially-compute RTL value for argument; only for const functions. */
63 /* Register to pass this argument in, 0 if passed on stack, or an
64 PARALLEL if the arg is to be copied into multiple non-contiguous
67 /* Register to pass this argument in when generating tail call sequence.
68 This is not the same register as for normal calls on machines with
71 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
72 form for emit_group_move. */
74 /* If REG was promoted from the actual mode of the argument expression,
75 indicates whether the promotion is sign- or zero-extended. */
77 /* Number of bytes to put in registers. 0 means put the whole arg
78 in registers. Also 0 if not passed in registers. */
80 /* Nonzero if argument must be passed on stack.
81 Note that some arguments may be passed on the stack
82 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
83 pass_on_stack identifies arguments that *cannot* go in registers. */
85 /* Some fields packaged up for locate_and_pad_parm. */
86 struct locate_and_pad_arg_data locate;
87 /* Location on the stack at which parameter should be stored. The store
88 has already been done if STACK == VALUE. */
90 /* Location on the stack of the start of this argument slot. This can
91 differ from STACK if this arg pads downward. This location is known
92 to be aligned to FUNCTION_ARG_BOUNDARY. */
94 /* Place that this stack area has been saved, if needed. */
96 /* If an argument's alignment does not permit direct copying into registers,
97 copy in smaller-sized pieces into pseudos. These are stored in a
98 block pointed to by this field. The next field says how many
99 word-sized pseudos we made. */
104 /* A vector of one char per byte of stack space. A byte if nonzero if
105 the corresponding stack location has been used.
106 This vector is used to prevent a function call within an argument from
107 clobbering any stack already set up. */
108 static char *stack_usage_map;
110 /* Size of STACK_USAGE_MAP. */
111 static int highest_outgoing_arg_in_use;
113 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
114 stack location's tail call argument has been already stored into the stack.
115 This bitmap is used to prevent sibling call optimization if function tries
116 to use parent's incoming argument slots when they have been already
117 overwritten with tail call arguments. */
118 static sbitmap stored_args_map;
120 /* stack_arg_under_construction is nonzero when an argument may be
121 initialized with a constructor call (including a C function that
122 returns a BLKmode struct) and expand_call must take special action
123 to make sure the object being constructed does not overlap the
124 argument list for the constructor call. */
125 static int stack_arg_under_construction;
127 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
128 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
130 static void precompute_register_parameters (int, struct arg_data *, int *);
131 static int store_one_arg (struct arg_data *, rtx, int, int, int);
132 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
133 static int finalize_must_preallocate (int, int, struct arg_data *,
135 static void precompute_arguments (int, struct arg_data *);
136 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
137 static void initialize_argument_information (int, struct arg_data *,
138 struct args_size *, int,
140 tree, tree, CUMULATIVE_ARGS *, int,
141 rtx *, int *, int *, int *,
143 static void compute_argument_addresses (struct arg_data *, rtx, int);
144 static rtx rtx_for_function_call (tree, tree);
145 static void load_register_parameters (struct arg_data *, int, rtx *, int,
147 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
148 enum machine_mode, int, va_list);
149 static int special_function_p (const_tree, int);
150 static int check_sibcall_argument_overlap_1 (rtx);
151 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
153 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
155 static tree split_complex_types (tree);
157 #ifdef REG_PARM_STACK_SPACE
158 static rtx save_fixed_argument_area (int, rtx, int *, int *);
159 static void restore_fixed_argument_area (rtx, rtx, int, int);
162 /* Force FUNEXP into a form suitable for the address of a CALL,
163 and return that as an rtx. Also load the static chain register
164 if FNDECL is a nested function.
166 CALL_FUSAGE points to a variable holding the prospective
167 CALL_INSN_FUNCTION_USAGE information. */
170 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
171 rtx *call_fusage, int reg_parm_seen, int sibcallp)
173 /* Make a valid memory address and copy constants through pseudo-regs,
174 but not for a constant address if -fno-function-cse. */
175 if (GET_CODE (funexp) != SYMBOL_REF)
176 /* If we are using registers for parameters, force the
177 function address into a register now. */
178 funexp = ((reg_parm_seen
179 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
180 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
181 : memory_address (FUNCTION_MODE, funexp));
184 #ifndef NO_FUNCTION_CSE
185 if (optimize && ! flag_no_function_cse)
186 funexp = force_reg (Pmode, funexp);
190 if (static_chain_value != 0)
195 chain = targetm.calls.static_chain (fndecl, false);
196 static_chain_value = convert_memory_address (Pmode, static_chain_value);
198 emit_move_insn (chain, static_chain_value);
200 use_reg (call_fusage, chain);
206 /* Generate instructions to call function FUNEXP,
207 and optionally pop the results.
208 The CALL_INSN is the first insn generated.
210 FNDECL is the declaration node of the function. This is given to the
211 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
214 FUNTYPE is the data type of the function. This is given to the hook
215 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
216 own args. We used to allow an identifier for library functions, but
217 that doesn't work when the return type is an aggregate type and the
218 calling convention says that the pointer to this aggregate is to be
219 popped by the callee.
221 STACK_SIZE is the number of bytes of arguments on the stack,
222 ROUNDED_STACK_SIZE is that number rounded up to
223 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
224 both to put into the call insn and to generate explicit popping
227 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
228 It is zero if this call doesn't want a structure value.
230 NEXT_ARG_REG is the rtx that results from executing
231 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
232 just after all the args have had their registers assigned.
233 This could be whatever you like, but normally it is the first
234 arg-register beyond those used for args in this call,
235 or 0 if all the arg-registers are used in this call.
236 It is passed on to `gen_call' so you can put this info in the call insn.
238 VALREG is a hard register in which a value is returned,
239 or 0 if the call does not return a value.
241 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
242 the args to this call were processed.
243 We restore `inhibit_defer_pop' to that value.
245 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
246 denote registers used by the called function. */
249 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
250 tree funtype ATTRIBUTE_UNUSED,
251 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
252 HOST_WIDE_INT rounded_stack_size,
253 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
254 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
255 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
256 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
258 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
260 int already_popped = 0;
261 HOST_WIDE_INT n_popped
262 = targetm.calls.return_pops_args (fndecl, funtype, stack_size);
264 #ifdef CALL_POPS_ARGS
265 n_popped += CALL_POPS_ARGS (* args_so_far);
268 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
269 and we don't want to load it into a register as an optimization,
270 because prepare_call_address already did it if it should be done. */
271 if (GET_CODE (funexp) != SYMBOL_REF)
272 funexp = memory_address (FUNCTION_MODE, funexp);
274 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
275 if ((ecf_flags & ECF_SIBCALL)
276 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
277 && (n_popped > 0 || stack_size == 0))
279 rtx n_pop = GEN_INT (n_popped);
282 /* If this subroutine pops its own args, record that in the call insn
283 if possible, for the sake of frame pointer elimination. */
286 pat = GEN_SIBCALL_VALUE_POP (valreg,
287 gen_rtx_MEM (FUNCTION_MODE, funexp),
288 rounded_stack_size_rtx, next_arg_reg,
291 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
292 rounded_stack_size_rtx, next_arg_reg, n_pop);
294 emit_call_insn (pat);
300 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
301 /* If the target has "call" or "call_value" insns, then prefer them
302 if no arguments are actually popped. If the target does not have
303 "call" or "call_value" insns, then we must use the popping versions
304 even if the call has no arguments to pop. */
305 #if defined (HAVE_call) && defined (HAVE_call_value)
306 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
309 if (HAVE_call_pop && HAVE_call_value_pop)
312 rtx n_pop = GEN_INT (n_popped);
315 /* If this subroutine pops its own args, record that in the call insn
316 if possible, for the sake of frame pointer elimination. */
319 pat = GEN_CALL_VALUE_POP (valreg,
320 gen_rtx_MEM (FUNCTION_MODE, funexp),
321 rounded_stack_size_rtx, next_arg_reg, n_pop);
323 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
324 rounded_stack_size_rtx, next_arg_reg, n_pop);
326 emit_call_insn (pat);
332 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
333 if ((ecf_flags & ECF_SIBCALL)
334 && HAVE_sibcall && HAVE_sibcall_value)
337 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
338 gen_rtx_MEM (FUNCTION_MODE, funexp),
339 rounded_stack_size_rtx,
340 next_arg_reg, NULL_RTX));
342 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
343 rounded_stack_size_rtx, next_arg_reg,
344 GEN_INT (struct_value_size)));
349 #if defined (HAVE_call) && defined (HAVE_call_value)
350 if (HAVE_call && HAVE_call_value)
353 emit_call_insn (GEN_CALL_VALUE (valreg,
354 gen_rtx_MEM (FUNCTION_MODE, funexp),
355 rounded_stack_size_rtx, next_arg_reg,
358 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
359 rounded_stack_size_rtx, next_arg_reg,
360 GEN_INT (struct_value_size)));
366 /* Find the call we just emitted. */
367 call_insn = last_call_insn ();
369 /* Put the register usage information there. */
370 add_function_usage_to (call_insn, call_fusage);
372 /* If this is a const call, then set the insn's unchanging bit. */
373 if (ecf_flags & ECF_CONST)
374 RTL_CONST_CALL_P (call_insn) = 1;
376 /* If this is a pure call, then set the insn's unchanging bit. */
377 if (ecf_flags & ECF_PURE)
378 RTL_PURE_CALL_P (call_insn) = 1;
380 /* If this is a const call, then set the insn's unchanging bit. */
381 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
382 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
384 /* Create a nothrow REG_EH_REGION note, if needed. */
385 make_reg_eh_region_note (call_insn, ecf_flags, 0);
387 if (ecf_flags & ECF_NORETURN)
388 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
390 if (ecf_flags & ECF_RETURNS_TWICE)
392 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
393 cfun->calls_setjmp = 1;
396 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
398 /* Record debug information for virtual calls. */
399 if (flag_enable_icf_debug && fndecl == NULL)
400 (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
401 INSN_UID (call_insn));
403 /* Restore this now, so that we do defer pops for this call's args
404 if the context of the call as a whole permits. */
405 inhibit_defer_pop = old_inhibit_defer_pop;
410 CALL_INSN_FUNCTION_USAGE (call_insn)
411 = gen_rtx_EXPR_LIST (VOIDmode,
412 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
413 CALL_INSN_FUNCTION_USAGE (call_insn));
414 rounded_stack_size -= n_popped;
415 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
416 stack_pointer_delta -= n_popped;
418 /* If popup is needed, stack realign must use DRAP */
419 if (SUPPORTS_STACK_ALIGNMENT)
420 crtl->need_drap = true;
423 if (!ACCUMULATE_OUTGOING_ARGS)
425 /* If returning from the subroutine does not automatically pop the args,
426 we need an instruction to pop them sooner or later.
427 Perhaps do it now; perhaps just record how much space to pop later.
429 If returning from the subroutine does pop the args, indicate that the
430 stack pointer will be changed. */
432 if (rounded_stack_size != 0)
434 if (ecf_flags & ECF_NORETURN)
435 /* Just pretend we did the pop. */
436 stack_pointer_delta -= rounded_stack_size;
437 else if (flag_defer_pop && inhibit_defer_pop == 0
438 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
439 pending_stack_adjust += rounded_stack_size;
441 adjust_stack (rounded_stack_size_rtx);
444 /* When we accumulate outgoing args, we must avoid any stack manipulations.
445 Restore the stack pointer to its original value now. Usually
446 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
447 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
448 popping variants of functions exist as well.
450 ??? We may optimize similar to defer_pop above, but it is
451 probably not worthwhile.
453 ??? It will be worthwhile to enable combine_stack_adjustments even for
456 anti_adjust_stack (GEN_INT (n_popped));
459 /* Determine if the function identified by NAME and FNDECL is one with
460 special properties we wish to know about.
462 For example, if the function might return more than one time (setjmp), then
463 set RETURNS_TWICE to a nonzero value.
465 Similarly set NORETURN if the function is in the longjmp family.
467 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
468 space from the stack such as alloca. */
471 special_function_p (const_tree fndecl, int flags)
473 if (fndecl && DECL_NAME (fndecl)
474 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
475 /* Exclude functions not at the file scope, or not `extern',
476 since they are not the magic functions we would otherwise
478 FIXME: this should be handled with attributes, not with this
479 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
480 because you can declare fork() inside a function if you
482 && (DECL_CONTEXT (fndecl) == NULL_TREE
483 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
484 && TREE_PUBLIC (fndecl))
486 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
487 const char *tname = name;
489 /* We assume that alloca will always be called by name. It
490 makes no sense to pass it as a pointer-to-function to
491 anything that does not understand its behavior. */
492 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && ! strcmp (name, "alloca"))
495 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && ! strcmp (name, "__builtin_alloca"))))
498 flags |= ECF_MAY_BE_ALLOCA;
500 /* Disregard prefix _, __, __x or __builtin_. */
505 && !strncmp (name + 3, "uiltin_", 7))
507 else if (name[1] == '_' && name[2] == 'x')
509 else if (name[1] == '_')
518 && (! strcmp (tname, "setjmp")
519 || ! strcmp (tname, "setjmp_syscall")))
521 && ! strcmp (tname, "sigsetjmp"))
523 && ! strcmp (tname, "savectx")))
524 flags |= ECF_RETURNS_TWICE;
527 && ! strcmp (tname, "siglongjmp"))
528 flags |= ECF_NORETURN;
530 else if ((tname[0] == 'q' && tname[1] == 's'
531 && ! strcmp (tname, "qsetjmp"))
532 || (tname[0] == 'v' && tname[1] == 'f'
533 && ! strcmp (tname, "vfork"))
534 || (tname[0] == 'g' && tname[1] == 'e'
535 && !strcmp (tname, "getcontext")))
536 flags |= ECF_RETURNS_TWICE;
538 else if (tname[0] == 'l' && tname[1] == 'o'
539 && ! strcmp (tname, "longjmp"))
540 flags |= ECF_NORETURN;
546 /* Return nonzero when FNDECL represents a call to setjmp. */
549 setjmp_call_p (const_tree fndecl)
551 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
555 /* Return true if STMT is an alloca call. */
558 gimple_alloca_call_p (const_gimple stmt)
562 if (!is_gimple_call (stmt))
565 fndecl = gimple_call_fndecl (stmt);
566 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
572 /* Return true when exp contains alloca call. */
575 alloca_call_p (const_tree exp)
577 if (TREE_CODE (exp) == CALL_EXPR
578 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
579 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
580 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
581 & ECF_MAY_BE_ALLOCA))
586 /* Detect flags (function attributes) from the function decl or type node. */
589 flags_from_decl_or_type (const_tree exp)
595 /* The function exp may have the `malloc' attribute. */
596 if (DECL_IS_MALLOC (exp))
599 /* The function exp may have the `returns_twice' attribute. */
600 if (DECL_IS_RETURNS_TWICE (exp))
601 flags |= ECF_RETURNS_TWICE;
603 /* Process the pure and const attributes. */
604 if (TREE_READONLY (exp))
606 if (DECL_PURE_P (exp))
608 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
609 flags |= ECF_LOOPING_CONST_OR_PURE;
611 if (DECL_IS_NOVOPS (exp))
613 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
616 if (TREE_NOTHROW (exp))
617 flags |= ECF_NOTHROW;
619 flags = special_function_p (exp, flags);
621 else if (TYPE_P (exp) && TYPE_READONLY (exp))
624 if (TREE_THIS_VOLATILE (exp))
626 flags |= ECF_NORETURN;
627 if (flags & (ECF_CONST|ECF_PURE))
628 flags |= ECF_LOOPING_CONST_OR_PURE;
634 /* Detect flags from a CALL_EXPR. */
637 call_expr_flags (const_tree t)
640 tree decl = get_callee_fndecl (t);
643 flags = flags_from_decl_or_type (decl);
646 t = TREE_TYPE (CALL_EXPR_FN (t));
647 if (t && TREE_CODE (t) == POINTER_TYPE)
648 flags = flags_from_decl_or_type (TREE_TYPE (t));
656 /* Precompute all register parameters as described by ARGS, storing values
657 into fields within the ARGS array.
659 NUM_ACTUALS indicates the total number elements in the ARGS array.
661 Set REG_PARM_SEEN if we encounter a register parameter. */
664 precompute_register_parameters (int num_actuals, struct arg_data *args,
671 for (i = 0; i < num_actuals; i++)
672 if (args[i].reg != 0 && ! args[i].pass_on_stack)
676 if (args[i].value == 0)
679 args[i].value = expand_normal (args[i].tree_value);
680 preserve_temp_slots (args[i].value);
684 /* If the value is a non-legitimate constant, force it into a
685 pseudo now. TLS symbols sometimes need a call to resolve. */
686 if (CONSTANT_P (args[i].value)
687 && !LEGITIMATE_CONSTANT_P (args[i].value))
688 args[i].value = force_reg (args[i].mode, args[i].value);
690 /* If we are to promote the function arg to a wider mode,
693 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
695 = convert_modes (args[i].mode,
696 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
697 args[i].value, args[i].unsignedp);
699 /* If we're going to have to load the value by parts, pull the
700 parts into pseudos. The part extraction process can involve
701 non-trivial computation. */
702 if (GET_CODE (args[i].reg) == PARALLEL)
704 tree type = TREE_TYPE (args[i].tree_value);
705 args[i].parallel_value
706 = emit_group_load_into_temps (args[i].reg, args[i].value,
707 type, int_size_in_bytes (type));
710 /* If the value is expensive, and we are inside an appropriately
711 short loop, put the value into a pseudo and then put the pseudo
714 For small register classes, also do this if this call uses
715 register parameters. This is to avoid reload conflicts while
716 loading the parameters registers. */
718 else if ((! (REG_P (args[i].value)
719 || (GET_CODE (args[i].value) == SUBREG
720 && REG_P (SUBREG_REG (args[i].value)))))
721 && args[i].mode != BLKmode
722 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
725 && targetm.small_register_classes_for_mode_p (args[i].mode))
727 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
731 #ifdef REG_PARM_STACK_SPACE
733 /* The argument list is the property of the called routine and it
734 may clobber it. If the fixed area has been used for previous
735 parameters, we must save and restore it. */
738 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
743 /* Compute the boundary of the area that needs to be saved, if any. */
744 high = reg_parm_stack_space;
745 #ifdef ARGS_GROW_DOWNWARD
748 if (high > highest_outgoing_arg_in_use)
749 high = highest_outgoing_arg_in_use;
751 for (low = 0; low < high; low++)
752 if (stack_usage_map[low] != 0)
755 enum machine_mode save_mode;
760 while (stack_usage_map[--high] == 0)
764 *high_to_save = high;
766 num_to_save = high - low + 1;
767 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
769 /* If we don't have the required alignment, must do this
771 if ((low & (MIN (GET_MODE_SIZE (save_mode),
772 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
775 #ifdef ARGS_GROW_DOWNWARD
780 stack_area = gen_rtx_MEM (save_mode,
781 memory_address (save_mode,
782 plus_constant (argblock,
785 set_mem_align (stack_area, PARM_BOUNDARY);
786 if (save_mode == BLKmode)
788 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
789 emit_block_move (validize_mem (save_area), stack_area,
790 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
794 save_area = gen_reg_rtx (save_mode);
795 emit_move_insn (save_area, stack_area);
805 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
807 enum machine_mode save_mode = GET_MODE (save_area);
811 #ifdef ARGS_GROW_DOWNWARD
812 delta = -high_to_save;
816 stack_area = gen_rtx_MEM (save_mode,
817 memory_address (save_mode,
818 plus_constant (argblock, delta)));
819 set_mem_align (stack_area, PARM_BOUNDARY);
821 if (save_mode != BLKmode)
822 emit_move_insn (stack_area, save_area);
824 emit_block_move (stack_area, validize_mem (save_area),
825 GEN_INT (high_to_save - low_to_save + 1),
828 #endif /* REG_PARM_STACK_SPACE */
830 /* If any elements in ARGS refer to parameters that are to be passed in
831 registers, but not in memory, and whose alignment does not permit a
832 direct copy into registers. Copy the values into a group of pseudos
833 which we will later copy into the appropriate hard registers.
835 Pseudos for each unaligned argument will be stored into the array
836 args[argnum].aligned_regs. The caller is responsible for deallocating
837 the aligned_regs array if it is nonzero. */
840 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
844 for (i = 0; i < num_actuals; i++)
845 if (args[i].reg != 0 && ! args[i].pass_on_stack
846 && args[i].mode == BLKmode
847 && MEM_P (args[i].value)
848 && (MEM_ALIGN (args[i].value)
849 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
851 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
852 int endian_correction = 0;
856 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
857 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
861 args[i].n_aligned_regs
862 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
865 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
867 /* Structures smaller than a word are normally aligned to the
868 least significant byte. On a BYTES_BIG_ENDIAN machine,
869 this means we must skip the empty high order bytes when
870 calculating the bit offset. */
871 if (bytes < UNITS_PER_WORD
872 #ifdef BLOCK_REG_PADDING
873 && (BLOCK_REG_PADDING (args[i].mode,
874 TREE_TYPE (args[i].tree_value), 1)
880 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
882 for (j = 0; j < args[i].n_aligned_regs; j++)
884 rtx reg = gen_reg_rtx (word_mode);
885 rtx word = operand_subword_force (args[i].value, j, BLKmode);
886 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
888 args[i].aligned_regs[j] = reg;
889 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
890 word_mode, word_mode);
892 /* There is no need to restrict this code to loading items
893 in TYPE_ALIGN sized hunks. The bitfield instructions can
894 load up entire word sized registers efficiently.
896 ??? This may not be needed anymore.
897 We use to emit a clobber here but that doesn't let later
898 passes optimize the instructions we emit. By storing 0 into
899 the register later passes know the first AND to zero out the
900 bitfield being set in the register is unnecessary. The store
901 of 0 will be deleted as will at least the first AND. */
903 emit_move_insn (reg, const0_rtx);
905 bytes -= bitsize / BITS_PER_UNIT;
906 store_bit_field (reg, bitsize, endian_correction, word_mode,
912 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
915 NUM_ACTUALS is the total number of parameters.
917 N_NAMED_ARGS is the total number of named arguments.
919 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
922 FNDECL is the tree code for the target of this call (if known)
924 ARGS_SO_FAR holds state needed by the target to know where to place
927 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
928 for arguments which are passed in registers.
930 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
931 and may be modified by this routine.
933 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
934 flags which may may be modified by this routine.
936 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
937 that requires allocation of stack space.
939 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
940 the thunked-to function. */
943 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
944 struct arg_data *args,
945 struct args_size *args_size,
946 int n_named_args ATTRIBUTE_UNUSED,
947 tree exp, tree struct_value_addr_value,
948 tree fndecl, tree fntype,
949 CUMULATIVE_ARGS *args_so_far,
950 int reg_parm_stack_space,
951 rtx *old_stack_level, int *old_pending_adj,
952 int *must_preallocate, int *ecf_flags,
953 bool *may_tailcall, bool call_from_thunk_p)
955 location_t loc = EXPR_LOCATION (exp);
956 /* 1 if scanning parms front to back, -1 if scanning back to front. */
959 /* Count arg position in order args appear. */
964 args_size->constant = 0;
967 /* In this loop, we consider args in the order they are written.
968 We fill up ARGS from the front or from the back if necessary
969 so that in any case the first arg to be pushed ends up at the front. */
971 if (PUSH_ARGS_REVERSED)
973 i = num_actuals - 1, inc = -1;
974 /* In this case, must reverse order of args
975 so that we compute and push the last arg first. */
982 /* First fill in the actual arguments in the ARGS array, splitting
983 complex arguments if necessary. */
986 call_expr_arg_iterator iter;
989 if (struct_value_addr_value)
991 args[j].tree_value = struct_value_addr_value;
994 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
996 tree argtype = TREE_TYPE (arg);
997 if (targetm.calls.split_complex_arg
999 && TREE_CODE (argtype) == COMPLEX_TYPE
1000 && targetm.calls.split_complex_arg (argtype))
1002 tree subtype = TREE_TYPE (argtype);
1003 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1005 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1008 args[j].tree_value = arg;
1013 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1014 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1016 tree type = TREE_TYPE (args[i].tree_value);
1018 enum machine_mode mode;
1020 /* Replace erroneous argument with constant zero. */
1021 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1022 args[i].tree_value = integer_zero_node, type = integer_type_node;
1024 /* If TYPE is a transparent union or record, pass things the way
1025 we would pass the first field of the union or record. We have
1026 already verified that the modes are the same. */
1027 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1028 && TYPE_TRANSPARENT_AGGR (type))
1029 type = TREE_TYPE (first_field (type));
1031 /* Decide where to pass this arg.
1033 args[i].reg is nonzero if all or part is passed in registers.
1035 args[i].partial is nonzero if part but not all is passed in registers,
1036 and the exact value says how many bytes are passed in registers.
1038 args[i].pass_on_stack is nonzero if the argument must at least be
1039 computed on the stack. It may then be loaded back into registers
1040 if args[i].reg is nonzero.
1042 These decisions are driven by the FUNCTION_... macros and must agree
1043 with those made by function.c. */
1045 /* See if this argument should be passed by invisible reference. */
1046 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1047 type, argpos < n_named_args))
1053 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1054 type, argpos < n_named_args);
1056 /* If we're compiling a thunk, pass through invisible references
1057 instead of making a copy. */
1058 if (call_from_thunk_p
1060 && !TREE_ADDRESSABLE (type)
1061 && (base = get_base_address (args[i].tree_value))
1062 && TREE_CODE (base) != SSA_NAME
1063 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1065 /* We can't use sibcalls if a callee-copied argument is
1066 stored in the current function's frame. */
1067 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1068 *may_tailcall = false;
1070 args[i].tree_value = build_fold_addr_expr_loc (loc,
1071 args[i].tree_value);
1072 type = TREE_TYPE (args[i].tree_value);
1074 if (*ecf_flags & ECF_CONST)
1075 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1079 /* We make a copy of the object and pass the address to the
1080 function being called. */
1083 if (!COMPLETE_TYPE_P (type)
1084 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1085 || (flag_stack_check == GENERIC_STACK_CHECK
1086 && compare_tree_int (TYPE_SIZE_UNIT (type),
1087 STACK_CHECK_MAX_VAR_SIZE) > 0))
1089 /* This is a variable-sized object. Make space on the stack
1091 rtx size_rtx = expr_size (args[i].tree_value);
1093 if (*old_stack_level == 0)
1095 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1096 *old_pending_adj = pending_stack_adjust;
1097 pending_stack_adjust = 0;
1100 /* We can pass TRUE as the 4th argument because we just
1101 saved the stack pointer and will restore it right after
1103 copy = gen_rtx_MEM (BLKmode,
1104 allocate_dynamic_stack_space
1105 (size_rtx, NULL_RTX,
1106 TYPE_ALIGN (type), TRUE));
1107 set_mem_attributes (copy, type, 1);
1110 copy = assign_temp (type, 0, 1, 0);
1112 store_expr (args[i].tree_value, copy, 0, false);
1114 /* Just change the const function to pure and then let
1115 the next test clear the pure based on
1117 if (*ecf_flags & ECF_CONST)
1119 *ecf_flags &= ~ECF_CONST;
1120 *ecf_flags |= ECF_PURE;
1123 if (!callee_copies && *ecf_flags & ECF_PURE)
1124 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1127 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1128 type = TREE_TYPE (args[i].tree_value);
1129 *may_tailcall = false;
1133 unsignedp = TYPE_UNSIGNED (type);
1134 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1135 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1137 args[i].unsignedp = unsignedp;
1138 args[i].mode = mode;
1140 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1141 argpos < n_named_args);
1143 /* If this is a sibling call and the machine has register windows, the
1144 register window has to be unwinded before calling the routine, so
1145 arguments have to go into the incoming registers. */
1146 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1147 args[i].tail_call_reg
1148 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1149 argpos < n_named_args);
1151 args[i].tail_call_reg = args[i].reg;
1155 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1156 argpos < n_named_args);
1158 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1160 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1161 it means that we are to pass this arg in the register(s) designated
1162 by the PARALLEL, but also to pass it in the stack. */
1163 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1164 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1165 args[i].pass_on_stack = 1;
1167 /* If this is an addressable type, we must preallocate the stack
1168 since we must evaluate the object into its final location.
1170 If this is to be passed in both registers and the stack, it is simpler
1172 if (TREE_ADDRESSABLE (type)
1173 || (args[i].pass_on_stack && args[i].reg != 0))
1174 *must_preallocate = 1;
1176 /* Compute the stack-size of this argument. */
1177 if (args[i].reg == 0 || args[i].partial != 0
1178 || reg_parm_stack_space > 0
1179 || args[i].pass_on_stack)
1180 locate_and_pad_parm (mode, type,
1181 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1186 args[i].pass_on_stack ? 0 : args[i].partial,
1187 fndecl, args_size, &args[i].locate);
1188 #ifdef BLOCK_REG_PADDING
1190 /* The argument is passed entirely in registers. See at which
1191 end it should be padded. */
1192 args[i].locate.where_pad =
1193 BLOCK_REG_PADDING (mode, type,
1194 int_size_in_bytes (type) <= UNITS_PER_WORD);
1197 /* Update ARGS_SIZE, the total stack space for args so far. */
1199 args_size->constant += args[i].locate.size.constant;
1200 if (args[i].locate.size.var)
1201 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1203 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1204 have been used, etc. */
1206 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1207 type, argpos < n_named_args);
1211 /* Update ARGS_SIZE to contain the total size for the argument block.
1212 Return the original constant component of the argument block's size.
1214 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1215 for arguments passed in registers. */
1218 compute_argument_block_size (int reg_parm_stack_space,
1219 struct args_size *args_size,
1220 tree fndecl ATTRIBUTE_UNUSED,
1221 tree fntype ATTRIBUTE_UNUSED,
1222 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1224 int unadjusted_args_size = args_size->constant;
1226 /* For accumulate outgoing args mode we don't need to align, since the frame
1227 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1228 backends from generating misaligned frame sizes. */
1229 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1230 preferred_stack_boundary = STACK_BOUNDARY;
1232 /* Compute the actual size of the argument block required. The variable
1233 and constant sizes must be combined, the size may have to be rounded,
1234 and there may be a minimum required size. */
1238 args_size->var = ARGS_SIZE_TREE (*args_size);
1239 args_size->constant = 0;
1241 preferred_stack_boundary /= BITS_PER_UNIT;
1242 if (preferred_stack_boundary > 1)
1244 /* We don't handle this case yet. To handle it correctly we have
1245 to add the delta, round and subtract the delta.
1246 Currently no machine description requires this support. */
1247 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1248 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1251 if (reg_parm_stack_space > 0)
1254 = size_binop (MAX_EXPR, args_size->var,
1255 ssize_int (reg_parm_stack_space));
1257 /* The area corresponding to register parameters is not to count in
1258 the size of the block we need. So make the adjustment. */
1259 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1261 = size_binop (MINUS_EXPR, args_size->var,
1262 ssize_int (reg_parm_stack_space));
1267 preferred_stack_boundary /= BITS_PER_UNIT;
1268 if (preferred_stack_boundary < 1)
1269 preferred_stack_boundary = 1;
1270 args_size->constant = (((args_size->constant
1271 + stack_pointer_delta
1272 + preferred_stack_boundary - 1)
1273 / preferred_stack_boundary
1274 * preferred_stack_boundary)
1275 - stack_pointer_delta);
1277 args_size->constant = MAX (args_size->constant,
1278 reg_parm_stack_space);
1280 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1281 args_size->constant -= reg_parm_stack_space;
1283 return unadjusted_args_size;
1286 /* Precompute parameters as needed for a function call.
1288 FLAGS is mask of ECF_* constants.
1290 NUM_ACTUALS is the number of arguments.
1292 ARGS is an array containing information for each argument; this
1293 routine fills in the INITIAL_VALUE and VALUE fields for each
1294 precomputed argument. */
1297 precompute_arguments (int num_actuals, struct arg_data *args)
1301 /* If this is a libcall, then precompute all arguments so that we do not
1302 get extraneous instructions emitted as part of the libcall sequence. */
1304 /* If we preallocated the stack space, and some arguments must be passed
1305 on the stack, then we must precompute any parameter which contains a
1306 function call which will store arguments on the stack.
1307 Otherwise, evaluating the parameter may clobber previous parameters
1308 which have already been stored into the stack. (we have code to avoid
1309 such case by saving the outgoing stack arguments, but it results in
1311 if (!ACCUMULATE_OUTGOING_ARGS)
1314 for (i = 0; i < num_actuals; i++)
1317 enum machine_mode mode;
1319 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1322 /* If this is an addressable type, we cannot pre-evaluate it. */
1323 type = TREE_TYPE (args[i].tree_value);
1324 gcc_assert (!TREE_ADDRESSABLE (type));
1326 args[i].initial_value = args[i].value
1327 = expand_normal (args[i].tree_value);
1329 mode = TYPE_MODE (type);
1330 if (mode != args[i].mode)
1332 int unsignedp = args[i].unsignedp;
1334 = convert_modes (args[i].mode, mode,
1335 args[i].value, args[i].unsignedp);
1337 /* CSE will replace this only if it contains args[i].value
1338 pseudo, so convert it down to the declared mode using
1340 if (REG_P (args[i].value)
1341 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1342 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1344 args[i].initial_value
1345 = gen_lowpart_SUBREG (mode, args[i].value);
1346 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1347 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1354 /* Given the current state of MUST_PREALLOCATE and information about
1355 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1356 compute and return the final value for MUST_PREALLOCATE. */
1359 finalize_must_preallocate (int must_preallocate, int num_actuals,
1360 struct arg_data *args, struct args_size *args_size)
1362 /* See if we have or want to preallocate stack space.
1364 If we would have to push a partially-in-regs parm
1365 before other stack parms, preallocate stack space instead.
1367 If the size of some parm is not a multiple of the required stack
1368 alignment, we must preallocate.
1370 If the total size of arguments that would otherwise create a copy in
1371 a temporary (such as a CALL) is more than half the total argument list
1372 size, preallocation is faster.
1374 Another reason to preallocate is if we have a machine (like the m88k)
1375 where stack alignment is required to be maintained between every
1376 pair of insns, not just when the call is made. However, we assume here
1377 that such machines either do not have push insns (and hence preallocation
1378 would occur anyway) or the problem is taken care of with
1381 if (! must_preallocate)
1383 int partial_seen = 0;
1384 int copy_to_evaluate_size = 0;
1387 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1389 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1391 else if (partial_seen && args[i].reg == 0)
1392 must_preallocate = 1;
1394 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1395 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1396 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1397 || TREE_CODE (args[i].tree_value) == COND_EXPR
1398 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1399 copy_to_evaluate_size
1400 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1403 if (copy_to_evaluate_size * 2 >= args_size->constant
1404 && args_size->constant > 0)
1405 must_preallocate = 1;
1407 return must_preallocate;
1410 /* If we preallocated stack space, compute the address of each argument
1411 and store it into the ARGS array.
1413 We need not ensure it is a valid memory address here; it will be
1414 validized when it is used.
1416 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1419 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1423 rtx arg_reg = argblock;
1424 int i, arg_offset = 0;
1426 if (GET_CODE (argblock) == PLUS)
1427 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1429 for (i = 0; i < num_actuals; i++)
1431 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1432 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1434 unsigned int align, boundary;
1435 unsigned int units_on_stack = 0;
1436 enum machine_mode partial_mode = VOIDmode;
1438 /* Skip this parm if it will not be passed on the stack. */
1439 if (! args[i].pass_on_stack
1441 && args[i].partial == 0)
1444 if (CONST_INT_P (offset))
1445 addr = plus_constant (arg_reg, INTVAL (offset));
1447 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1449 addr = plus_constant (addr, arg_offset);
1451 if (args[i].partial != 0)
1453 /* Only part of the parameter is being passed on the stack.
1454 Generate a simple memory reference of the correct size. */
1455 units_on_stack = args[i].locate.size.constant;
1456 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1458 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1459 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1463 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1464 set_mem_attributes (args[i].stack,
1465 TREE_TYPE (args[i].tree_value), 1);
1467 align = BITS_PER_UNIT;
1468 boundary = args[i].locate.boundary;
1469 if (args[i].locate.where_pad != downward)
1471 else if (CONST_INT_P (offset))
1473 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1474 align = align & -align;
1476 set_mem_align (args[i].stack, align);
1478 if (CONST_INT_P (slot_offset))
1479 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1481 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1483 addr = plus_constant (addr, arg_offset);
1485 if (args[i].partial != 0)
1487 /* Only part of the parameter is being passed on the stack.
1488 Generate a simple memory reference of the correct size.
1490 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1491 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1495 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1496 set_mem_attributes (args[i].stack_slot,
1497 TREE_TYPE (args[i].tree_value), 1);
1499 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1501 /* Function incoming arguments may overlap with sibling call
1502 outgoing arguments and we cannot allow reordering of reads
1503 from function arguments with stores to outgoing arguments
1504 of sibling calls. */
1505 set_mem_alias_set (args[i].stack, 0);
1506 set_mem_alias_set (args[i].stack_slot, 0);
1511 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1512 in a call instruction.
1514 FNDECL is the tree node for the target function. For an indirect call
1515 FNDECL will be NULL_TREE.
1517 ADDR is the operand 0 of CALL_EXPR for this call. */
1520 rtx_for_function_call (tree fndecl, tree addr)
1524 /* Get the function to call, in the form of RTL. */
1527 /* If this is the first use of the function, see if we need to
1528 make an external definition for it. */
1529 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1531 assemble_external (fndecl);
1532 TREE_USED (fndecl) = 1;
1535 /* Get a SYMBOL_REF rtx for the function address. */
1536 funexp = XEXP (DECL_RTL (fndecl), 0);
1539 /* Generate an rtx (probably a pseudo-register) for the address. */
1542 funexp = expand_normal (addr);
1543 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1548 /* Return true if and only if SIZE storage units (usually bytes)
1549 starting from address ADDR overlap with already clobbered argument
1550 area. This function is used to determine if we should give up a
1554 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1558 if (addr == crtl->args.internal_arg_pointer)
1560 else if (GET_CODE (addr) == PLUS
1561 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
1562 && CONST_INT_P (XEXP (addr, 1)))
1563 i = INTVAL (XEXP (addr, 1));
1564 /* Return true for arg pointer based indexed addressing. */
1565 else if (GET_CODE (addr) == PLUS
1566 && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
1567 || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
1572 #ifdef ARGS_GROW_DOWNWARD
1577 unsigned HOST_WIDE_INT k;
1579 for (k = 0; k < size; k++)
1580 if (i + k < stored_args_map->n_bits
1581 && TEST_BIT (stored_args_map, i + k))
1588 /* Do the register loads required for any wholly-register parms or any
1589 parms which are passed both on the stack and in a register. Their
1590 expressions were already evaluated.
1592 Mark all register-parms as living through the call, putting these USE
1593 insns in the CALL_INSN_FUNCTION_USAGE field.
1595 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1596 checking, setting *SIBCALL_FAILURE if appropriate. */
1599 load_register_parameters (struct arg_data *args, int num_actuals,
1600 rtx *call_fusage, int flags, int is_sibcall,
1601 int *sibcall_failure)
1605 for (i = 0; i < num_actuals; i++)
1607 rtx reg = ((flags & ECF_SIBCALL)
1608 ? args[i].tail_call_reg : args[i].reg);
1611 int partial = args[i].partial;
1614 rtx before_arg = get_last_insn ();
1615 /* Set non-negative if we must move a word at a time, even if
1616 just one word (e.g, partial == 4 && mode == DFmode). Set
1617 to -1 if we just use a normal move insn. This value can be
1618 zero if the argument is a zero size structure. */
1620 if (GET_CODE (reg) == PARALLEL)
1624 gcc_assert (partial % UNITS_PER_WORD == 0);
1625 nregs = partial / UNITS_PER_WORD;
1627 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1629 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1630 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1633 size = GET_MODE_SIZE (args[i].mode);
1635 /* Handle calls that pass values in multiple non-contiguous
1636 locations. The Irix 6 ABI has examples of this. */
1638 if (GET_CODE (reg) == PARALLEL)
1639 emit_group_move (reg, args[i].parallel_value);
1641 /* If simple case, just do move. If normal partial, store_one_arg
1642 has already loaded the register for us. In all other cases,
1643 load the register(s) from memory. */
1645 else if (nregs == -1)
1647 emit_move_insn (reg, args[i].value);
1648 #ifdef BLOCK_REG_PADDING
1649 /* Handle case where we have a value that needs shifting
1650 up to the msb. eg. a QImode value and we're padding
1651 upward on a BYTES_BIG_ENDIAN machine. */
1652 if (size < UNITS_PER_WORD
1653 && (args[i].locate.where_pad
1654 == (BYTES_BIG_ENDIAN ? upward : downward)))
1657 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1659 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1660 report the whole reg as used. Strictly speaking, the
1661 call only uses SIZE bytes at the msb end, but it doesn't
1662 seem worth generating rtl to say that. */
1663 reg = gen_rtx_REG (word_mode, REGNO (reg));
1664 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1665 build_int_cst (NULL_TREE, shift),
1668 emit_move_insn (reg, x);
1673 /* If we have pre-computed the values to put in the registers in
1674 the case of non-aligned structures, copy them in now. */
1676 else if (args[i].n_aligned_regs != 0)
1677 for (j = 0; j < args[i].n_aligned_regs; j++)
1678 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1679 args[i].aligned_regs[j]);
1681 else if (partial == 0 || args[i].pass_on_stack)
1683 rtx mem = validize_mem (args[i].value);
1685 /* Check for overlap with already clobbered argument area,
1686 providing that this has non-zero size. */
1689 || mem_overlaps_already_clobbered_arg_p
1690 (XEXP (args[i].value, 0), size)))
1691 *sibcall_failure = 1;
1693 /* Handle a BLKmode that needs shifting. */
1694 if (nregs == 1 && size < UNITS_PER_WORD
1695 #ifdef BLOCK_REG_PADDING
1696 && args[i].locate.where_pad == downward
1702 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1703 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1704 rtx x = gen_reg_rtx (word_mode);
1705 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1706 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1709 emit_move_insn (x, tem);
1710 x = expand_shift (dir, word_mode, x,
1711 build_int_cst (NULL_TREE, shift),
1714 emit_move_insn (ri, x);
1717 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1720 /* When a parameter is a block, and perhaps in other cases, it is
1721 possible that it did a load from an argument slot that was
1722 already clobbered. */
1724 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1725 *sibcall_failure = 1;
1727 /* Handle calls that pass values in multiple non-contiguous
1728 locations. The Irix 6 ABI has examples of this. */
1729 if (GET_CODE (reg) == PARALLEL)
1730 use_group_regs (call_fusage, reg);
1731 else if (nregs == -1)
1732 use_reg (call_fusage, reg);
1734 use_regs (call_fusage, REGNO (reg), nregs);
1739 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1740 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1741 bytes, then we would need to push some additional bytes to pad the
1742 arguments. So, we compute an adjust to the stack pointer for an
1743 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1744 bytes. Then, when the arguments are pushed the stack will be perfectly
1745 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1746 be popped after the call. Returns the adjustment. */
1749 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1750 struct args_size *args_size,
1751 unsigned int preferred_unit_stack_boundary)
1753 /* The number of bytes to pop so that the stack will be
1754 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1755 HOST_WIDE_INT adjustment;
1756 /* The alignment of the stack after the arguments are pushed, if we
1757 just pushed the arguments without adjust the stack here. */
1758 unsigned HOST_WIDE_INT unadjusted_alignment;
1760 unadjusted_alignment
1761 = ((stack_pointer_delta + unadjusted_args_size)
1762 % preferred_unit_stack_boundary);
1764 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1765 as possible -- leaving just enough left to cancel out the
1766 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1767 PENDING_STACK_ADJUST is non-negative, and congruent to
1768 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1770 /* Begin by trying to pop all the bytes. */
1771 unadjusted_alignment
1772 = (unadjusted_alignment
1773 - (pending_stack_adjust % preferred_unit_stack_boundary));
1774 adjustment = pending_stack_adjust;
1775 /* Push enough additional bytes that the stack will be aligned
1776 after the arguments are pushed. */
1777 if (preferred_unit_stack_boundary > 1)
1779 if (unadjusted_alignment > 0)
1780 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1782 adjustment += unadjusted_alignment;
1785 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1786 bytes after the call. The right number is the entire
1787 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1788 by the arguments in the first place. */
1790 = pending_stack_adjust - adjustment + unadjusted_args_size;
1795 /* Scan X expression if it does not dereference any argument slots
1796 we already clobbered by tail call arguments (as noted in stored_args_map
1798 Return nonzero if X expression dereferences such argument slots,
1802 check_sibcall_argument_overlap_1 (rtx x)
1811 code = GET_CODE (x);
1814 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1815 GET_MODE_SIZE (GET_MODE (x)));
1817 /* Scan all subexpressions. */
1818 fmt = GET_RTX_FORMAT (code);
1819 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1823 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1826 else if (*fmt == 'E')
1828 for (j = 0; j < XVECLEN (x, i); j++)
1829 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1836 /* Scan sequence after INSN if it does not dereference any argument slots
1837 we already clobbered by tail call arguments (as noted in stored_args_map
1838 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to
1839 stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
1840 should be 0). Return nonzero if sequence after INSN dereferences such argument
1841 slots, zero otherwise. */
1844 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1848 if (insn == NULL_RTX)
1849 insn = get_insns ();
1851 insn = NEXT_INSN (insn);
1853 for (; insn; insn = NEXT_INSN (insn))
1855 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1858 if (mark_stored_args_map)
1860 #ifdef ARGS_GROW_DOWNWARD
1861 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1863 low = arg->locate.slot_offset.constant;
1866 for (high = low + arg->locate.size.constant; low < high; low++)
1867 SET_BIT (stored_args_map, low);
1869 return insn != NULL_RTX;
1872 /* Given that a function returns a value of mode MODE at the most
1873 significant end of hard register VALUE, shift VALUE left or right
1874 as specified by LEFT_P. Return true if some action was needed. */
1877 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1879 HOST_WIDE_INT shift;
1881 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1882 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1886 /* Use ashr rather than lshr for right shifts. This is for the benefit
1887 of the MIPS port, which requires SImode values to be sign-extended
1888 when stored in 64-bit registers. */
1889 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1890 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1895 /* If X is a likely-spilled register value, copy it to a pseudo
1896 register and return that register. Return X otherwise. */
1899 avoid_likely_spilled_reg (rtx x)
1904 && HARD_REGISTER_P (x)
1905 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
1907 /* Make sure that we generate a REG rather than a CONCAT.
1908 Moves into CONCATs can need nontrivial instructions,
1909 and the whole point of this function is to avoid
1910 using the hard register directly in such a situation. */
1911 generating_concat_p = 0;
1912 new_rtx = gen_reg_rtx (GET_MODE (x));
1913 generating_concat_p = 1;
1914 emit_move_insn (new_rtx, x);
1920 /* Generate all the code for a CALL_EXPR exp
1921 and return an rtx for its value.
1922 Store the value in TARGET (specified as an rtx) if convenient.
1923 If the value is stored in TARGET then TARGET is returned.
1924 If IGNORE is nonzero, then we ignore the value of the function call. */
1927 expand_call (tree exp, rtx target, int ignore)
1929 /* Nonzero if we are currently expanding a call. */
1930 static int currently_expanding_call = 0;
1932 /* RTX for the function to be called. */
1934 /* Sequence of insns to perform a normal "call". */
1935 rtx normal_call_insns = NULL_RTX;
1936 /* Sequence of insns to perform a tail "call". */
1937 rtx tail_call_insns = NULL_RTX;
1938 /* Data type of the function. */
1940 tree type_arg_types;
1942 /* Declaration of the function being called,
1943 or 0 if the function is computed (not known by name). */
1945 /* The type of the function being called. */
1947 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1950 /* Register in which non-BLKmode value will be returned,
1951 or 0 if no value or if value is BLKmode. */
1953 /* Address where we should return a BLKmode value;
1954 0 if value not BLKmode. */
1955 rtx structure_value_addr = 0;
1956 /* Nonzero if that address is being passed by treating it as
1957 an extra, implicit first parameter. Otherwise,
1958 it is passed by being copied directly into struct_value_rtx. */
1959 int structure_value_addr_parm = 0;
1960 /* Holds the value of implicit argument for the struct value. */
1961 tree structure_value_addr_value = NULL_TREE;
1962 /* Size of aggregate value wanted, or zero if none wanted
1963 or if we are using the non-reentrant PCC calling convention
1964 or expecting the value in registers. */
1965 HOST_WIDE_INT struct_value_size = 0;
1966 /* Nonzero if called function returns an aggregate in memory PCC style,
1967 by returning the address of where to find it. */
1968 int pcc_struct_value = 0;
1969 rtx struct_value = 0;
1971 /* Number of actual parameters in this call, including struct value addr. */
1973 /* Number of named args. Args after this are anonymous ones
1974 and they must all go on the stack. */
1976 /* Number of complex actual arguments that need to be split. */
1977 int num_complex_actuals = 0;
1979 /* Vector of information about each argument.
1980 Arguments are numbered in the order they will be pushed,
1981 not the order they are written. */
1982 struct arg_data *args;
1984 /* Total size in bytes of all the stack-parms scanned so far. */
1985 struct args_size args_size;
1986 struct args_size adjusted_args_size;
1987 /* Size of arguments before any adjustments (such as rounding). */
1988 int unadjusted_args_size;
1989 /* Data on reg parms scanned so far. */
1990 CUMULATIVE_ARGS args_so_far;
1991 /* Nonzero if a reg parm has been scanned. */
1993 /* Nonzero if this is an indirect function call. */
1995 /* Nonzero if we must avoid push-insns in the args for this call.
1996 If stack space is allocated for register parameters, but not by the
1997 caller, then it is preallocated in the fixed part of the stack frame.
1998 So the entire argument block must then be preallocated (i.e., we
1999 ignore PUSH_ROUNDING in that case). */
2001 int must_preallocate = !PUSH_ARGS;
2003 /* Size of the stack reserved for parameter registers. */
2004 int reg_parm_stack_space = 0;
2006 /* Address of space preallocated for stack parms
2007 (on machines that lack push insns), or 0 if space not preallocated. */
2010 /* Mask of ECF_ flags. */
2012 #ifdef REG_PARM_STACK_SPACE
2013 /* Define the boundary of the register parm stack space that needs to be
2015 int low_to_save, high_to_save;
2016 rtx save_area = 0; /* Place that it is saved */
2019 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2020 char *initial_stack_usage_map = stack_usage_map;
2021 char *stack_usage_map_buf = NULL;
2023 int old_stack_allocated;
2025 /* State variables to track stack modifications. */
2026 rtx old_stack_level = 0;
2027 int old_stack_arg_under_construction = 0;
2028 int old_pending_adj = 0;
2029 int old_inhibit_defer_pop = inhibit_defer_pop;
2031 /* Some stack pointer alterations we make are performed via
2032 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2033 which we then also need to save/restore along the way. */
2034 int old_stack_pointer_delta = 0;
2037 tree addr = CALL_EXPR_FN (exp);
2039 /* The alignment of the stack, in bits. */
2040 unsigned HOST_WIDE_INT preferred_stack_boundary;
2041 /* The alignment of the stack, in bytes. */
2042 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2043 /* The static chain value to use for this call. */
2044 rtx static_chain_value;
2045 /* See if this is "nothrow" function call. */
2046 if (TREE_NOTHROW (exp))
2047 flags |= ECF_NOTHROW;
2049 /* See if we can find a DECL-node for the actual function, and get the
2050 function attributes (flags) from the function decl or type node. */
2051 fndecl = get_callee_fndecl (exp);
2054 fntype = TREE_TYPE (fndecl);
2055 flags |= flags_from_decl_or_type (fndecl);
2059 fntype = TREE_TYPE (TREE_TYPE (addr));
2060 flags |= flags_from_decl_or_type (fntype);
2062 rettype = TREE_TYPE (exp);
2064 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2066 /* Warn if this value is an aggregate type,
2067 regardless of which calling convention we are using for it. */
2068 if (AGGREGATE_TYPE_P (rettype))
2069 warning (OPT_Waggregate_return, "function call has aggregate value");
2071 /* If the result of a non looping pure or const function call is
2072 ignored (or void), and none of its arguments are volatile, we can
2073 avoid expanding the call and just evaluate the arguments for
2075 if ((flags & (ECF_CONST | ECF_PURE))
2076 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2077 && (ignore || target == const0_rtx
2078 || TYPE_MODE (rettype) == VOIDmode))
2080 bool volatilep = false;
2082 call_expr_arg_iterator iter;
2084 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2085 if (TREE_THIS_VOLATILE (arg))
2093 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2094 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2099 #ifdef REG_PARM_STACK_SPACE
2100 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2103 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2104 && reg_parm_stack_space > 0 && PUSH_ARGS)
2105 must_preallocate = 1;
2107 /* Set up a place to return a structure. */
2109 /* Cater to broken compilers. */
2110 if (aggregate_value_p (exp, fntype))
2112 /* This call returns a big structure. */
2113 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2115 #ifdef PCC_STATIC_STRUCT_RETURN
2117 pcc_struct_value = 1;
2119 #else /* not PCC_STATIC_STRUCT_RETURN */
2121 struct_value_size = int_size_in_bytes (rettype);
2123 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2124 structure_value_addr = XEXP (target, 0);
2127 /* For variable-sized objects, we must be called with a target
2128 specified. If we were to allocate space on the stack here,
2129 we would have no way of knowing when to free it. */
2130 rtx d = assign_temp (rettype, 0, 1, 1);
2132 mark_temp_addr_taken (d);
2133 structure_value_addr = XEXP (d, 0);
2137 #endif /* not PCC_STATIC_STRUCT_RETURN */
2140 /* Figure out the amount to which the stack should be aligned. */
2141 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2144 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2145 /* Without automatic stack alignment, we can't increase preferred
2146 stack boundary. With automatic stack alignment, it is
2147 unnecessary since unless we can guarantee that all callers will
2148 align the outgoing stack properly, callee has to align its
2151 && i->preferred_incoming_stack_boundary
2152 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2153 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2156 /* Operand 0 is a pointer-to-function; get the type of the function. */
2157 funtype = TREE_TYPE (addr);
2158 gcc_assert (POINTER_TYPE_P (funtype));
2159 funtype = TREE_TYPE (funtype);
2161 /* Count whether there are actual complex arguments that need to be split
2162 into their real and imaginary parts. Munge the type_arg_types
2163 appropriately here as well. */
2164 if (targetm.calls.split_complex_arg)
2166 call_expr_arg_iterator iter;
2168 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2170 tree type = TREE_TYPE (arg);
2171 if (type && TREE_CODE (type) == COMPLEX_TYPE
2172 && targetm.calls.split_complex_arg (type))
2173 num_complex_actuals++;
2175 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2178 type_arg_types = TYPE_ARG_TYPES (funtype);
2180 if (flags & ECF_MAY_BE_ALLOCA)
2181 cfun->calls_alloca = 1;
2183 /* If struct_value_rtx is 0, it means pass the address
2184 as if it were an extra parameter. Put the argument expression
2185 in structure_value_addr_value. */
2186 if (structure_value_addr && struct_value == 0)
2188 /* If structure_value_addr is a REG other than
2189 virtual_outgoing_args_rtx, we can use always use it. If it
2190 is not a REG, we must always copy it into a register.
2191 If it is virtual_outgoing_args_rtx, we must copy it to another
2192 register in some cases. */
2193 rtx temp = (!REG_P (structure_value_addr)
2194 || (ACCUMULATE_OUTGOING_ARGS
2195 && stack_arg_under_construction
2196 && structure_value_addr == virtual_outgoing_args_rtx)
2197 ? copy_addr_to_reg (convert_memory_address
2198 (Pmode, structure_value_addr))
2199 : structure_value_addr);
2201 structure_value_addr_value =
2202 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2203 structure_value_addr_parm = 1;
2206 /* Count the arguments and set NUM_ACTUALS. */
2208 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2210 /* Compute number of named args.
2211 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2213 if (type_arg_types != 0)
2215 = (list_length (type_arg_types)
2216 /* Count the struct value address, if it is passed as a parm. */
2217 + structure_value_addr_parm);
2219 /* If we know nothing, treat all args as named. */
2220 n_named_args = num_actuals;
2222 /* Start updating where the next arg would go.
2224 On some machines (such as the PA) indirect calls have a different
2225 calling convention than normal calls. The fourth argument in
2226 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2228 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2230 /* Now possibly adjust the number of named args.
2231 Normally, don't include the last named arg if anonymous args follow.
2232 We do include the last named arg if
2233 targetm.calls.strict_argument_naming() returns nonzero.
2234 (If no anonymous args follow, the result of list_length is actually
2235 one too large. This is harmless.)
2237 If targetm.calls.pretend_outgoing_varargs_named() returns
2238 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2239 this machine will be able to place unnamed args that were passed
2240 in registers into the stack. So treat all args as named. This
2241 allows the insns emitting for a specific argument list to be
2242 independent of the function declaration.
2244 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2245 we do not have any reliable way to pass unnamed args in
2246 registers, so we must force them into memory. */
2248 if (type_arg_types != 0
2249 && targetm.calls.strict_argument_naming (&args_so_far))
2251 else if (type_arg_types != 0
2252 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2253 /* Don't include the last named arg. */
2256 /* Treat all args as named. */
2257 n_named_args = num_actuals;
2259 /* Make a vector to hold all the information about each arg. */
2260 args = XALLOCAVEC (struct arg_data, num_actuals);
2261 memset (args, 0, num_actuals * sizeof (struct arg_data));
2263 /* Build up entries in the ARGS array, compute the size of the
2264 arguments into ARGS_SIZE, etc. */
2265 initialize_argument_information (num_actuals, args, &args_size,
2267 structure_value_addr_value, fndecl, fntype,
2268 &args_so_far, reg_parm_stack_space,
2269 &old_stack_level, &old_pending_adj,
2270 &must_preallocate, &flags,
2271 &try_tail_call, CALL_FROM_THUNK_P (exp));
2274 must_preallocate = 1;
2276 /* Now make final decision about preallocating stack space. */
2277 must_preallocate = finalize_must_preallocate (must_preallocate,
2281 /* If the structure value address will reference the stack pointer, we
2282 must stabilize it. We don't need to do this if we know that we are
2283 not going to adjust the stack pointer in processing this call. */
2285 if (structure_value_addr
2286 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2287 || reg_mentioned_p (virtual_outgoing_args_rtx,
2288 structure_value_addr))
2290 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2291 structure_value_addr = copy_to_reg (structure_value_addr);
2293 /* Tail calls can make things harder to debug, and we've traditionally
2294 pushed these optimizations into -O2. Don't try if we're already
2295 expanding a call, as that means we're an argument. Don't try if
2296 there's cleanups, as we know there's code to follow the call. */
2298 if (currently_expanding_call++ != 0
2299 || !flag_optimize_sibling_calls
2301 || dbg_cnt (tail_call) == false)
2304 /* Rest of purposes for tail call optimizations to fail. */
2306 #ifdef HAVE_sibcall_epilogue
2307 !HAVE_sibcall_epilogue
2312 /* Doing sibling call optimization needs some work, since
2313 structure_value_addr can be allocated on the stack.
2314 It does not seem worth the effort since few optimizable
2315 sibling calls will return a structure. */
2316 || structure_value_addr != NULL_RTX
2317 #ifdef REG_PARM_STACK_SPACE
2318 /* If outgoing reg parm stack space changes, we can not do sibcall. */
2319 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2320 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2321 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2323 /* Check whether the target is able to optimize the call
2325 || !targetm.function_ok_for_sibcall (fndecl, exp)
2326 /* Functions that do not return exactly once may not be sibcall
2328 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2329 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2330 /* If the called function is nested in the current one, it might access
2331 some of the caller's arguments, but could clobber them beforehand if
2332 the argument areas are shared. */
2333 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2334 /* If this function requires more stack slots than the current
2335 function, we cannot change it into a sibling call.
2336 crtl->args.pretend_args_size is not part of the
2337 stack allocated by our caller. */
2338 || args_size.constant > (crtl->args.size
2339 - crtl->args.pretend_args_size)
2340 /* If the callee pops its own arguments, then it must pop exactly
2341 the same number of arguments as the current function. */
2342 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2343 != targetm.calls.return_pops_args (current_function_decl,
2344 TREE_TYPE (current_function_decl),
2346 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2349 /* Check if caller and callee disagree in promotion of function
2353 enum machine_mode caller_mode, caller_promoted_mode;
2354 enum machine_mode callee_mode, callee_promoted_mode;
2355 int caller_unsignedp, callee_unsignedp;
2356 tree caller_res = DECL_RESULT (current_function_decl);
2358 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2359 caller_mode = DECL_MODE (caller_res);
2360 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2361 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2362 caller_promoted_mode
2363 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2365 TREE_TYPE (current_function_decl), 1);
2366 callee_promoted_mode
2367 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2370 if (caller_mode != VOIDmode
2371 && (caller_promoted_mode != callee_promoted_mode
2372 || ((caller_mode != caller_promoted_mode
2373 || callee_mode != callee_promoted_mode)
2374 && (caller_unsignedp != callee_unsignedp
2375 || GET_MODE_BITSIZE (caller_mode)
2376 < GET_MODE_BITSIZE (callee_mode)))))
2380 /* Ensure current function's preferred stack boundary is at least
2381 what we need. Stack alignment may also increase preferred stack
2383 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2384 crtl->preferred_stack_boundary = preferred_stack_boundary;
2386 preferred_stack_boundary = crtl->preferred_stack_boundary;
2388 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2390 /* We want to make two insn chains; one for a sibling call, the other
2391 for a normal call. We will select one of the two chains after
2392 initial RTL generation is complete. */
2393 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2395 int sibcall_failure = 0;
2396 /* We want to emit any pending stack adjustments before the tail
2397 recursion "call". That way we know any adjustment after the tail
2398 recursion call can be ignored if we indeed use the tail
2400 int save_pending_stack_adjust = 0;
2401 int save_stack_pointer_delta = 0;
2403 rtx before_call, next_arg_reg, after_args;
2407 /* State variables we need to save and restore between
2409 save_pending_stack_adjust = pending_stack_adjust;
2410 save_stack_pointer_delta = stack_pointer_delta;
2413 flags &= ~ECF_SIBCALL;
2415 flags |= ECF_SIBCALL;
2417 /* Other state variables that we must reinitialize each time
2418 through the loop (that are not initialized by the loop itself). */
2422 /* Start a new sequence for the normal call case.
2424 From this point on, if the sibling call fails, we want to set
2425 sibcall_failure instead of continuing the loop. */
2428 /* Don't let pending stack adjusts add up to too much.
2429 Also, do all pending adjustments now if there is any chance
2430 this might be a call to alloca or if we are expanding a sibling
2432 Also do the adjustments before a throwing call, otherwise
2433 exception handling can fail; PR 19225. */
2434 if (pending_stack_adjust >= 32
2435 || (pending_stack_adjust > 0
2436 && (flags & ECF_MAY_BE_ALLOCA))
2437 || (pending_stack_adjust > 0
2438 && flag_exceptions && !(flags & ECF_NOTHROW))
2440 do_pending_stack_adjust ();
2442 /* Precompute any arguments as needed. */
2444 precompute_arguments (num_actuals, args);
2446 /* Now we are about to start emitting insns that can be deleted
2447 if a libcall is deleted. */
2448 if (pass && (flags & ECF_MALLOC))
2451 if (pass == 0 && crtl->stack_protect_guard)
2452 stack_protect_epilogue ();
2454 adjusted_args_size = args_size;
2455 /* Compute the actual size of the argument block required. The variable
2456 and constant sizes must be combined, the size may have to be rounded,
2457 and there may be a minimum required size. When generating a sibcall
2458 pattern, do not round up, since we'll be re-using whatever space our
2460 unadjusted_args_size
2461 = compute_argument_block_size (reg_parm_stack_space,
2462 &adjusted_args_size,
2465 : preferred_stack_boundary));
2467 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2469 /* The argument block when performing a sibling call is the
2470 incoming argument block. */
2473 argblock = crtl->args.internal_arg_pointer;
2475 #ifdef STACK_GROWS_DOWNWARD
2476 = plus_constant (argblock, crtl->args.pretend_args_size);
2478 = plus_constant (argblock, -crtl->args.pretend_args_size);
2480 stored_args_map = sbitmap_alloc (args_size.constant);
2481 sbitmap_zero (stored_args_map);
2484 /* If we have no actual push instructions, or shouldn't use them,
2485 make space for all args right now. */
2486 else if (adjusted_args_size.var != 0)
2488 if (old_stack_level == 0)
2490 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2491 old_stack_pointer_delta = stack_pointer_delta;
2492 old_pending_adj = pending_stack_adjust;
2493 pending_stack_adjust = 0;
2494 /* stack_arg_under_construction says whether a stack arg is
2495 being constructed at the old stack level. Pushing the stack
2496 gets a clean outgoing argument block. */
2497 old_stack_arg_under_construction = stack_arg_under_construction;
2498 stack_arg_under_construction = 0;
2500 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2501 if (flag_stack_usage)
2502 current_function_has_unbounded_dynamic_stack_size = 1;
2506 /* Note that we must go through the motions of allocating an argument
2507 block even if the size is zero because we may be storing args
2508 in the area reserved for register arguments, which may be part of
2511 int needed = adjusted_args_size.constant;
2513 /* Store the maximum argument space used. It will be pushed by
2514 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2517 if (needed > crtl->outgoing_args_size)
2518 crtl->outgoing_args_size = needed;
2520 if (must_preallocate)
2522 if (ACCUMULATE_OUTGOING_ARGS)
2524 /* Since the stack pointer will never be pushed, it is
2525 possible for the evaluation of a parm to clobber
2526 something we have already written to the stack.
2527 Since most function calls on RISC machines do not use
2528 the stack, this is uncommon, but must work correctly.
2530 Therefore, we save any area of the stack that was already
2531 written and that we are using. Here we set up to do this
2532 by making a new stack usage map from the old one. The
2533 actual save will be done by store_one_arg.
2535 Another approach might be to try to reorder the argument
2536 evaluations to avoid this conflicting stack usage. */
2538 /* Since we will be writing into the entire argument area,
2539 the map must be allocated for its entire size, not just
2540 the part that is the responsibility of the caller. */
2541 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2542 needed += reg_parm_stack_space;
2544 #ifdef ARGS_GROW_DOWNWARD
2545 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2548 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2551 if (stack_usage_map_buf)
2552 free (stack_usage_map_buf);
2553 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2554 stack_usage_map = stack_usage_map_buf;
2556 if (initial_highest_arg_in_use)
2557 memcpy (stack_usage_map, initial_stack_usage_map,
2558 initial_highest_arg_in_use);
2560 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2561 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2562 (highest_outgoing_arg_in_use
2563 - initial_highest_arg_in_use));
2566 /* The address of the outgoing argument list must not be
2567 copied to a register here, because argblock would be left
2568 pointing to the wrong place after the call to
2569 allocate_dynamic_stack_space below. */
2571 argblock = virtual_outgoing_args_rtx;
2575 if (inhibit_defer_pop == 0)
2577 /* Try to reuse some or all of the pending_stack_adjust
2578 to get this space. */
2580 = (combine_pending_stack_adjustment_and_call
2581 (unadjusted_args_size,
2582 &adjusted_args_size,
2583 preferred_unit_stack_boundary));
2585 /* combine_pending_stack_adjustment_and_call computes
2586 an adjustment before the arguments are allocated.
2587 Account for them and see whether or not the stack
2588 needs to go up or down. */
2589 needed = unadjusted_args_size - needed;
2593 /* We're releasing stack space. */
2594 /* ??? We can avoid any adjustment at all if we're
2595 already aligned. FIXME. */
2596 pending_stack_adjust = -needed;
2597 do_pending_stack_adjust ();
2601 /* We need to allocate space. We'll do that in
2602 push_block below. */
2603 pending_stack_adjust = 0;
2606 /* Special case this because overhead of `push_block' in
2607 this case is non-trivial. */
2609 argblock = virtual_outgoing_args_rtx;
2612 argblock = push_block (GEN_INT (needed), 0, 0);
2613 #ifdef ARGS_GROW_DOWNWARD
2614 argblock = plus_constant (argblock, needed);
2618 /* We only really need to call `copy_to_reg' in the case
2619 where push insns are going to be used to pass ARGBLOCK
2620 to a function call in ARGS. In that case, the stack
2621 pointer changes value from the allocation point to the
2622 call point, and hence the value of
2623 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2624 as well always do it. */
2625 argblock = copy_to_reg (argblock);
2630 if (ACCUMULATE_OUTGOING_ARGS)
2632 /* The save/restore code in store_one_arg handles all
2633 cases except one: a constructor call (including a C
2634 function returning a BLKmode struct) to initialize
2636 if (stack_arg_under_construction)
2639 = GEN_INT (adjusted_args_size.constant
2640 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2641 : TREE_TYPE (fndecl))) ? 0
2642 : reg_parm_stack_space));
2643 if (old_stack_level == 0)
2645 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2647 old_stack_pointer_delta = stack_pointer_delta;
2648 old_pending_adj = pending_stack_adjust;
2649 pending_stack_adjust = 0;
2650 /* stack_arg_under_construction says whether a stack
2651 arg is being constructed at the old stack level.
2652 Pushing the stack gets a clean outgoing argument
2654 old_stack_arg_under_construction
2655 = stack_arg_under_construction;
2656 stack_arg_under_construction = 0;
2657 /* Make a new map for the new argument list. */
2658 if (stack_usage_map_buf)
2659 free (stack_usage_map_buf);
2660 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2661 stack_usage_map = stack_usage_map_buf;
2662 highest_outgoing_arg_in_use = 0;
2664 /* We can pass TRUE as the 4th argument because we just
2665 saved the stack pointer and will restore it right after
2667 allocate_dynamic_stack_space (push_size, NULL_RTX,
2668 BITS_PER_UNIT, TRUE);
2671 /* If argument evaluation might modify the stack pointer,
2672 copy the address of the argument list to a register. */
2673 for (i = 0; i < num_actuals; i++)
2674 if (args[i].pass_on_stack)
2676 argblock = copy_addr_to_reg (argblock);
2681 compute_argument_addresses (args, argblock, num_actuals);
2683 /* If we push args individually in reverse order, perform stack alignment
2684 before the first push (the last arg). */
2685 if (PUSH_ARGS_REVERSED && argblock == 0
2686 && adjusted_args_size.constant != unadjusted_args_size)
2688 /* When the stack adjustment is pending, we get better code
2689 by combining the adjustments. */
2690 if (pending_stack_adjust
2691 && ! inhibit_defer_pop)
2693 pending_stack_adjust
2694 = (combine_pending_stack_adjustment_and_call
2695 (unadjusted_args_size,
2696 &adjusted_args_size,
2697 preferred_unit_stack_boundary));
2698 do_pending_stack_adjust ();
2700 else if (argblock == 0)
2701 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2702 - unadjusted_args_size));
2704 /* Now that the stack is properly aligned, pops can't safely
2705 be deferred during the evaluation of the arguments. */
2708 /* Record the maximum pushed stack space size. We need to delay
2709 doing it this far to take into account the optimization done
2710 by combine_pending_stack_adjustment_and_call. */
2711 if (flag_stack_usage
2712 && !ACCUMULATE_OUTGOING_ARGS
2714 && adjusted_args_size.var == 0)
2716 int pushed = adjusted_args_size.constant + pending_stack_adjust;
2717 if (pushed > current_function_pushed_stack_size)
2718 current_function_pushed_stack_size = pushed;
2721 funexp = rtx_for_function_call (fndecl, addr);
2723 /* Figure out the register where the value, if any, will come back. */
2725 if (TYPE_MODE (rettype) != VOIDmode
2726 && ! structure_value_addr)
2728 if (pcc_struct_value)
2729 valreg = hard_function_value (build_pointer_type (rettype),
2730 fndecl, NULL, (pass == 0));
2732 valreg = hard_function_value (rettype, fndecl, fntype,
2735 /* If VALREG is a PARALLEL whose first member has a zero
2736 offset, use that. This is for targets such as m68k that
2737 return the same value in multiple places. */
2738 if (GET_CODE (valreg) == PARALLEL)
2740 rtx elem = XVECEXP (valreg, 0, 0);
2741 rtx where = XEXP (elem, 0);
2742 rtx offset = XEXP (elem, 1);
2743 if (offset == const0_rtx
2744 && GET_MODE (where) == GET_MODE (valreg))
2749 /* Precompute all register parameters. It isn't safe to compute anything
2750 once we have started filling any specific hard regs. */
2751 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2753 if (CALL_EXPR_STATIC_CHAIN (exp))
2754 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2756 static_chain_value = 0;
2758 #ifdef REG_PARM_STACK_SPACE
2759 /* Save the fixed argument area if it's part of the caller's frame and
2760 is clobbered by argument setup for this call. */
2761 if (ACCUMULATE_OUTGOING_ARGS && pass)
2762 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2763 &low_to_save, &high_to_save);
2766 /* Now store (and compute if necessary) all non-register parms.
2767 These come before register parms, since they can require block-moves,
2768 which could clobber the registers used for register parms.
2769 Parms which have partial registers are not stored here,
2770 but we do preallocate space here if they want that. */
2772 for (i = 0; i < num_actuals; i++)
2774 if (args[i].reg == 0 || args[i].pass_on_stack)
2776 rtx before_arg = get_last_insn ();
2778 if (store_one_arg (&args[i], argblock, flags,
2779 adjusted_args_size.var != 0,
2780 reg_parm_stack_space)
2782 && check_sibcall_argument_overlap (before_arg,
2784 sibcall_failure = 1;
2787 if (((flags & ECF_CONST)
2788 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2790 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2791 gen_rtx_USE (VOIDmode,
2796 /* If we have a parm that is passed in registers but not in memory
2797 and whose alignment does not permit a direct copy into registers,
2798 make a group of pseudos that correspond to each register that we
2800 if (STRICT_ALIGNMENT)
2801 store_unaligned_arguments_into_pseudos (args, num_actuals);
2803 /* Now store any partially-in-registers parm.
2804 This is the last place a block-move can happen. */
2806 for (i = 0; i < num_actuals; i++)
2807 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2809 rtx before_arg = get_last_insn ();
2811 if (store_one_arg (&args[i], argblock, flags,
2812 adjusted_args_size.var != 0,
2813 reg_parm_stack_space)
2815 && check_sibcall_argument_overlap (before_arg,
2817 sibcall_failure = 1;
2820 /* If we pushed args in forward order, perform stack alignment
2821 after pushing the last arg. */
2822 if (!PUSH_ARGS_REVERSED && argblock == 0)
2823 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2824 - unadjusted_args_size));
2826 /* If register arguments require space on the stack and stack space
2827 was not preallocated, allocate stack space here for arguments
2828 passed in registers. */
2829 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2830 && !ACCUMULATE_OUTGOING_ARGS
2831 && must_preallocate == 0 && reg_parm_stack_space > 0)
2832 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2834 /* Pass the function the address in which to return a
2836 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2838 structure_value_addr
2839 = convert_memory_address (Pmode, structure_value_addr);
2840 emit_move_insn (struct_value,
2842 force_operand (structure_value_addr,
2845 if (REG_P (struct_value))
2846 use_reg (&call_fusage, struct_value);
2849 after_args = get_last_insn ();
2850 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
2851 &call_fusage, reg_parm_seen, pass == 0);
2853 load_register_parameters (args, num_actuals, &call_fusage, flags,
2854 pass == 0, &sibcall_failure);
2856 /* Save a pointer to the last insn before the call, so that we can
2857 later safely search backwards to find the CALL_INSN. */
2858 before_call = get_last_insn ();
2860 /* Set up next argument register. For sibling calls on machines
2861 with register windows this should be the incoming register. */
2863 next_arg_reg = targetm.calls.function_incoming_arg (&args_so_far,
2868 next_arg_reg = targetm.calls.function_arg (&args_so_far,
2869 VOIDmode, void_type_node,
2872 /* All arguments and registers used for the call must be set up by
2875 /* Stack must be properly aligned now. */
2877 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2879 /* Generate the actual call instruction. */
2880 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2881 adjusted_args_size.constant, struct_value_size,
2882 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2883 flags, & args_so_far);
2885 /* If the call setup or the call itself overlaps with anything
2886 of the argument setup we probably clobbered our call address.
2887 In that case we can't do sibcalls. */
2889 && check_sibcall_argument_overlap (after_args, 0, 0))
2890 sibcall_failure = 1;
2892 /* If a non-BLKmode value is returned at the most significant end
2893 of a register, shift the register right by the appropriate amount
2894 and update VALREG accordingly. BLKmode values are handled by the
2895 group load/store machinery below. */
2896 if (!structure_value_addr
2897 && !pcc_struct_value
2898 && TYPE_MODE (rettype) != BLKmode
2899 && targetm.calls.return_in_msb (rettype))
2901 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
2902 sibcall_failure = 1;
2903 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
2906 if (pass && (flags & ECF_MALLOC))
2908 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2911 /* The return value from a malloc-like function is a pointer. */
2912 if (TREE_CODE (rettype) == POINTER_TYPE)
2913 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2915 emit_move_insn (temp, valreg);
2917 /* The return value from a malloc-like function can not alias
2919 last = get_last_insn ();
2920 add_reg_note (last, REG_NOALIAS, temp);
2922 /* Write out the sequence. */
2923 insns = get_insns ();
2929 /* For calls to `setjmp', etc., inform
2930 function.c:setjmp_warnings that it should complain if
2931 nonvolatile values are live. For functions that cannot
2932 return, inform flow that control does not fall through. */
2934 if ((flags & ECF_NORETURN) || pass == 0)
2936 /* The barrier must be emitted
2937 immediately after the CALL_INSN. Some ports emit more
2938 than just a CALL_INSN above, so we must search for it here. */
2940 rtx last = get_last_insn ();
2941 while (!CALL_P (last))
2943 last = PREV_INSN (last);
2944 /* There was no CALL_INSN? */
2945 gcc_assert (last != before_call);
2948 emit_barrier_after (last);
2950 /* Stack adjustments after a noreturn call are dead code.
2951 However when NO_DEFER_POP is in effect, we must preserve
2952 stack_pointer_delta. */
2953 if (inhibit_defer_pop == 0)
2955 stack_pointer_delta = old_stack_allocated;
2956 pending_stack_adjust = 0;
2960 /* If value type not void, return an rtx for the value. */
2962 if (TYPE_MODE (rettype) == VOIDmode
2964 target = const0_rtx;
2965 else if (structure_value_addr)
2967 if (target == 0 || !MEM_P (target))
2970 = gen_rtx_MEM (TYPE_MODE (rettype),
2971 memory_address (TYPE_MODE (rettype),
2972 structure_value_addr));
2973 set_mem_attributes (target, rettype, 1);
2976 else if (pcc_struct_value)
2978 /* This is the special C++ case where we need to
2979 know what the true target was. We take care to
2980 never use this value more than once in one expression. */
2981 target = gen_rtx_MEM (TYPE_MODE (rettype),
2982 copy_to_reg (valreg));
2983 set_mem_attributes (target, rettype, 1);
2985 /* Handle calls that return values in multiple non-contiguous locations.
2986 The Irix 6 ABI has examples of this. */
2987 else if (GET_CODE (valreg) == PARALLEL)
2991 /* This will only be assigned once, so it can be readonly. */
2992 tree nt = build_qualified_type (rettype,
2993 (TYPE_QUALS (rettype)
2994 | TYPE_QUAL_CONST));
2996 target = assign_temp (nt, 0, 1, 1);
2999 if (! rtx_equal_p (target, valreg))
3000 emit_group_store (target, valreg, rettype,
3001 int_size_in_bytes (rettype));
3003 /* We can not support sibling calls for this case. */
3004 sibcall_failure = 1;
3007 && GET_MODE (target) == TYPE_MODE (rettype)
3008 && GET_MODE (target) == GET_MODE (valreg))
3010 bool may_overlap = false;
3012 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3013 reg to a plain register. */
3014 if (!REG_P (target) || HARD_REGISTER_P (target))
3015 valreg = avoid_likely_spilled_reg (valreg);
3017 /* If TARGET is a MEM in the argument area, and we have
3018 saved part of the argument area, then we can't store
3019 directly into TARGET as it may get overwritten when we
3020 restore the argument save area below. Don't work too
3021 hard though and simply force TARGET to a register if it
3022 is a MEM; the optimizer is quite likely to sort it out. */
3023 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3024 for (i = 0; i < num_actuals; i++)
3025 if (args[i].save_area)
3032 target = copy_to_reg (valreg);
3035 /* TARGET and VALREG cannot be equal at this point
3036 because the latter would not have
3037 REG_FUNCTION_VALUE_P true, while the former would if
3038 it were referring to the same register.
3040 If they refer to the same register, this move will be
3041 a no-op, except when function inlining is being
3043 emit_move_insn (target, valreg);
3045 /* If we are setting a MEM, this code must be executed.
3046 Since it is emitted after the call insn, sibcall
3047 optimization cannot be performed in that case. */
3049 sibcall_failure = 1;
3052 else if (TYPE_MODE (rettype) == BLKmode)
3055 if (GET_MODE (val) != BLKmode)
3056 val = avoid_likely_spilled_reg (val);
3057 target = copy_blkmode_from_reg (target, val, rettype);
3059 /* We can not support sibling calls for this case. */
3060 sibcall_failure = 1;
3063 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3065 /* If we promoted this return value, make the proper SUBREG.
3066 TARGET might be const0_rtx here, so be careful. */
3068 && TYPE_MODE (rettype) != BLKmode
3069 && GET_MODE (target) != TYPE_MODE (rettype))
3071 tree type = rettype;
3072 int unsignedp = TYPE_UNSIGNED (type);
3074 enum machine_mode pmode;
3076 /* Ensure we promote as expected, and get the new unsignedness. */
3077 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3079 gcc_assert (GET_MODE (target) == pmode);
3081 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3082 && (GET_MODE_SIZE (GET_MODE (target))
3083 > GET_MODE_SIZE (TYPE_MODE (type))))
3085 offset = GET_MODE_SIZE (GET_MODE (target))
3086 - GET_MODE_SIZE (TYPE_MODE (type));
3087 if (! BYTES_BIG_ENDIAN)
3088 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3089 else if (! WORDS_BIG_ENDIAN)
3090 offset %= UNITS_PER_WORD;
3093 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3094 SUBREG_PROMOTED_VAR_P (target) = 1;
3095 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3098 /* If size of args is variable or this was a constructor call for a stack
3099 argument, restore saved stack-pointer value. */
3101 if (old_stack_level)
3103 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3104 stack_pointer_delta = old_stack_pointer_delta;
3105 pending_stack_adjust = old_pending_adj;
3106 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3107 stack_arg_under_construction = old_stack_arg_under_construction;
3108 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3109 stack_usage_map = initial_stack_usage_map;
3110 sibcall_failure = 1;
3112 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3114 #ifdef REG_PARM_STACK_SPACE
3116 restore_fixed_argument_area (save_area, argblock,
3117 high_to_save, low_to_save);
3120 /* If we saved any argument areas, restore them. */
3121 for (i = 0; i < num_actuals; i++)
3122 if (args[i].save_area)
3124 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3126 = gen_rtx_MEM (save_mode,
3127 memory_address (save_mode,
3128 XEXP (args[i].stack_slot, 0)));
3130 if (save_mode != BLKmode)
3131 emit_move_insn (stack_area, args[i].save_area);
3133 emit_block_move (stack_area, args[i].save_area,
3134 GEN_INT (args[i].locate.size.constant),
3135 BLOCK_OP_CALL_PARM);
3138 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3139 stack_usage_map = initial_stack_usage_map;
3142 /* If this was alloca, record the new stack level for nonlocal gotos.
3143 Check for the handler slots since we might not have a save area
3144 for non-local gotos. */
3146 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3147 update_nonlocal_goto_save_area ();
3149 /* Free up storage we no longer need. */
3150 for (i = 0; i < num_actuals; ++i)
3151 if (args[i].aligned_regs)
3152 free (args[i].aligned_regs);
3154 insns = get_insns ();
3159 tail_call_insns = insns;
3161 /* Restore the pending stack adjustment now that we have
3162 finished generating the sibling call sequence. */
3164 pending_stack_adjust = save_pending_stack_adjust;
3165 stack_pointer_delta = save_stack_pointer_delta;
3167 /* Prepare arg structure for next iteration. */
3168 for (i = 0; i < num_actuals; i++)
3171 args[i].aligned_regs = 0;
3175 sbitmap_free (stored_args_map);
3179 normal_call_insns = insns;
3181 /* Verify that we've deallocated all the stack we used. */
3182 gcc_assert ((flags & ECF_NORETURN)
3183 || (old_stack_allocated
3184 == stack_pointer_delta - pending_stack_adjust));
3187 /* If something prevents making this a sibling call,
3188 zero out the sequence. */
3189 if (sibcall_failure)
3190 tail_call_insns = NULL_RTX;
3195 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3196 arguments too, as argument area is now clobbered by the call. */
3197 if (tail_call_insns)
3199 emit_insn (tail_call_insns);
3200 crtl->tail_call_emit = true;
3203 emit_insn (normal_call_insns);
3205 currently_expanding_call--;
3207 if (stack_usage_map_buf)
3208 free (stack_usage_map_buf);
3213 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3214 this function's incoming arguments.
3216 At the start of RTL generation we know the only REG_EQUIV notes
3217 in the rtl chain are those for incoming arguments, so we can look
3218 for REG_EQUIV notes between the start of the function and the
3219 NOTE_INSN_FUNCTION_BEG.
3221 This is (slight) overkill. We could keep track of the highest
3222 argument we clobber and be more selective in removing notes, but it
3223 does not seem to be worth the effort. */
3226 fixup_tail_calls (void)
3230 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3234 /* There are never REG_EQUIV notes for the incoming arguments
3235 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3237 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3240 note = find_reg_note (insn, REG_EQUIV, 0);
3242 remove_note (insn, note);
3243 note = find_reg_note (insn, REG_EQUIV, 0);
3248 /* Traverse a list of TYPES and expand all complex types into their
3251 split_complex_types (tree types)
3255 /* Before allocating memory, check for the common case of no complex. */
3256 for (p = types; p; p = TREE_CHAIN (p))
3258 tree type = TREE_VALUE (p);
3259 if (TREE_CODE (type) == COMPLEX_TYPE
3260 && targetm.calls.split_complex_arg (type))
3266 types = copy_list (types);
3268 for (p = types; p; p = TREE_CHAIN (p))
3270 tree complex_type = TREE_VALUE (p);
3272 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3273 && targetm.calls.split_complex_arg (complex_type))
3277 /* Rewrite complex type with component type. */
3278 TREE_VALUE (p) = TREE_TYPE (complex_type);
3279 next = TREE_CHAIN (p);
3281 /* Add another component type for the imaginary part. */
3282 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3283 TREE_CHAIN (p) = imag;
3284 TREE_CHAIN (imag) = next;
3286 /* Skip the newly created node. */
3294 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3295 The RETVAL parameter specifies whether return value needs to be saved, other
3296 parameters are documented in the emit_library_call function below. */
3299 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3300 enum libcall_type fn_type,
3301 enum machine_mode outmode, int nargs, va_list p)
3303 /* Total size in bytes of all the stack-parms scanned so far. */
3304 struct args_size args_size;
3305 /* Size of arguments before any adjustments (such as rounding). */
3306 struct args_size original_args_size;
3309 /* Todo, choose the correct decl type of orgfun. Sadly this information
3310 isn't present here, so we default to native calling abi here. */
3311 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3312 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3316 CUMULATIVE_ARGS args_so_far;
3320 enum machine_mode mode;
3323 struct locate_and_pad_arg_data locate;
3327 int old_inhibit_defer_pop = inhibit_defer_pop;
3328 rtx call_fusage = 0;
3331 int pcc_struct_value = 0;
3332 int struct_value_size = 0;
3334 int reg_parm_stack_space = 0;
3337 tree tfom; /* type_for_mode (outmode, 0) */
3339 #ifdef REG_PARM_STACK_SPACE
3340 /* Define the boundary of the register parm stack space that needs to be
3342 int low_to_save = 0, high_to_save = 0;
3343 rtx save_area = 0; /* Place that it is saved. */
3346 /* Size of the stack reserved for parameter registers. */
3347 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3348 char *initial_stack_usage_map = stack_usage_map;
3349 char *stack_usage_map_buf = NULL;
3351 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3353 #ifdef REG_PARM_STACK_SPACE
3354 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3357 /* By default, library functions can not throw. */
3358 flags = ECF_NOTHROW;
3371 flags |= ECF_NORETURN;
3374 flags = ECF_NORETURN;
3376 case LCT_RETURNS_TWICE:
3377 flags = ECF_RETURNS_TWICE;
3382 /* Ensure current function's preferred stack boundary is at least
3384 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3385 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3387 /* If this kind of value comes back in memory,
3388 decide where in memory it should come back. */
3389 if (outmode != VOIDmode)
3391 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3392 if (aggregate_value_p (tfom, 0))
3394 #ifdef PCC_STATIC_STRUCT_RETURN
3396 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3397 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3398 pcc_struct_value = 1;
3400 value = gen_reg_rtx (outmode);
3401 #else /* not PCC_STATIC_STRUCT_RETURN */
3402 struct_value_size = GET_MODE_SIZE (outmode);
3403 if (value != 0 && MEM_P (value))
3406 mem_value = assign_temp (tfom, 0, 1, 1);
3408 /* This call returns a big structure. */
3409 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3413 tfom = void_type_node;
3415 /* ??? Unfinished: must pass the memory address as an argument. */
3417 /* Copy all the libcall-arguments out of the varargs data
3418 and into a vector ARGVEC.
3420 Compute how to pass each argument. We only support a very small subset
3421 of the full argument passing conventions to limit complexity here since
3422 library functions shouldn't have many args. */
3424 argvec = XALLOCAVEC (struct arg, nargs + 1);
3425 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3427 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3428 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3430 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3433 args_size.constant = 0;
3440 /* If there's a structure value address to be passed,
3441 either pass it in the special place, or pass it as an extra argument. */
3442 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3444 rtx addr = XEXP (mem_value, 0);
3448 /* Make sure it is a reasonable operand for a move or push insn. */
3449 if (!REG_P (addr) && !MEM_P (addr)
3450 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3451 addr = force_operand (addr, NULL_RTX);
3453 argvec[count].value = addr;
3454 argvec[count].mode = Pmode;
3455 argvec[count].partial = 0;
3457 argvec[count].reg = targetm.calls.function_arg (&args_so_far,
3458 Pmode, NULL_TREE, true);
3459 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3460 NULL_TREE, 1) == 0);
3462 locate_and_pad_parm (Pmode, NULL_TREE,
3463 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3466 argvec[count].reg != 0,
3468 0, NULL_TREE, &args_size, &argvec[count].locate);
3470 if (argvec[count].reg == 0 || argvec[count].partial != 0
3471 || reg_parm_stack_space > 0)
3472 args_size.constant += argvec[count].locate.size.constant;
3474 targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
3479 for (; count < nargs; count++)
3481 rtx val = va_arg (p, rtx);
3482 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3484 /* We cannot convert the arg value to the mode the library wants here;
3485 must do it earlier where we know the signedness of the arg. */
3486 gcc_assert (mode != BLKmode
3487 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3489 /* Make sure it is a reasonable operand for a move or push insn. */
3490 if (!REG_P (val) && !MEM_P (val)
3491 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3492 val = force_operand (val, NULL_RTX);
3494 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3498 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3500 /* If this was a CONST function, it is now PURE since it now
3502 if (flags & ECF_CONST)
3504 flags &= ~ECF_CONST;
3508 if (MEM_P (val) && !must_copy)
3512 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3514 emit_move_insn (slot, val);
3517 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3518 gen_rtx_USE (VOIDmode, slot),
3521 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3522 gen_rtx_CLOBBER (VOIDmode,
3527 val = force_operand (XEXP (slot, 0), NULL_RTX);
3530 argvec[count].value = val;
3531 argvec[count].mode = mode;
3533 argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
3536 argvec[count].partial
3537 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3539 locate_and_pad_parm (mode, NULL_TREE,
3540 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3543 argvec[count].reg != 0,
3545 argvec[count].partial,
3546 NULL_TREE, &args_size, &argvec[count].locate);
3548 gcc_assert (!argvec[count].locate.size.var);
3550 if (argvec[count].reg == 0 || argvec[count].partial != 0
3551 || reg_parm_stack_space > 0)
3552 args_size.constant += argvec[count].locate.size.constant;
3554 targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
3557 /* If this machine requires an external definition for library
3558 functions, write one out. */
3559 assemble_external_libcall (fun);
3561 original_args_size = args_size;
3562 args_size.constant = (((args_size.constant
3563 + stack_pointer_delta
3567 - stack_pointer_delta);
3569 args_size.constant = MAX (args_size.constant,
3570 reg_parm_stack_space);
3572 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3573 args_size.constant -= reg_parm_stack_space;
3575 if (args_size.constant > crtl->outgoing_args_size)
3576 crtl->outgoing_args_size = args_size.constant;
3578 if (flag_stack_usage && !ACCUMULATE_OUTGOING_ARGS)
3580 int pushed = args_size.constant + pending_stack_adjust;
3581 if (pushed > current_function_pushed_stack_size)
3582 current_function_pushed_stack_size = pushed;
3585 if (ACCUMULATE_OUTGOING_ARGS)
3587 /* Since the stack pointer will never be pushed, it is possible for
3588 the evaluation of a parm to clobber something we have already
3589 written to the stack. Since most function calls on RISC machines
3590 do not use the stack, this is uncommon, but must work correctly.
3592 Therefore, we save any area of the stack that was already written
3593 and that we are using. Here we set up to do this by making a new
3594 stack usage map from the old one.
3596 Another approach might be to try to reorder the argument
3597 evaluations to avoid this conflicting stack usage. */
3599 needed = args_size.constant;
3601 /* Since we will be writing into the entire argument area, the
3602 map must be allocated for its entire size, not just the part that
3603 is the responsibility of the caller. */
3604 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3605 needed += reg_parm_stack_space;
3607 #ifdef ARGS_GROW_DOWNWARD
3608 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3611 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3614 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3615 stack_usage_map = stack_usage_map_buf;
3617 if (initial_highest_arg_in_use)
3618 memcpy (stack_usage_map, initial_stack_usage_map,
3619 initial_highest_arg_in_use);
3621 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3622 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3623 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3626 /* We must be careful to use virtual regs before they're instantiated,
3627 and real regs afterwards. Loop optimization, for example, can create
3628 new libcalls after we've instantiated the virtual regs, and if we
3629 use virtuals anyway, they won't match the rtl patterns. */
3631 if (virtuals_instantiated)
3632 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3634 argblock = virtual_outgoing_args_rtx;
3639 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3642 /* If we push args individually in reverse order, perform stack alignment
3643 before the first push (the last arg). */
3644 if (argblock == 0 && PUSH_ARGS_REVERSED)
3645 anti_adjust_stack (GEN_INT (args_size.constant
3646 - original_args_size.constant));
3648 if (PUSH_ARGS_REVERSED)
3659 #ifdef REG_PARM_STACK_SPACE
3660 if (ACCUMULATE_OUTGOING_ARGS)
3662 /* The argument list is the property of the called routine and it
3663 may clobber it. If the fixed area has been used for previous
3664 parameters, we must save and restore it. */
3665 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3666 &low_to_save, &high_to_save);
3670 /* Push the args that need to be pushed. */
3672 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3673 are to be pushed. */
3674 for (count = 0; count < nargs; count++, argnum += inc)
3676 enum machine_mode mode = argvec[argnum].mode;
3677 rtx val = argvec[argnum].value;
3678 rtx reg = argvec[argnum].reg;
3679 int partial = argvec[argnum].partial;
3680 unsigned int parm_align = argvec[argnum].locate.boundary;
3681 int lower_bound = 0, upper_bound = 0, i;
3683 if (! (reg != 0 && partial == 0))
3685 if (ACCUMULATE_OUTGOING_ARGS)
3687 /* If this is being stored into a pre-allocated, fixed-size,
3688 stack area, save any previous data at that location. */
3690 #ifdef ARGS_GROW_DOWNWARD
3691 /* stack_slot is negative, but we want to index stack_usage_map
3692 with positive values. */
3693 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3694 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3696 lower_bound = argvec[argnum].locate.slot_offset.constant;
3697 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3701 /* Don't worry about things in the fixed argument area;
3702 it has already been saved. */
3703 if (i < reg_parm_stack_space)
3704 i = reg_parm_stack_space;
3705 while (i < upper_bound && stack_usage_map[i] == 0)
3708 if (i < upper_bound)
3710 /* We need to make a save area. */
3712 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3713 enum machine_mode save_mode
3714 = mode_for_size (size, MODE_INT, 1);
3716 = plus_constant (argblock,
3717 argvec[argnum].locate.offset.constant);
3719 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3721 if (save_mode == BLKmode)
3723 argvec[argnum].save_area
3724 = assign_stack_temp (BLKmode,
3725 argvec[argnum].locate.size.constant,
3728 emit_block_move (validize_mem (argvec[argnum].save_area),
3730 GEN_INT (argvec[argnum].locate.size.constant),
3731 BLOCK_OP_CALL_PARM);
3735 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3737 emit_move_insn (argvec[argnum].save_area, stack_area);
3742 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3743 partial, reg, 0, argblock,
3744 GEN_INT (argvec[argnum].locate.offset.constant),
3745 reg_parm_stack_space,
3746 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3748 /* Now mark the segment we just used. */
3749 if (ACCUMULATE_OUTGOING_ARGS)
3750 for (i = lower_bound; i < upper_bound; i++)
3751 stack_usage_map[i] = 1;
3755 if ((flags & ECF_CONST)
3756 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
3760 /* Indicate argument access so that alias.c knows that these
3763 use = plus_constant (argblock,
3764 argvec[argnum].locate.offset.constant);
3766 /* When arguments are pushed, trying to tell alias.c where
3767 exactly this argument is won't work, because the
3768 auto-increment causes confusion. So we merely indicate
3769 that we access something with a known mode somewhere on
3771 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3772 gen_rtx_SCRATCH (Pmode));
3773 use = gen_rtx_MEM (argvec[argnum].mode, use);
3774 use = gen_rtx_USE (VOIDmode, use);
3775 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3780 /* If we pushed args in forward order, perform stack alignment
3781 after pushing the last arg. */
3782 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3783 anti_adjust_stack (GEN_INT (args_size.constant
3784 - original_args_size.constant));
3786 if (PUSH_ARGS_REVERSED)
3791 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3793 /* Now load any reg parms into their regs. */
3795 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3796 are to be pushed. */
3797 for (count = 0; count < nargs; count++, argnum += inc)
3799 enum machine_mode mode = argvec[argnum].mode;
3800 rtx val = argvec[argnum].value;
3801 rtx reg = argvec[argnum].reg;
3802 int partial = argvec[argnum].partial;
3804 /* Handle calls that pass values in multiple non-contiguous
3805 locations. The PA64 has examples of this for library calls. */
3806 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3807 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3808 else if (reg != 0 && partial == 0)
3809 emit_move_insn (reg, val);
3814 /* Any regs containing parms remain in use through the call. */
3815 for (count = 0; count < nargs; count++)
3817 rtx reg = argvec[count].reg;
3818 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3819 use_group_regs (&call_fusage, reg);
3822 int partial = argvec[count].partial;
3826 gcc_assert (partial % UNITS_PER_WORD == 0);
3827 nregs = partial / UNITS_PER_WORD;
3828 use_regs (&call_fusage, REGNO (reg), nregs);
3831 use_reg (&call_fusage, reg);
3835 /* Pass the function the address in which to return a structure value. */
3836 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3838 emit_move_insn (struct_value,
3840 force_operand (XEXP (mem_value, 0),
3842 if (REG_P (struct_value))
3843 use_reg (&call_fusage, struct_value);
3846 /* Don't allow popping to be deferred, since then
3847 cse'ing of library calls could delete a call and leave the pop. */
3849 valreg = (mem_value == 0 && outmode != VOIDmode
3850 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3852 /* Stack must be properly aligned now. */
3853 gcc_assert (!(stack_pointer_delta
3854 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3856 before_call = get_last_insn ();
3858 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3859 will set inhibit_defer_pop to that value. */
3860 /* The return type is needed to decide how many bytes the function pops.
3861 Signedness plays no role in that, so for simplicity, we pretend it's
3862 always signed. We also assume that the list of arguments passed has
3863 no impact, so we pretend it is unknown. */
3865 emit_call_1 (fun, NULL,
3866 get_identifier (XSTR (orgfun, 0)),
3867 build_function_type (tfom, NULL_TREE),
3868 original_args_size.constant, args_size.constant,
3870 targetm.calls.function_arg (&args_so_far,
3871 VOIDmode, void_type_node, true),
3873 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3875 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3876 that it should complain if nonvolatile values are live. For
3877 functions that cannot return, inform flow that control does not
3880 if (flags & ECF_NORETURN)
3882 /* The barrier note must be emitted
3883 immediately after the CALL_INSN. Some ports emit more than
3884 just a CALL_INSN above, so we must search for it here. */
3886 rtx last = get_last_insn ();
3887 while (!CALL_P (last))
3889 last = PREV_INSN (last);
3890 /* There was no CALL_INSN? */
3891 gcc_assert (last != before_call);
3894 emit_barrier_after (last);
3897 /* Now restore inhibit_defer_pop to its actual original value. */
3902 /* Copy the value to the right place. */
3903 if (outmode != VOIDmode && retval)
3909 if (value != mem_value)
3910 emit_move_insn (value, mem_value);
3912 else if (GET_CODE (valreg) == PARALLEL)
3915 value = gen_reg_rtx (outmode);
3916 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3920 /* Convert to the proper mode if a promotion has been active. */
3921 if (GET_MODE (valreg) != outmode)
3923 int unsignedp = TYPE_UNSIGNED (tfom);
3925 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
3926 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
3927 == GET_MODE (valreg));
3928 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3932 emit_move_insn (value, valreg);
3938 if (ACCUMULATE_OUTGOING_ARGS)
3940 #ifdef REG_PARM_STACK_SPACE
3942 restore_fixed_argument_area (save_area, argblock,
3943 high_to_save, low_to_save);
3946 /* If we saved any argument areas, restore them. */
3947 for (count = 0; count < nargs; count++)
3948 if (argvec[count].save_area)
3950 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3951 rtx adr = plus_constant (argblock,
3952 argvec[count].locate.offset.constant);
3953 rtx stack_area = gen_rtx_MEM (save_mode,
3954 memory_address (save_mode, adr));
3956 if (save_mode == BLKmode)
3957 emit_block_move (stack_area,
3958 validize_mem (argvec[count].save_area),
3959 GEN_INT (argvec[count].locate.size.constant),
3960 BLOCK_OP_CALL_PARM);
3962 emit_move_insn (stack_area, argvec[count].save_area);
3965 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3966 stack_usage_map = initial_stack_usage_map;
3969 if (stack_usage_map_buf)
3970 free (stack_usage_map_buf);
3976 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3977 (emitting the queue unless NO_QUEUE is nonzero),
3978 for a value of mode OUTMODE,
3979 with NARGS different arguments, passed as alternating rtx values
3980 and machine_modes to convert them to.
3982 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
3983 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
3984 other types of library calls. */
3987 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3988 enum machine_mode outmode, int nargs, ...)
3992 va_start (p, nargs);
3993 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3997 /* Like emit_library_call except that an extra argument, VALUE,
3998 comes second and says where to store the result.
3999 (If VALUE is zero, this function chooses a convenient way
4000 to return the value.
4002 This function returns an rtx for where the value is to be found.
4003 If VALUE is nonzero, VALUE is returned. */
4006 emit_library_call_value (rtx orgfun, rtx value,
4007 enum libcall_type fn_type,
4008 enum machine_mode outmode, int nargs, ...)
4013 va_start (p, nargs);
4014 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4021 /* Store a single argument for a function call
4022 into the register or memory area where it must be passed.
4023 *ARG describes the argument value and where to pass it.
4025 ARGBLOCK is the address of the stack-block for all the arguments,
4026 or 0 on a machine where arguments are pushed individually.
4028 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4029 so must be careful about how the stack is used.
4031 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4032 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4033 that we need not worry about saving and restoring the stack.
4035 FNDECL is the declaration of the function we are calling.
4037 Return nonzero if this arg should cause sibcall failure,
4041 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4042 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4044 tree pval = arg->tree_value;
4048 int i, lower_bound = 0, upper_bound = 0;
4049 int sibcall_failure = 0;
4051 if (TREE_CODE (pval) == ERROR_MARK)
4054 /* Push a new temporary level for any temporaries we make for
4058 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4060 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4061 save any previous data at that location. */
4062 if (argblock && ! variable_size && arg->stack)
4064 #ifdef ARGS_GROW_DOWNWARD
4065 /* stack_slot is negative, but we want to index stack_usage_map
4066 with positive values. */
4067 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4068 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4072 lower_bound = upper_bound - arg->locate.size.constant;
4074 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4075 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4079 upper_bound = lower_bound + arg->locate.size.constant;
4083 /* Don't worry about things in the fixed argument area;
4084 it has already been saved. */
4085 if (i < reg_parm_stack_space)
4086 i = reg_parm_stack_space;
4087 while (i < upper_bound && stack_usage_map[i] == 0)
4090 if (i < upper_bound)
4092 /* We need to make a save area. */
4093 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4094 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4095 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4096 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4098 if (save_mode == BLKmode)
4100 tree ot = TREE_TYPE (arg->tree_value);
4101 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4102 | TYPE_QUAL_CONST));
4104 arg->save_area = assign_temp (nt, 0, 1, 1);
4105 preserve_temp_slots (arg->save_area);
4106 emit_block_move (validize_mem (arg->save_area), stack_area,
4107 GEN_INT (arg->locate.size.constant),
4108 BLOCK_OP_CALL_PARM);
4112 arg->save_area = gen_reg_rtx (save_mode);
4113 emit_move_insn (arg->save_area, stack_area);
4119 /* If this isn't going to be placed on both the stack and in registers,
4120 set up the register and number of words. */
4121 if (! arg->pass_on_stack)
4123 if (flags & ECF_SIBCALL)
4124 reg = arg->tail_call_reg;
4127 partial = arg->partial;
4130 /* Being passed entirely in a register. We shouldn't be called in
4132 gcc_assert (reg == 0 || partial != 0);
4134 /* If this arg needs special alignment, don't load the registers
4136 if (arg->n_aligned_regs != 0)
4139 /* If this is being passed partially in a register, we can't evaluate
4140 it directly into its stack slot. Otherwise, we can. */
4141 if (arg->value == 0)
4143 /* stack_arg_under_construction is nonzero if a function argument is
4144 being evaluated directly into the outgoing argument list and
4145 expand_call must take special action to preserve the argument list
4146 if it is called recursively.
4148 For scalar function arguments stack_usage_map is sufficient to
4149 determine which stack slots must be saved and restored. Scalar
4150 arguments in general have pass_on_stack == 0.
4152 If this argument is initialized by a function which takes the
4153 address of the argument (a C++ constructor or a C function
4154 returning a BLKmode structure), then stack_usage_map is
4155 insufficient and expand_call must push the stack around the
4156 function call. Such arguments have pass_on_stack == 1.
4158 Note that it is always safe to set stack_arg_under_construction,
4159 but this generates suboptimal code if set when not needed. */
4161 if (arg->pass_on_stack)
4162 stack_arg_under_construction++;
4164 arg->value = expand_expr (pval,
4166 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4167 ? NULL_RTX : arg->stack,
4168 VOIDmode, EXPAND_STACK_PARM);
4170 /* If we are promoting object (or for any other reason) the mode
4171 doesn't agree, convert the mode. */
4173 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4174 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4175 arg->value, arg->unsignedp);
4177 if (arg->pass_on_stack)
4178 stack_arg_under_construction--;
4181 /* Check for overlap with already clobbered argument area. */
4182 if ((flags & ECF_SIBCALL)
4183 && MEM_P (arg->value)
4184 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4185 arg->locate.size.constant))
4186 sibcall_failure = 1;
4188 /* Don't allow anything left on stack from computation
4189 of argument to alloca. */
4190 if (flags & ECF_MAY_BE_ALLOCA)
4191 do_pending_stack_adjust ();
4193 if (arg->value == arg->stack)
4194 /* If the value is already in the stack slot, we are done. */
4196 else if (arg->mode != BLKmode)
4199 unsigned int parm_align;
4201 /* Argument is a scalar, not entirely passed in registers.
4202 (If part is passed in registers, arg->partial says how much
4203 and emit_push_insn will take care of putting it there.)
4205 Push it, and if its size is less than the
4206 amount of space allocated to it,
4207 also bump stack pointer by the additional space.
4208 Note that in C the default argument promotions
4209 will prevent such mismatches. */
4211 size = GET_MODE_SIZE (arg->mode);
4212 /* Compute how much space the push instruction will push.
4213 On many machines, pushing a byte will advance the stack
4214 pointer by a halfword. */
4215 #ifdef PUSH_ROUNDING
4216 size = PUSH_ROUNDING (size);
4220 /* Compute how much space the argument should get:
4221 round up to a multiple of the alignment for arguments. */
4222 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4223 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4224 / (PARM_BOUNDARY / BITS_PER_UNIT))
4225 * (PARM_BOUNDARY / BITS_PER_UNIT));
4227 /* Compute the alignment of the pushed argument. */
4228 parm_align = arg->locate.boundary;
4229 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4231 int pad = used - size;
4234 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4235 parm_align = MIN (parm_align, pad_align);
4239 /* This isn't already where we want it on the stack, so put it there.
4240 This can either be done with push or copy insns. */
4241 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4242 parm_align, partial, reg, used - size, argblock,
4243 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4244 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4246 /* Unless this is a partially-in-register argument, the argument is now
4249 arg->value = arg->stack;
4253 /* BLKmode, at least partly to be pushed. */
4255 unsigned int parm_align;
4259 /* Pushing a nonscalar.
4260 If part is passed in registers, PARTIAL says how much
4261 and emit_push_insn will take care of putting it there. */
4263 /* Round its size up to a multiple
4264 of the allocation unit for arguments. */
4266 if (arg->locate.size.var != 0)
4269 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4273 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4274 for BLKmode is careful to avoid it. */
4275 excess = (arg->locate.size.constant
4276 - int_size_in_bytes (TREE_TYPE (pval))
4278 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4279 NULL_RTX, TYPE_MODE (sizetype),
4283 parm_align = arg->locate.boundary;
4285 /* When an argument is padded down, the block is aligned to
4286 PARM_BOUNDARY, but the actual argument isn't. */
4287 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4289 if (arg->locate.size.var)
4290 parm_align = BITS_PER_UNIT;
4293 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4294 parm_align = MIN (parm_align, excess_align);
4298 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4300 /* emit_push_insn might not work properly if arg->value and
4301 argblock + arg->locate.offset areas overlap. */
4305 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4306 || (GET_CODE (XEXP (x, 0)) == PLUS
4307 && XEXP (XEXP (x, 0), 0) ==
4308 crtl->args.internal_arg_pointer
4309 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4311 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4312 i = INTVAL (XEXP (XEXP (x, 0), 1));
4314 /* expand_call should ensure this. */
4315 gcc_assert (!arg->locate.offset.var
4316 && arg->locate.size.var == 0
4317 && CONST_INT_P (size_rtx));
4319 if (arg->locate.offset.constant > i)
4321 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4322 sibcall_failure = 1;
4324 else if (arg->locate.offset.constant < i)
4326 /* Use arg->locate.size.constant instead of size_rtx
4327 because we only care about the part of the argument
4329 if (i < (arg->locate.offset.constant
4330 + arg->locate.size.constant))
4331 sibcall_failure = 1;
4335 /* Even though they appear to be at the same location,
4336 if part of the outgoing argument is in registers,
4337 they aren't really at the same location. Check for
4338 this by making sure that the incoming size is the
4339 same as the outgoing size. */
4340 if (arg->locate.size.constant != INTVAL (size_rtx))
4341 sibcall_failure = 1;
4346 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4347 parm_align, partial, reg, excess, argblock,
4348 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4349 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4351 /* Unless this is a partially-in-register argument, the argument is now
4354 ??? Unlike the case above, in which we want the actual
4355 address of the data, so that we can load it directly into a
4356 register, here we want the address of the stack slot, so that
4357 it's properly aligned for word-by-word copying or something
4358 like that. It's not clear that this is always correct. */
4360 arg->value = arg->stack_slot;
4363 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4365 tree type = TREE_TYPE (arg->tree_value);
4367 = emit_group_load_into_temps (arg->reg, arg->value, type,
4368 int_size_in_bytes (type));
4371 /* Mark all slots this store used. */
4372 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4373 && argblock && ! variable_size && arg->stack)
4374 for (i = lower_bound; i < upper_bound; i++)
4375 stack_usage_map[i] = 1;
4377 /* Once we have pushed something, pops can't safely
4378 be deferred during the rest of the arguments. */
4381 /* Free any temporary slots made in processing this argument. Show
4382 that we might have taken the address of something and pushed that
4384 preserve_temp_slots (NULL_RTX);
4388 return sibcall_failure;
4391 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4394 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4400 /* If the type has variable size... */
4401 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4404 /* If the type is marked as addressable (it is required
4405 to be constructed into the stack)... */
4406 if (TREE_ADDRESSABLE (type))
4412 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4413 takes trailing padding of a structure into account. */
4414 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4417 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4422 /* If the type has variable size... */
4423 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4426 /* If the type is marked as addressable (it is required
4427 to be constructed into the stack)... */
4428 if (TREE_ADDRESSABLE (type))
4431 /* If the padding and mode of the type is such that a copy into
4432 a register would put it into the wrong part of the register. */
4434 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4435 && (FUNCTION_ARG_PADDING (mode, type)
4436 == (BYTES_BIG_ENDIAN ? upward : downward)))