/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
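/* Illustration: on a target with PREFERRED_STACK_BOUNDARY == 128 and
   BITS_PER_UNIT == 8, STACK_BYTES evaluates to 16, i.e. the stack is
   kept 16-byte aligned.  */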
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
	       && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
	funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      rtx chain;

      gcc_assert (fndecl);
      chain = targetm.calls.static_chain (fndecl, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	use_reg (call_fusage, chain);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn, call, funmem;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
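  /* For instance, with a callee-pop convention such as x86 stdcall,
     targetm.calls.return_pops_args returns the full STACK_SIZE, so the
     caller emits no popping code of its own after the call.  */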
  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations:
	 if an entry gets garbage collected in one compilation and a
	 different (but equivalent) entry is then added, the other
	 compilation doesn't run the garbage collector at the same spot
	 and keeps sharing the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && built_in_decls[DECL_FUNCTION_CODE (t)])
	t = built_in_decls[DECL_FUNCTION_CODE (t)];
      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				     next_arg_reg, n_pop);
      else
	pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			       n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				  next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			    n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
				     next_arg_reg,
				     GEN_INT (struct_value_size)));
    }
  else
#endif
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
					next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
				  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = PATTERN (call_insn);
  if (GET_CODE (call) == PARALLEL)
    call = XVECEXP (call, 0, 0);
  if (GET_CODE (call) == SET)
    call = SET_SRC (call);
  if (GET_CODE (call) == CALL
      && MEM_P (XEXP (call, 0))
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);
  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;
  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similarly to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */
static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_'
	      && name[2] == 'b'
	      && !strncmp (name + 3, "uiltin_", 7))
	    tname += 10;
	  else if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_NORETURN;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork"))
	       || (tname[0] == 'g' && tname[1] == 'e'
		   && !strcmp (tname, "getcontext")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_NORETURN;
    }

  return flags;
}
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}
/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
	  & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */
static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameter registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
		    > COSTS_N_INSNS (1)
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */
static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
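	/* For instance, with num_to_save == 4 and 8-bit units this asks
	   for a 32-bit integer mode (SImode on most targets); if no such
	   mode is available, mode_for_size returns BLKmode and the block
	   move below is used instead.  */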
	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	stack_area = gen_rtx_MEM (save_mode,
				  memory_address (save_mode,
						  plus_constant (argblock,
								 delta)));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
			    memory_address (save_mode,
					    plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */
static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }
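	/* Illustration: a 10-byte argument on a target with 4-byte words
	   needs (10 + 4 - 1) / 4 == 3 word-sized pseudos.  */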
	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
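	/* Example: a 3-byte structure on a 32-bit big-endian target gets
	   endian_correction == 32 - 24 == 8, skipping the unused
	   high-order bits of the word.  */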
	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
				      word_mode, word_mode);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word);
	  }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or 0.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree exp, tree struct_value_addr_value,
				 tree fndecl, tree fntype,
				 cumulative_args_t args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level, int *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }
  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
	args[j].tree_value = struct_value_addr_value;
	j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
	tree argtype = TREE_TYPE (arg);
	if (targetm.calls.split_complex_arg
	    && argtype
	    && TREE_CODE (argtype) == COMPLEX_TYPE
	    && targetm.calls.split_complex_arg (argtype))
	  {
	    tree subtype = TREE_TYPE (argtype);
	    args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
	    j += inc;
	    args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
	  }
	else
	  args[j].tree_value = arg;
	j += inc;
      }
  }
  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
	 we would pass the first field of the union or record.  We have
	 already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
	  && TYPE_TRANSPARENT_AGGR (type))
	type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many bytes are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
			     type, argpos < n_named_args))
	{
	  bool callee_copies;
	  tree base = NULL_TREE;

	  callee_copies
	    = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
				       type, argpos < n_named_args);

	  /* If we're compiling a thunk, pass through invisible references
	     instead of making a copy.  */
	  if (call_from_thunk_p
	      || (callee_copies
		  && !TREE_ADDRESSABLE (type)
		  && (base = get_base_address (args[i].tree_value))
		  && TREE_CODE (base) != SSA_NAME
		  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
	    {
	      mark_addressable (args[i].tree_value);

	      /* We can't use sibcalls if a callee-copied argument is
		 stored in the current function's frame.  */
	      if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
		*may_tailcall = false;

	      args[i].tree_value = build_fold_addr_expr_loc (loc,
							     args[i].tree_value);
	      type = TREE_TYPE (args[i].tree_value);

	      if (*ecf_flags & ECF_CONST)
		*ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
		  || (flag_stack_check == GENERIC_STACK_CHECK
		      && compare_tree_int (TYPE_SIZE_UNIT (type),
					   STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (args[i].tree_value);

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  /* We can pass TRUE as the 4th argument because we just
		     saved the stack pointer and will restore it right after
		     the call.  */
		  copy = allocate_dynamic_stack_space (size_rtx,
						       TYPE_ALIGN (type),
						       TYPE_ALIGN (type),
						       true);
		  copy = gen_rtx_MEM (BLKmode, copy);
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0, false);

	      /* Just change the const function to pure and then let
		 the next test clear the pure based on
		 callee_copies.  */
	      if (*ecf_flags & ECF_CONST)
		{
		  *ecf_flags &= ~ECF_CONST;
		  *ecf_flags |= ECF_PURE;
		}

	      if (!callee_copies && *ecf_flags & ECF_PURE)
		*ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

	      args[i].tree_value
		= build_fold_addr_expr_loc (loc, make_tree (type, copy));
	      type = TREE_TYPE (args[i].tree_value);
	      *may_tailcall = false;
	    }
	}
      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
				    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
						argpos < n_named_args);

      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
	args[i].tail_call_reg
	  = targetm.calls.function_incoming_arg (args_so_far, mode, type,
						 argpos < n_named_args);
      else
	args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
	args[i].partial
	  = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
					     argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     args[i].pass_on_stack ? 0 : args[i].partial,
			     fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	args[i].locate.where_pad =
	  BLOCK_REG_PADDING (mode, type,
			     int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
					  type, argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     tree fndecl ATTRIBUTE_UNUSED,
			     tree fntype ATTRIBUTE_UNUSED,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);
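      /* For example, with args_size->constant == 20,
	 stack_pointer_delta == 4 and a 16-byte preferred boundary,
	 20 + 4 == 24 rounds up to 32, so the constant becomes
	 32 - 4 == 28: pushing 28 bytes leaves the stack 16-byte
	 aligned.  */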
      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */
static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					    args[i].unsignedp);
	    }
	}
    }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
			   struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */
static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  unsigned int units_on_stack = 0;
	  enum machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  if (CONST_INT_P (offset))
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
					    MODE_INT, 1);
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, units_on_stack);
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  if (args[i].locate.where_pad != downward)
	    align = boundary;
	  else if (CONST_INT_P (offset))
	    {
	      align = INTVAL (offset) * BITS_PER_UNIT | boundary;
	      align = align & -align;
	    }
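	  /* E.g. an argument at byte offset 4 within a slot with a 64-bit
	     boundary gives 32 | 64 == 96; its lowest set bit yields a
	     known alignment of 32 bits.  */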
	  set_mem_align (args[i].stack, align);

	  if (CONST_INT_P (slot_offset))
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, units_on_stack);
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}
/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
{
  HOST_WIDE_INT i;

  if (addr == crtl->args.internal_arg_pointer)
    i = 0;
  else if (GET_CODE (addr) == PLUS
	   && XEXP (addr, 0) == crtl->args.internal_arg_pointer
	   && CONST_INT_P (XEXP (addr, 1)))
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
	   && (XEXP (addr, 0) == crtl->args.internal_arg_pointer
	       || XEXP (addr, 1) == crtl->args.internal_arg_pointer))
    return true;
  /* If the address comes in a register, we have no idea of its origin so
     give up and conservatively return true.  */
  else if (REG_P (addr))
    return true;
  else
    return false;

#ifdef ARGS_GROW_DOWNWARD
  i = -i - size;
#endif
  if (size > 0)
    {
      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
	if (i + k < stored_args_map->n_bits
	    && TEST_BIT (stored_args_map, i + k))
	  return true;
    }

  return false;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */
static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  int size = 0;
	  rtx before_arg = get_last_insn ();
	  /* Set non-negative if we must move a word at a time, even if
	     just one word (e.g, partial == 4 && mode == DFmode).  Set
	     to -1 if we just use a normal move insn.  This value can be
	     zero if the argument is a zero size structure.  */
	  nregs = -1;
	  if (GET_CODE (reg) == PARALLEL)
	    ;
	  else if (partial)
	    {
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	    }
	  else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
	    {
	      size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	      nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_move (reg, args[i].parallel_value);

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle case where we have a value that needs shifting
		 up to the msb.  eg. a QImode value and we're padding
		 upward on a BYTES_BIG_ENDIAN machine.  */
	      if (size < UNITS_PER_WORD
		  && (args[i].locate.where_pad
		      == (BYTES_BIG_ENDIAN ? upward : downward)))
		{
		  rtx x;
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
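		  /* E.g. a 1-byte (QImode) value in a 4-byte word gives
		     shift == (4 - 1) * 8 == 24, moving it up to the most
		     significant byte.  */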
		  /* Assigning REG here rather than a temp makes CALL_FUSAGE
		     report the whole reg as used.  Strictly speaking, the
		     call only uses SIZE bytes at the msb end, but it doesn't
		     seem worth generating rtl to say that.  */
		  reg = gen_rtx_REG (word_mode, REGNO (reg));
		  x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
		  if (x != reg)
		    emit_move_insn (reg, x);
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      rtx mem = validize_mem (args[i].value);

	      /* Check for overlap with already clobbered argument area,
		 providing that this has non-zero size.  */
	      if (is_sibcall
		  && (size == 0
		      || mem_overlaps_already_clobbered_arg_p
					   (XEXP (args[i].value, 0), size)))
		*sibcall_failure = 1;

	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		  && args[i].locate.where_pad == downward
#else
		  && BYTES_BIG_ENDIAN
#endif
		 )
		{
		  rtx tem = operand_subword_force (mem, 0, args[i].mode);
		  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
		  rtx x = gen_reg_rtx (word_mode);
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
		  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
							: LSHIFT_EXPR;

		  emit_move_insn (x, tem);
		  x = expand_shift (dir, word_mode, x, shift, ri, 1);
		  if (x != ri)
		    emit_move_insn (ri, x);
		}
	      else
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else if (nregs > 0)
	    use_regs (call_fusage, REGNO (reg), nregs);
	}
    }
}
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjustment to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */
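/* Worked example: with a preferred boundary of 16 bytes,
   stack_pointer_delta == 0, unadjusted_args_size == 20 and
   pending_stack_adjust == 32, the stack would end up 4 bytes past
   alignment after the push, so only 32 - 12 == 20 bytes are popped now;
   pushing the 20 bytes of arguments then leaves the stack aligned, and
   ARGS_SIZE->CONSTANT becomes 32 - 20 + 20 == 32 bytes to pop after the
   call.  */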
static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
					   struct args_size *args_size,
					   unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
	adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
	adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
/* Scan X expression if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).  Return nonzero if X expression dereferences such argument
   slots, zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return 0;

  if (code == MEM)
    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
						 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
	    return 1;
	}
      else if (*fmt == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
	      return 1;
	}
    }

  return 0;
}
/* Scan sequence after INSN if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for ARG to
   stored_args_map bitmap afterwards (when ARG is a register
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if sequence after
   INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
	SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
{
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
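  /* E.g. an SImode (32-bit) value returned in a 64-bit hard register
     gives shift == 32.  */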
1915 /* Use ashr rather than lshr for right shifts. This is for the benefit
1916 of the MIPS port, which requires SImode values to be sign-extended
1917 when stored in 64-bit registers. */
1918 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1919 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
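
/* For instance, on a hypothetical big-endian target returning HImode
   values in the most significant end of an SImode register, SHIFT is
   GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (HImode) = 16, so the
   register is shifted right by 16 bits before the value is consumed
   (or left by 16 bits, when LEFT_P, to place a value there).  */
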
/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
	 Moves into CONCATs can need nontrivial instructions,
	 and the whole point of this function is to avoid
	 using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  tree rettype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */
  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  tree addr = CALL_EXPR_FN (exp);
  int i;
  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;

  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function, and get the
     function attributes (flags) from the function decl or type node.  */
  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      flags |= flags_from_decl_or_type (fndecl);
    }
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (addr));
      flags |= flags_from_decl_or_type (fntype);
    }
  rettype = TREE_TYPE (exp);

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (rettype))
    warning (OPT_Waggregate_return, "function call has aggregate value");

  /* If the result of a non-looping pure or const function call is
     ignored (or void), and none of its arguments are volatile, we can
     avoid expanding the call and just evaluate the arguments for
     side effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (!(flags & ECF_LOOPING_CONST_OR_PURE))
      && (ignore || target == const0_rtx
	  || TYPE_MODE (rettype) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
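
  /* Illustrative case (hypothetical declarations): given

       __attribute__ ((pure)) int f (int *p);
       ...
       f (q);   (result unused)

     no call is emitted at all; Q is evaluated for side effects only,
     and const0_rtx stands in for the never-computed result.  */
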
#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
#endif

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
      && reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fntype))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (rettype);

	if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (rettype, 0, 1, 1);

	    mark_temp_addr_taken (d);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }
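
  /* A C-level sketch of the two conventions (illustrative):

       struct big { int x[8]; };
       struct big f (void);
       struct big b = f ();

     On the non-PCC path the caller passes the address of B (or of a
     temporary) for F to store through; on the PCC path F returns the
     address of static storage and the caller copies from it, which is
     why the PCC convention is non-reentrant.  */
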
  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      /* Without automatic stack alignment, we can't increase preferred
	 stack boundary.  With automatic stack alignment, it is
	 unnecessary since unless we can guarantee that all callers will
	 align the outgoing stack properly, callee has to align its
	 stack anyway.  */
      if (i
	  && i->preferred_incoming_stack_boundary
	  && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  gcc_assert (POINTER_TYPE_P (funtype));
  funtype = TREE_TYPE (funtype);

  /* Count whether there are actual complex arguments that need to be split
     into their real and imaginary parts.  Munge the type_arg_types
     appropriately here as well.  */
  if (targetm.calls.split_complex_arg)
    {
      call_expr_arg_iterator iter;
      tree arg;
      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	{
	  tree type = TREE_TYPE (arg);
	  if (type && TREE_CODE (type) == COMPLEX_TYPE
	      && targetm.calls.split_complex_arg (type))
	    num_complex_actuals++;
	}
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);
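
  /* For example, on a target whose split_complex_arg hook accepts
     _Complex double, a call to

       void f (_Complex double z);

     contributes one complex actual to NUM_COMPLEX_ACTUALS, and the
     munged TYPE_ARG_TYPES lists two double entries where the single
     COMPLEX_TYPE entry used to be.  */
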
  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = 1;

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  Put the argument expression
     in structure_value_addr_value.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (!REG_P (structure_value_addr)
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (convert_memory_address
				      (Pmode, structure_value_addr))
		  : structure_value_addr);

      structure_value_addr_value =
	make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  num_actuals =
    call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;

  /* Compute number of named args.
     First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */

  if (type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  /* Now possibly adjust the number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming () returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named () returns
     nonzero, and targetm.calls.strict_argument_naming () returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitting for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named () returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if (type_arg_types != 0
      && targetm.calls.strict_argument_naming (args_so_far))
    ;
  else if (type_arg_types != 0
	   && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
    /* Don't include the last named arg.  */
    --n_named_args;
  else
    /* Treat all args as named.  */
    n_named_args = num_actuals;
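
  /* Illustrative count: for a variadic callee such as

       int printf (const char *fmt, ...);

     TYPE_ARG_TYPES names one parameter, so with the default hooks
     N_NAMED_ARGS is 1 and every actual after FMT is anonymous; the
     strict_argument_naming and pretend_outgoing_varargs_named hooks
     let a target move that boundary as described above.  */
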
  /* Make a vector to hold all the information about each arg.  */
  args = XALLOCAVEC (struct arg_data, num_actuals);
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
				   n_named_args, exp,
				   structure_value_addr_value, fndecl, fntype,
				   args_so_far, reg_parm_stack_space,
				   &old_stack_level, &old_pending_adj,
				   &must_preallocate, &flags,
				   &try_tail_call, CALL_FROM_THUNK_P (exp));

  if (args_size.var)
    must_preallocate = 1;

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.  */

  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || args_size.var
      || dbg_cnt (tail_call) == false)
    try_tail_call = 0;

  /* Remaining reasons for tail call optimization to fail.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
	 structure_value_addr can be allocated on the stack.
	 It does not seem worth the effort since few optimizable
	 sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
#ifdef REG_PARM_STACK_SPACE
      /* If outgoing reg parm stack space changes, we can not do sibcall.  */
      || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
	  != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
      || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
#endif
      /* Check whether the target is able to optimize the call
	 into a sibcall.  */
      || !targetm.function_ok_for_sibcall (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
	 optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
	 some of the caller's arguments, but could clobber them beforehand if
	 the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
	 function, we cannot change it into a sibling call.
	 crtl->args.pretend_args_size is not part of the
	 stack allocated by our caller.  */
      || args_size.constant > (crtl->args.size
			       - crtl->args.pretend_args_size)
      /* If the callee pops its own arguments, then it must pop exactly
	 the same number of arguments as the current function.  */
      || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
	  != targetm.calls.return_pops_args (current_function_decl,
					     TREE_TYPE (current_function_decl),
					     crtl->args.size))
      || !lang_hooks.decls.ok_for_sibcall (fndecl))
    try_tail_call = 0;

  /* Check if caller and callee disagree in promotion of function
     return value.  */
  if (try_tail_call)
    {
      enum machine_mode caller_mode, caller_promoted_mode;
      enum machine_mode callee_mode, callee_promoted_mode;
      int caller_unsignedp, callee_unsignedp;
      tree caller_res = DECL_RESULT (current_function_decl);

      caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
      caller_mode = DECL_MODE (caller_res);
      callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
      callee_mode = TYPE_MODE (TREE_TYPE (funtype));
      caller_promoted_mode
	= promote_function_mode (TREE_TYPE (caller_res), caller_mode,
				 &caller_unsignedp,
				 TREE_TYPE (current_function_decl), 1);
      callee_promoted_mode
	= promote_function_mode (TREE_TYPE (funtype), callee_mode,
				 &callee_unsignedp,
				 funtype, 1);
      if (caller_mode != VOIDmode
	  && (caller_promoted_mode != callee_promoted_mode
	      || ((caller_mode != caller_promoted_mode
		   || callee_mode != callee_promoted_mode)
		  && (caller_unsignedp != callee_unsignedp
		      || GET_MODE_BITSIZE (caller_mode)
			 < GET_MODE_BITSIZE (callee_mode)))))
	try_tail_call = 0;
    }
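
  /* Illustrative mismatch (assuming a target that promotes small
     return values to SImode): if the current function returns
     "unsigned short" (zero-extended to SImode) and the callee returns
     "short" (sign-extended to SImode), the tail call is refused,
     because our own caller would rely on an extension the callee never
     performed.  */
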
  /* Ensure current function's preferred stack boundary is at least
     what we need.  Stack alignment may also increase preferred stack
     boundary.  */
  if (crtl->preferred_stack_boundary < preferred_stack_boundary)
    crtl->preferred_stack_boundary = preferred_stack_boundary;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail
	 call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg, after_args;

      if (pass == 0)
	{
	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust = pending_stack_adjust;
	  save_stack_pointer_delta = stack_pointer_delta;
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
	 through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

	 From this point on, if the sibling call fails, we want to set
	 sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      /* Don't let pending stack adjusts add up to too much.
	 Also, do all pending adjustments now if there is any chance
	 this might be a call to alloca or if we are expanding a sibling
	 call sequence.
	 Also do the adjustments before a throwing call, otherwise
	 exception handling can fail; PR 19225.  */
      if (pending_stack_adjust >= 32
	  || (pending_stack_adjust > 0
	      && (flags & ECF_MAY_BE_ALLOCA))
	  || (pending_stack_adjust > 0
	      && flag_exceptions && !(flags & ECF_NOTHROW))
	  || pass == 0)
	do_pending_stack_adjust ();

      /* Precompute any arguments as needed.  */
      if (pass)
	precompute_arguments (num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
	 if a libcall is deleted.  */
      if (pass && (flags & ECF_MALLOC))
	start_sequence ();

      if (pass == 0 && crtl->stack_protect_guard)
	stack_protect_epilogue ();

      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
	 and constant sizes must be combined, the size may have to be rounded,
	 and there may be a minimum required size.  When generating a sibcall
	 pattern, do not round up, since we'll be re-using whatever space our
	 caller provided.  */
      unadjusted_args_size
	= compute_argument_block_size (reg_parm_stack_space,
				       &adjusted_args_size,
				       fndecl, fntype,
				       (pass == 0 ? 0
					: preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
	 incoming argument block.  */
      if (pass == 0)
	{
	  argblock = crtl->args.internal_arg_pointer;
	  argblock
#ifdef STACK_GROWS_DOWNWARD
	    = plus_constant (argblock, crtl->args.pretend_args_size);
#else
	    = plus_constant (argblock, -crtl->args.pretend_args_size);
#endif
	  stored_args_map = sbitmap_alloc (args_size.constant);
	  sbitmap_zero (stored_args_map);
	}

      /* If we have no actual push instructions, or shouldn't use them,
	 make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
	{
	  if (old_stack_level == 0)
	    {
	      emit_stack_save (SAVE_BLOCK, &old_stack_level);
	      old_stack_pointer_delta = stack_pointer_delta;
	      old_pending_adj = pending_stack_adjust;
	      pending_stack_adjust = 0;
	      /* stack_arg_under_construction says whether a stack arg is
		 being constructed at the old stack level.  Pushing the stack
		 gets a clean outgoing argument block.  */
	      old_stack_arg_under_construction = stack_arg_under_construction;
	      stack_arg_under_construction = 0;
	    }
	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
	  if (flag_stack_usage_info)
	    current_function_has_unbounded_dynamic_stack_size = 1;
	}
      else
	{
	  /* Note that we must go through the motions of allocating an argument
	     block even if the size is zero because we may be storing args
	     in the area reserved for register arguments, which may be part of
	     the stack frame.  */

	  int needed = adjusted_args_size.constant;

	  /* Store the maximum argument space used.  It will be pushed by
	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	     checking).  */

	  if (needed > crtl->outgoing_args_size)
	    crtl->outgoing_args_size = needed;

	  if (must_preallocate)
	    {
	      if (ACCUMULATE_OUTGOING_ARGS)
		{
		  /* Since the stack pointer will never be pushed, it is
		     possible for the evaluation of a parm to clobber
		     something we have already written to the stack.
		     Since most function calls on RISC machines do not use
		     the stack, this is uncommon, but must work correctly.

		     Therefore, we save any area of the stack that was already
		     written and that we are using.  Here we set up to do this
		     by making a new stack usage map from the old one.  The
		     actual save will be done by store_one_arg.

		     Another approach might be to try to reorder the argument
		     evaluations to avoid this conflicting stack usage.  */

		  /* Since we will be writing into the entire argument area,
		     the map must be allocated for its entire size, not just
		     the part that is the responsibility of the caller.  */
		  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
		    needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
						     needed + 1);
#else
		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
						     needed);
#endif
		  free (stack_usage_map_buf);
		  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
		  stack_usage_map = stack_usage_map_buf;

		  if (initial_highest_arg_in_use)
		    memcpy (stack_usage_map, initial_stack_usage_map,
			    initial_highest_arg_in_use);

		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
			    (highest_outgoing_arg_in_use
			     - initial_highest_arg_in_use));
		  needed = 0;

		  /* The address of the outgoing argument list must not be
		     copied to a register here, because argblock would be left
		     pointing to the wrong place after the call to
		     allocate_dynamic_stack_space below.  */

		  argblock = virtual_outgoing_args_rtx;
		}
	      else
		{
		  if (inhibit_defer_pop == 0)
		    {
		      /* Try to reuse some or all of the pending_stack_adjust
			 to get this space.  */
		      needed
			= (combine_pending_stack_adjustment_and_call
			   (unadjusted_args_size,
			    &adjusted_args_size,
			    preferred_unit_stack_boundary));

		      /* combine_pending_stack_adjustment_and_call computes
			 an adjustment before the arguments are allocated.
			 Account for them and see whether or not the stack
			 needs to go up or down.  */
		      needed = unadjusted_args_size - needed;

		      if (needed < 0)
			{
			  /* We're releasing stack space.  */
			  /* ??? We can avoid any adjustment at all if we're
			     already aligned.  FIXME.  */
			  pending_stack_adjust = -needed;
			  do_pending_stack_adjust ();
			  needed = 0;
			}
		      else
			/* We need to allocate space.  We'll do that in
			   push_block below.  */
			pending_stack_adjust = 0;
		    }

		  /* Special case this because overhead of `push_block' in
		     this case is non-trivial.  */
		  if (needed == 0)
		    argblock = virtual_outgoing_args_rtx;
		  else
		    {
		      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
		      argblock = plus_constant (argblock, needed);
#endif
		    }

		  /* We only really need to call `copy_to_reg' in the case
		     where push insns are going to be used to pass ARGBLOCK
		     to a function call in ARGS.  In that case, the stack
		     pointer changes value from the allocation point to the
		     call point, and hence the value of
		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
		     as well always do it.  */
		  argblock = copy_to_reg (argblock);
		}
	    }
	}

      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* The save/restore code in store_one_arg handles all
	     cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
	      rtx push_size
		= GEN_INT (adjusted_args_size.constant
			   + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
							      : TREE_TYPE (fndecl))) ? 0
			      : reg_parm_stack_space));
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level);
		  old_stack_pointer_delta = stack_pointer_delta;
		  old_pending_adj = pending_stack_adjust;
		  pending_stack_adjust = 0;
		  /* stack_arg_under_construction says whether a stack
		     arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
		  old_stack_arg_under_construction
		    = stack_arg_under_construction;
		  stack_arg_under_construction = 0;
		  /* Make a new map for the new argument list.  */
		  free (stack_usage_map_buf);
		  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
		  stack_usage_map = stack_usage_map_buf;
		  highest_outgoing_arg_in_use = 0;
		}
	      /* We can pass TRUE as the 4th argument because we just
		 saved the stack pointer and will restore it right after
		 the call.  */
	      allocate_dynamic_stack_space (push_size, 0,
					    BIGGEST_ALIGNMENT, true);
	    }

	  /* If argument evaluation might modify the stack pointer,
	     copy the address of the argument list to a register.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}

      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
	 before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
	  && adjusted_args_size.constant != unadjusted_args_size)
	{
	  /* When the stack adjustment is pending, we get better code
	     by combining the adjustments.  */
	  if (pending_stack_adjust
	      && ! inhibit_defer_pop)
	    {
	      pending_stack_adjust
		= (combine_pending_stack_adjustment_and_call
		   (unadjusted_args_size,
		    &adjusted_args_size,
		    preferred_unit_stack_boundary));
	      do_pending_stack_adjust ();
	    }
	  else if (argblock == 0)
	    anti_adjust_stack (GEN_INT (adjusted_args_size.constant
					- unadjusted_args_size));
	}
      /* Now that the stack is properly aligned, pops can't safely
	 be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      /* Record the maximum pushed stack space size.  We need to delay
	 doing it this far to take into account the optimization done
	 by combine_pending_stack_adjustment_and_call.  */
      if (flag_stack_usage_info
	  && !ACCUMULATE_OUTGOING_ARGS
	  && pass
	  && adjusted_args_size.var == 0)
	{
	  int pushed = adjusted_args_size.constant + pending_stack_adjust;
	  if (pushed > current_function_pushed_stack_size)
	    current_function_pushed_stack_size = pushed;
	}

      funexp = rtx_for_function_call (fndecl, addr);

      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (rettype) != VOIDmode
	  && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    valreg = hard_function_value (build_pointer_type (rettype),
					  fndecl, NULL, (pass == 0));
	  else
	    valreg = hard_function_value (rettype, fndecl, fntype,
					  (pass == 0));

	  /* If VALREG is a PARALLEL whose first member has a zero
	     offset, use that.  This is for targets such as m68k that
	     return the same value in multiple places.  */
	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      rtx elem = XVECEXP (valreg, 0, 0);
	      rtx where = XEXP (elem, 0);
	      rtx offset = XEXP (elem, 1);
	      if (offset == const0_rtx
		  && GET_MODE (where) == GET_MODE (valreg))
		valreg = where;
	    }
	}
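
      /* Schematically (not from any particular target), such a
	 PARALLEL might look like

	   (parallel:SI [(expr_list (reg:SI d0) (const_int 0))
			 (expr_list (reg:SI a0) (const_int 0))])

	 where the first member has offset 0 and the right mode, so
	 VALREG can simply become (reg:SI d0).  */
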
      /* Precompute all register parameters.  It isn't safe to compute anything
	 once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

      if (CALL_EXPR_STATIC_CHAIN (exp))
	static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
      else
	static_chain_value = 0;

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
	 is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif

      /* Now store (and compute if necessary) all non-register parms.
	 These come before register parms, since they can require block-moves,
	 which could clobber the registers used for register parms.
	 Parms which have partial registers are not stored here,
	 but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
	{
	  if (args[i].reg == 0 || args[i].pass_on_stack)
	    {
	      rtx before_arg = get_last_insn ();

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

	  if (args[i].stack)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_USE (VOIDmode,
							  args[i].stack),
					     call_fusage);
	}

      /* If we have a parm that is passed in registers but not in memory
	 and whose alignment does not permit a direct copy into registers,
	 make a group of pseudos that correspond to each register that we
	 will later fill.  */
      if (STRICT_ALIGNMENT)
	store_unaligned_arguments_into_pseudos (args, num_actuals);

      /* Now store any partially-in-registers parm.
	 This is the last place a block-move can happen.  */
      if (reg_parm_seen)
	for (i = 0; i < num_actuals; i++)
	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
	    {
	      rtx before_arg = get_last_insn ();

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

      /* If we pushed args in forward order, perform stack alignment
	 after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
	anti_adjust_stack (GEN_INT (adjusted_args_size.constant
				    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
	 was not preallocated, allocate stack space here for arguments
	 passed in registers.  */
      if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
	  && !ACCUMULATE_OUTGOING_ARGS
	  && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));

      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_reg (Pmode,
				     force_operand (structure_value_addr,
						    NULL_RTX)));

	  if (REG_P (struct_value))
	    use_reg (&call_fusage, struct_value);
	}

      after_args = get_last_insn ();
      funexp = prepare_call_address (fndecl, funexp, static_chain_value,
				     &call_fusage, reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
				pass == 0, &sibcall_failure);

      /* Save a pointer to the last insn before the call, so that we can
	 later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
	 with register windows this should be the incoming register.  */
      if (pass == 0)
	next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
							    VOIDmode,
							    void_type_node,
							    true);
      else
	next_arg_reg = targetm.calls.function_arg (args_so_far,
						   VOIDmode, void_type_node,
						   true);

      /* All arguments and registers used for the call must be set up by
	 now!  */

      /* Stack must be properly aligned now.  */
      gcc_assert (!pass
		  || !(stack_pointer_delta % preferred_unit_stack_boundary));

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   flags, args_so_far);

      /* If the call setup or the call itself overlaps with anything
	 of the argument setup we probably clobbered our call address.
	 In that case we can't do sibcalls.  */
      if (pass == 0
	  && check_sibcall_argument_overlap (after_args, 0, 0))
	sibcall_failure = 1;

      /* If a non-BLKmode value is returned at the most significant end
	 of a register, shift the register right by the appropriate amount
	 and update VALREG accordingly.  BLKmode values are handled by the
	 group load/store machinery below.  */
      if (!structure_value_addr
	  && !pcc_struct_value
	  && TYPE_MODE (rettype) != BLKmode
	  && targetm.calls.return_in_msb (rettype))
	{
	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
	    sibcall_failure = 1;
	  valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
	}
      if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx last, insns;

	  /* The return value from a malloc-like function is a pointer.  */
	  if (TREE_CODE (rettype) == POINTER_TYPE)
	    mark_reg_pointer (temp, BIGGEST_ALIGNMENT);

	  emit_move_insn (temp, valreg);

	  /* The return value from a malloc-like function can not alias
	     anything else.  */
	  last = get_last_insn ();
	  add_reg_note (last, REG_NOALIAS, temp);

	  /* Write out the sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  valreg = temp;
	}

      /* For calls to `setjmp', etc., inform
	 function.c:setjmp_warnings that it should complain if
	 nonvolatile values are live.  For functions that cannot
	 return, inform flow that control does not fall through.  */
      if ((flags & ECF_NORETURN) || pass == 0)
	{
	  /* The barrier must be emitted
	     immediately after the CALL_INSN.  Some ports emit more
	     than just a CALL_INSN above, so we must search for it here.  */
	  rtx last = get_last_insn ();
	  while (!CALL_P (last))
	    {
	      last = PREV_INSN (last);
	      /* There was no CALL_INSN?  */
	      gcc_assert (last != before_call);
	    }

	  emit_barrier_after (last);

	  /* Stack adjustments after a noreturn call are dead code.
	     However when NO_DEFER_POP is in effect, we must preserve
	     stack_pointer_delta.  */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
	}

      /* If value type not void, return an rtx for the value.  */

      if (TYPE_MODE (rettype) == VOIDmode
	  || ignore)
	target = const0_rtx;
      else if (structure_value_addr)
	{
	  if (target == 0 || !MEM_P (target))
	    {
	      target
		= gen_rtx_MEM (TYPE_MODE (rettype),
			       memory_address (TYPE_MODE (rettype),
					       structure_value_addr));
	      set_mem_attributes (target, rettype, 1);
	    }
	}
      else if (pcc_struct_value)
	{
	  /* This is the special C++ case where we need to
	     know what the true target was.  We take care to
	     never use this value more than once in one expression.  */
	  target = gen_rtx_MEM (TYPE_MODE (rettype),
				copy_to_reg (valreg));
	  set_mem_attributes (target, rettype, 1);
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (target == 0)
	    {
	      /* This will only be assigned once, so it can be readonly.  */
	      tree nt = build_qualified_type (rettype,
					      (TYPE_QUALS (rettype)
					       | TYPE_QUAL_CONST));

	      target = assign_temp (nt, 0, 1, 1);
	    }

	  if (! rtx_equal_p (target, valreg))
	    emit_group_store (target, valreg, rettype,
			      int_size_in_bytes (rettype));

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else if (target
	       && GET_MODE (target) == TYPE_MODE (rettype)
	       && GET_MODE (target) == GET_MODE (valreg))
	{
	  bool may_overlap = false;

	  /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
	     reg to a plain register.  */
	  if (!REG_P (target) || HARD_REGISTER_P (target))
	    valreg = avoid_likely_spilled_reg (valreg);

	  /* If TARGET is a MEM in the argument area, and we have
	     saved part of the argument area, then we can't store
	     directly into TARGET as it may get overwritten when we
	     restore the argument save area below.  Don't work too
	     hard though and simply force TARGET to a register if it
	     is a MEM; the optimizer is quite likely to sort it out.  */
	  if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
	    for (i = 0; i < num_actuals; i++)
	      if (args[i].save_area)
		{
		  may_overlap = true;
		  break;
		}

	  if (may_overlap)
	    target = copy_to_reg (valreg);
	  else
	    {
	      /* TARGET and VALREG cannot be equal at this point
		 because the latter would not have
		 REG_FUNCTION_VALUE_P true, while the former would if
		 it were referring to the same register.

		 If they refer to the same register, this move will be
		 a no-op, except when function inlining is being
		 done.  */
	      emit_move_insn (target, valreg);

	      /* If we are setting a MEM, this code must be executed.
		 Since it is emitted after the call insn, sibcall
		 optimization cannot be performed in that case.  */
	      if (MEM_P (target))
		sibcall_failure = 1;
	    }
	}
      else if (TYPE_MODE (rettype) == BLKmode)
	{
	  rtx val = valreg;
	  if (GET_MODE (val) != BLKmode)
	    val = avoid_likely_spilled_reg (val);
	  target = copy_blkmode_from_reg (target, val, rettype);

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else
	target = copy_to_reg (avoid_likely_spilled_reg (valreg));

      /* If we promoted this return value, make the proper SUBREG.
	 TARGET might be const0_rtx here, so be careful.  */
      if (REG_P (target)
	  && TYPE_MODE (rettype) != BLKmode
	  && GET_MODE (target) != TYPE_MODE (rettype))
	{
	  tree type = rettype;
	  int unsignedp = TYPE_UNSIGNED (type);
	  int offset = 0;
	  enum machine_mode pmode;

	  /* Ensure we promote as expected, and get the new unsignedness.  */
	  pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
					 funtype, 1);
	  gcc_assert (GET_MODE (target) == pmode);

	  if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
	      && (GET_MODE_SIZE (GET_MODE (target))
		  > GET_MODE_SIZE (TYPE_MODE (type))))
	    {
	      offset = GET_MODE_SIZE (GET_MODE (target))
		       - GET_MODE_SIZE (TYPE_MODE (type));
	      if (! BYTES_BIG_ENDIAN)
		offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
	      else if (! WORDS_BIG_ENDIAN)
		offset %= UNITS_PER_WORD;
	    }

	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
	  SUBREG_PROMOTED_VAR_P (target) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
	}
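
      /* For instance, assuming a target that promotes QImode returns
	 to SImode: a call returning "signed char" arrives in an SImode
	 register, and TARGET becomes

	   (subreg:QI (reg:SI ...) 0)

	 (with the byte offset per the endianness logic above) and
	 SUBREG_PROMOTED_VAR_P set, so later passes know the high bits
	 already hold a valid sign extension.  */
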
      /* If size of args is variable or this was a constructor call for a stack
	 argument, restore saved stack-pointer value.  */

      if (old_stack_level)
	{
	  emit_stack_restore (SAVE_BLOCK, old_stack_level);
	  stack_pointer_delta = old_stack_pointer_delta;
	  pending_stack_adjust = old_pending_adj;
	  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
	  stack_arg_under_construction = old_stack_arg_under_construction;
	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	  sibcall_failure = 1;
	}
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
	{
#ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif

	  /* If we saved any argument areas, restore them.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].save_area)
	      {
		enum machine_mode save_mode = GET_MODE (args[i].save_area);
		rtx stack_area
		  = gen_rtx_MEM (save_mode,
				 memory_address (save_mode,
						 XEXP (args[i].stack_slot, 0)));

		if (save_mode != BLKmode)
		  emit_move_insn (stack_area, args[i].save_area);
		else
		  emit_block_move (stack_area, args[i].save_area,
				   GEN_INT (args[i].locate.size.constant),
				   BLOCK_OP_CALL_PARM);
	      }

	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	}

      /* If this was alloca, record the new stack level for nonlocal gotos.
	 Check for the handler slots since we might not have a save area
	 for non-local gotos.  */

      if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
	update_nonlocal_goto_save_area ();

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
	free (args[i].aligned_regs);

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
	{
	  tail_call_insns = insns;

	  /* Restore the pending stack adjustment now that we have
	     finished generating the sibling call sequence.  */

	  pending_stack_adjust = save_pending_stack_adjust;
	  stack_pointer_delta = save_stack_pointer_delta;

	  /* Prepare arg structure for next iteration.  */
	  for (i = 0; i < num_actuals; i++)
	    {
	      args[i].value = 0;
	      args[i].aligned_regs = 0;
	      args[i].stack = 0;
	    }

	  sbitmap_free (stored_args_map);
	}
      else
	{
	  normal_call_insns = insns;

	  /* Verify that we've deallocated all the stack we used.  */
	  gcc_assert ((flags & ECF_NORETURN)
		      || (old_stack_allocated
			  == stack_pointer_delta - pending_stack_adjust));
	}
      /* If something prevents making this a sibling call,
	 zero out the sequence.  */
      if (sibcall_failure)
	tail_call_insns = NULL_RTX;
      else
	break;
    }

  /* If tail call production succeeded, we need to remove REG_EQUIV notes on
     arguments too, as argument area is now clobbered by the call.  */
  if (tail_call_insns)
    {
      emit_insn (tail_call_insns);
      crtl->tail_call_emit = true;
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;

  free (stack_usage_map_buf);

  return target;
}

/* A sibling call sequence invalidates any REG_EQUIV notes made for
   this function's incoming arguments.

   At the start of RTL generation we know the only REG_EQUIV notes
   in the rtl chain are those for incoming arguments, so we can look
   for REG_EQUIV notes between the start of the function and the
   NOTE_INSN_FUNCTION_BEG.

   This is (slight) overkill.  We could keep track of the highest
   argument we clobber and be more selective in removing notes, but it
   does not seem to be worth the effort.  */

void
fixup_tail_calls (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      /* There are never REG_EQUIV notes for the incoming arguments
	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
      if (NOTE_P (insn)
	  && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	break;

      note = find_reg_note (insn, REG_EQUIV, 0);
      if (note)
	remove_note (insn, note);
      note = find_reg_note (insn, REG_EQUIV, 0);
      gcc_assert (!note);
    }
}
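
/* Background, schematically: an incoming stack argument is typically
   loaded by an insn carrying a note such as

     (insn ... (set (reg:SI 58)
		    (mem:SI (plus (reg arg_pointer) (const_int 8))))
	  (expr_list:REG_EQUIV (mem:SI ...) ...))

   which tells later passes the pseudo can always be reloaded from the
   argument slot.  After a sibcall has clobbered that slot the note
   would be a lie, hence the scrubbing above.  */
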
/* Traverse a list of TYPES and expand all complex types into their
   components.  */
static tree
split_complex_types (tree types)
{
  tree p;

  /* Before allocating memory, check for the common case of no complex.  */
  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree type = TREE_VALUE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	goto found;
    }
  return types;

 found:
  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (complex_type))
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
	  TREE_VALUE (p) = TREE_TYPE (complex_type);
	  next = TREE_CHAIN (p);

	  /* Add another component type for the imaginary part.  */
	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
	  TREE_CHAIN (p) = imag;
	  TREE_CHAIN (imag) = next;

	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return types;
}
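
/* For example (schematic): a TYPE_ARG_TYPES chain of

     _Complex double -> int -> void

   is rewritten in place as

     double -> double -> int -> void

   where the second "double" node is the freshly built entry for the
   imaginary part.  */
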
/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether return value needs to be saved, other
   parameters are documented in the emit_library_call function below.  */

static rtx
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
			   enum libcall_type fn_type,
			   enum machine_mode outmode, int nargs, va_list p)
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  /* TODO: choose the correct decl type of orgfun.  Sadly this information
     isn't present here, so we default to the native calling ABI.  */
  tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI?  */
  tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI?  */
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = 0, high_to_save = 0;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  /* Saved state of the outgoing-argument usage map.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions can not throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = lang_hooks.types.type_for_mode (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && MEM_P (value))
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 0, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
	}
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = XALLOCAVEC (struct arg, nargs + 1);
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
  args_so_far = pack_cumulative_args (&args_so_far_v);

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);

      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (addr) && !MEM_P (addr)
	  && !(CONSTANT_P (addr)
	       && targetm.legitimate_constant_p (Pmode, addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = targetm.calls.function_arg (args_so_far,
						      Pmode, NULL_TREE, true);
      gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
						   NULL_TREE, 1) == 0);

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   0, NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);

      count++;
    }

  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = (enum machine_mode) va_arg (p, int);
      int unsigned_p = 0;

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      gcc_assert (mode != BLKmode
		  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (!REG_P (val) && !MEM_P (val)
	  && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
	val = force_operand (val, NULL_RTX);

      if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
	{
	  rtx slot;
	  int must_copy
	    = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);

	  /* If this was a CONST function, it is now PURE since it now
	     reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  if (MEM_P (val) && !must_copy)
	    {
	      tree val_expr = MEM_EXPR (val);
	      if (val_expr)
		mark_addressable (val_expr);
	      slot = val;
	    }
	  else
	    {
	      slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
				  0, 1, 1);
	      emit_move_insn (slot, val);
	    }

	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  mode = Pmode;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}

      mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
      argvec[count].mode = mode;
      argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
      argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
						      NULL_TREE, true);

      argvec[count].partial
	= targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   argvec[count].partial,
			   NULL_TREE, &args_size, &argvec[count].locate);

      gcc_assert (!argvec[count].locate.size.var);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
    }

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);
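
  /* A typical caller elsewhere in the compiler looks like this
     (illustrative; the libfunc and operands are made up):

       rtx result
	 = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__mulsi3"),
				    NULL_RTX, LCT_CONST, SImode, 2,
				    op0, SImode, op1, SImode);

     Each actual is supplied as an rtx/mode pair in the va_list, which
     is exactly what the loop above unpacks into ARGVEC.  */
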
  original_args_size = args_size;
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
    args_size.constant -= reg_parm_stack_space;

  if (args_size.constant > crtl->outgoing_args_size)
    crtl->outgoing_args_size = args_size.constant;

  if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
    {
      int pushed = args_size.constant + pending_stack_adjust;
      if (pushed > current_function_pushed_stack_size)
	current_function_pushed_stack_size = pushed;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	needed += reg_parm_stack_space;

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed);
#endif
      stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
      stack_usage_map = stack_usage_map_buf;

      if (initial_highest_arg_in_use)
	memcpy (stack_usage_map, initial_stack_usage_map,
		initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
		highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;

      /* We must be careful to use virtual regs before they're instantiated,
	 and real regs afterwards.  Loop optimization, for example, can create
	 new libcalls after we've instantiated the virtual regs, and if we
	 use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
	argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));
3681 if (PUSH_ARGS_REVERSED)
3692 #ifdef REG_PARM_STACK_SPACE
3693 if (ACCUMULATE_OUTGOING_ARGS)
3695 /* The argument list is the property of the called routine and it
3696 may clobber it. If the fixed area has been used for previous
3697 parameters, we must save and restore it. */
3698 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3699 &low_to_save, &high_to_save);
3703 /* Push the args that need to be pushed. */
3705 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3706 are to be pushed. */
3707 for (count = 0; count < nargs; count++, argnum += inc)
3709 enum machine_mode mode = argvec[argnum].mode;
3710 rtx val = argvec[argnum].value;
3711 rtx reg = argvec[argnum].reg;
3712 int partial = argvec[argnum].partial;
3713 unsigned int parm_align = argvec[argnum].locate.boundary;
3714 int lower_bound = 0, upper_bound = 0, i;
3716 if (! (reg != 0 && partial == 0))
3720 if (ACCUMULATE_OUTGOING_ARGS)
3722 /* If this is being stored into a pre-allocated, fixed-size,
3723 stack area, save any previous data at that location. */
3725 #ifdef ARGS_GROW_DOWNWARD
3726 /* stack_slot is negative, but we want to index stack_usage_map
3727 with positive values. */
3728 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3729 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3731 lower_bound = argvec[argnum].locate.slot_offset.constant;
3732 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3736 /* Don't worry about things in the fixed argument area;
3737 it has already been saved. */
3738 if (i < reg_parm_stack_space)
3739 i = reg_parm_stack_space;
3740 while (i < upper_bound && stack_usage_map[i] == 0)
3743 if (i < upper_bound)
                  /* We need to make a save area.  */
                  unsigned int size
                    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
                  enum machine_mode save_mode
                    = mode_for_size (size, MODE_INT, 1);
                  rtx adr
                    = plus_constant (argblock,
                                     argvec[argnum].locate.offset.constant);
                  rtx stack_area
                    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

                  if (save_mode == BLKmode)
                    {
                      argvec[argnum].save_area
                        = assign_stack_temp (BLKmode,
                                             argvec[argnum].locate.size.constant,
                                             0);

                      emit_block_move (validize_mem (argvec[argnum].save_area),
                                       stack_area,
                                       GEN_INT (argvec[argnum].locate.size.constant),
                                       BLOCK_OP_CALL_PARM);
                    }
                  else
                    {
                      argvec[argnum].save_area = gen_reg_rtx (save_mode);

                      emit_move_insn (argvec[argnum].save_area, stack_area);
                    }
                }
            }

          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
                          partial, reg, 0, argblock,
                          GEN_INT (argvec[argnum].locate.offset.constant),
                          reg_parm_stack_space,
                          ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;
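          /* E.g. an argument at slot offset 8 with size 4 marks map
             bytes 8 through 11; a later argument whose evaluation would
             store over those bytes then takes the save-area path above
             (illustrative offsets).  */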

          NO_DEFER_POP;

          /* Indicate argument access so that alias.c knows that these
             values are live.  */
          if (argblock)
            use = plus_constant (argblock,
                                 argvec[argnum].locate.offset.constant);
          else
            /* When arguments are pushed, trying to tell alias.c where
               exactly this argument is won't work, because the
               auto-increment causes confusion.  So we merely indicate
               that we access something with a known mode somewhere on
               the stack.  */
            use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                                gen_rtx_SCRATCH (Pmode));
          use = gen_rtx_MEM (argvec[argnum].mode, use);
          use = gen_rtx_USE (VOIDmode, use);
          call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
        }
    }
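
  /* Everything accumulated in CALL_FUSAGE ends up as
     CALL_INSN_FUNCTION_USAGE of the call insn built below, so later
     passes can see which stack slots and registers the call uses.  */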

  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        {
          int partial = argvec[count].partial;
          if (partial)
            {
              int nregs;

              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
              use_regs (&call_fusage, REGNO (reg), nregs);
            }
          else
            use_reg (&call_fusage, reg);
        }
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode, orgfun) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  gcc_assert (!(stack_pointer_delta
                & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
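  /* With, say, a 128-bit PREFERRED_STACK_BOUNDARY the mask is
     128 / 8 - 1 == 15, i.e. the net adjustment recorded in
     stack_pointer_delta must be a multiple of 16 bytes here.  */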

  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               targetm.calls.function_arg (args_so_far,
                                           VOIDmode, void_type_node, true),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);

  /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
     that it should complain if nonvolatile values are live.  For
     functions that cannot return, inform flow that control does not
     fall through.  */

  if (flags & ECF_NORETURN)
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          gcc_assert (last != before_call);
        }

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  pop_temp_slots ();

  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else
        {
          /* Convert to the proper mode if a promotion has been active.  */
          if (GET_MODE (valreg) != outmode)
            {
              int unsignedp = TYPE_UNSIGNED (tfom);

              gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
                                                 fndecl ? TREE_TYPE (fndecl)
                                                        : fntype, 1)
                          == GET_MODE (valreg));
              valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
            }

          if (value != 0)
            emit_move_insn (value, valreg);
          else
            value = valreg;
        }
    }
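
  /* If a result was requested, VALUE now holds it, in either the
     caller-supplied location or a pseudo chosen above.  */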

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  free (stack_usage_map_buf);

  return value;
}

/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
   `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
   other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
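
/* As an illustration (not a call made in this file), a three-operand
   memory libcall could be emitted as

     emit_library_call (memcpy_libfunc, LCT_NORMAL, VOIDmode, 3,
                        dst_addr, Pmode, src_addr, Pmode,
                        size_rtx, TYPE_MODE (sizetype));

   where dst_addr, src_addr and size_rtx stand for hypothetical operand
   rtxes and each value is followed by the machine mode to pass it in.  */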

/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
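
/* For illustration, an expander that needs the result in a register
   might write (hypothetical variables)

     res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                    mode, 2, op0, mode, op1, mode);

   and then copy RES wherever the computation needs it.  */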

/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is a bitmask of ECF_* flags; if ECF_MAY_BE_ALLOCA is set, this
   could be a call to `alloca', so we must be careful about how the
   stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->locate.size.constant;
#endif

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   GEN_INT (arg->locate.size.constant),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object (or if for any other reason the
         mode doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
                                               arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          int pad = used - size;
          if (pad)
            {
              unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, pad_align);
            }
        }
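      /* PAD & -PAD isolates the lowest set bit of the padding.  E.g.
         with size == 5 and used == 8, pad is 3 and 3 & -3 is 1, so the
         pushed value is only known to be byte (8-bit) aligned
         (illustrative numbers).  */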

      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      parm_align, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because emit_push_insn
             for BLKmode is careful to avoid it.  */
          excess = (arg->locate.size.constant
                    - int_size_in_bytes (TREE_TYPE (pval))
                    + partial);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype),
                                  EXPAND_NORMAL);
        }

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == crtl->args.internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     crtl->args.internal_arg_pointer
                  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
            {
              if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              gcc_assert (!arg->locate.offset.var
                          && arg->locate.size.var == 0
                          && CONST_INT_P (size_rtx));

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  /* Use arg->locate.size.constant instead of size_rtx
                     because we only care about the part of the argument
                     on the stack.  */
                  if (i < (arg->locate.offset.constant
                           + arg->locate.size.constant))
                    sibcall_failure = 1;
                }
              else
                {
                  /* Even though they appear to be at the same location,
                     if part of the outgoing argument is in registers,
                     they aren't really at the same location.  Check for
                     this by making sure that the incoming size is the
                     same as the outgoing size.  */
                  if (arg->locate.size.constant != INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
        = emit_group_load_into_temps (arg->reg, arg->value, type,
                                      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}

/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
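
/* For example, a GNU C structure whose size depends on a run-time bound
   has a non-INTEGER_CST TYPE_SIZE, so the hook above sends it to the
   stack regardless of the target's register-passing conventions.  */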

/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */

bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
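
/* For example, on a 32-bit big-endian target a 6-byte BLKmode struct
   padded upward has its data in the wrong end of the second register
   word, so the final check above sends it to the stack (illustrative
   target parameters).  */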