1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
35 #include "diagnostic-core.h"
40 #include "langhooks.h"
46 #include "tree-flow.h"
48 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
49 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
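/* For illustration, assuming the common values PREFERRED_STACK_BOUNDARY == 128
   and BITS_PER_UNIT == 8, STACK_BYTES evaluates to 16, i.e. a 16-byte stack
   alignment unit. */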
51 /* Data structure and subroutines used within expand_call. */
55 /* Tree node for this argument. */
57 /* Mode for value; TYPE_MODE unless promoted. */
58 enum machine_mode mode;
59 /* Current RTL value for argument, or 0 if it isn't precomputed. */
61 /* Initially-computed RTL value for argument; only for const functions. */
63 /* Register to pass this argument in, 0 if passed on stack, or a
64 PARALLEL if the arg is to be copied into multiple non-contiguous
67 /* Register to pass this argument in when generating tail call sequence.
68 This is not the same register as for normal calls on machines with
71 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
72 form for emit_group_move. */
74 /* If REG was promoted from the actual mode of the argument expression,
75 indicates whether the promotion is sign- or zero-extended. */
77 /* Number of bytes to put in registers. 0 means put the whole arg
78 in registers. Also 0 if not passed in registers. */
80 /* Nonzero if argument must be passed on stack.
81 Note that some arguments may be passed on the stack
82 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
83 pass_on_stack identifies arguments that *cannot* go in registers. */
85 /* Some fields packaged up for locate_and_pad_parm. */
86 struct locate_and_pad_arg_data locate;
87 /* Location on the stack at which parameter should be stored. The store
88 has already been done if STACK == VALUE. */
90 /* Location on the stack of the start of this argument slot. This can
91 differ from STACK if this arg pads downward. This location is known
92 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
94 /* Place that this stack area has been saved, if needed. */
96 /* If an argument's alignment does not permit direct copying into registers,
97 copy in smaller-sized pieces into pseudos. These are stored in a
98 block pointed to by this field. The next field says how many
99 word-sized pseudos we made. */
104 /* A vector of one char per byte of stack space. A byte is nonzero if
105 the corresponding stack location has been used.
106 This vector is used to prevent a function call within an argument from
107 clobbering any stack already set up. */
108 static char *stack_usage_map;
110 /* Size of STACK_USAGE_MAP. */
111 static int highest_outgoing_arg_in_use;
113 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
114 stack location's tail call argument has already been stored into the stack.
115 This bitmap is used to prevent sibling call optimization if the function tries
116 to use the parent's incoming argument slots when they have already been
117 overwritten with tail call arguments. */
118 static sbitmap stored_args_map;
120 /* stack_arg_under_construction is nonzero when an argument may be
121 initialized with a constructor call (including a C function that
122 returns a BLKmode struct) and expand_call must take special action
123 to make sure the object being constructed does not overlap the
124 argument list for the constructor call. */
125 static int stack_arg_under_construction;
127 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
128 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
130 static void precompute_register_parameters (int, struct arg_data *, int *);
131 static int store_one_arg (struct arg_data *, rtx, int, int, int);
132 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
133 static int finalize_must_preallocate (int, int, struct arg_data *,
135 static void precompute_arguments (int, struct arg_data *);
136 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
137 static void initialize_argument_information (int, struct arg_data *,
138 struct args_size *, int,
140 tree, tree, CUMULATIVE_ARGS *, int,
141 rtx *, int *, int *, int *,
143 static void compute_argument_addresses (struct arg_data *, rtx, int);
144 static rtx rtx_for_function_call (tree, tree);
145 static void load_register_parameters (struct arg_data *, int, rtx *, int,
147 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
148 enum machine_mode, int, va_list);
149 static int special_function_p (const_tree, int);
150 static int check_sibcall_argument_overlap_1 (rtx);
151 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
153 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
155 static tree split_complex_types (tree);
157 #ifdef REG_PARM_STACK_SPACE
158 static rtx save_fixed_argument_area (int, rtx, int *, int *);
159 static void restore_fixed_argument_area (rtx, rtx, int, int);
162 /* Force FUNEXP into a form suitable for the address of a CALL,
163 and return that as an rtx. Also load the static chain register
164 if FNDECL is a nested function.
166 CALL_FUSAGE points to a variable holding the prospective
167 CALL_INSN_FUNCTION_USAGE information. */
170 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
171 rtx *call_fusage, int reg_parm_seen, int sibcallp)
173 /* Make a valid memory address and copy constants through pseudo-regs,
174 but not for a constant address if -fno-function-cse. */
175 if (GET_CODE (funexp) != SYMBOL_REF)
176 /* If we are using registers for parameters, force the
177 function address into a register now. */
178 funexp = ((reg_parm_seen
179 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
180 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
181 : memory_address (FUNCTION_MODE, funexp));
184 #ifndef NO_FUNCTION_CSE
185 if (optimize && ! flag_no_function_cse)
186 funexp = force_reg (Pmode, funexp);
190 if (static_chain_value != 0)
195 chain = targetm.calls.static_chain (fndecl, false);
196 static_chain_value = convert_memory_address (Pmode, static_chain_value);
198 emit_move_insn (chain, static_chain_value);
200 use_reg (call_fusage, chain);
206 /* Generate instructions to call function FUNEXP,
207 and optionally pop the results.
208 The CALL_INSN is the first insn generated.
210 FNDECL is the declaration node of the function. This is given to the
211 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
214 FUNTYPE is the data type of the function. This is given to the hook
215 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
216 own args. We used to allow an identifier for library functions, but
217 that doesn't work when the return type is an aggregate type and the
218 calling convention says that the pointer to this aggregate is to be
219 popped by the callee.
221 STACK_SIZE is the number of bytes of arguments on the stack,
222 ROUNDED_STACK_SIZE is that number rounded up to
223 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
224 both to put into the call insn and to generate explicit popping
227 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
228 It is zero if this call doesn't want a structure value.
230 NEXT_ARG_REG is the rtx that results from executing
231 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
232 just after all the args have had their registers assigned.
233 This could be whatever you like, but normally it is the first
234 arg-register beyond those used for args in this call,
235 or 0 if all the arg-registers are used in this call.
236 It is passed on to `gen_call' so you can put this info in the call insn.
238 VALREG is a hard register in which a value is returned,
239 or 0 if the call does not return a value.
241 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
242 the args to this call were processed.
243 We restore `inhibit_defer_pop' to that value.
245 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
246 denote registers used by the called function. */
249 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
250 tree funtype ATTRIBUTE_UNUSED,
251 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
252 HOST_WIDE_INT rounded_stack_size,
253 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
254 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
255 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
256 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
258 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
260 int already_popped = 0;
261 HOST_WIDE_INT n_popped
262 = targetm.calls.return_pops_args (fndecl, funtype, stack_size);
264 #ifdef CALL_POPS_ARGS
265 n_popped += CALL_POPS_ARGS (* args_so_far);
268 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
269 and we don't want to load it into a register as an optimization,
270 because prepare_call_address already did it if it should be done. */
271 if (GET_CODE (funexp) != SYMBOL_REF)
272 funexp = memory_address (FUNCTION_MODE, funexp);
274 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
275 if ((ecf_flags & ECF_SIBCALL)
276 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
277 && (n_popped > 0 || stack_size == 0))
279 rtx n_pop = GEN_INT (n_popped);
282 /* If this subroutine pops its own args, record that in the call insn
283 if possible, for the sake of frame pointer elimination. */
286 pat = GEN_SIBCALL_VALUE_POP (valreg,
287 gen_rtx_MEM (FUNCTION_MODE, funexp),
288 rounded_stack_size_rtx, next_arg_reg,
291 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
292 rounded_stack_size_rtx, next_arg_reg, n_pop);
294 emit_call_insn (pat);
300 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
301 /* If the target has "call" or "call_value" insns, then prefer them
302 if no arguments are actually popped. If the target does not have
303 "call" or "call_value" insns, then we must use the popping versions
304 even if the call has no arguments to pop. */
305 #if defined (HAVE_call) && defined (HAVE_call_value)
306 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
309 if (HAVE_call_pop && HAVE_call_value_pop)
312 rtx n_pop = GEN_INT (n_popped);
315 /* If this subroutine pops its own args, record that in the call insn
316 if possible, for the sake of frame pointer elimination. */
319 pat = GEN_CALL_VALUE_POP (valreg,
320 gen_rtx_MEM (FUNCTION_MODE, funexp),
321 rounded_stack_size_rtx, next_arg_reg, n_pop);
323 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
324 rounded_stack_size_rtx, next_arg_reg, n_pop);
326 emit_call_insn (pat);
332 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
333 if ((ecf_flags & ECF_SIBCALL)
334 && HAVE_sibcall && HAVE_sibcall_value)
337 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
338 gen_rtx_MEM (FUNCTION_MODE, funexp),
339 rounded_stack_size_rtx,
340 next_arg_reg, NULL_RTX));
342 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
343 rounded_stack_size_rtx, next_arg_reg,
344 GEN_INT (struct_value_size)));
349 #if defined (HAVE_call) && defined (HAVE_call_value)
350 if (HAVE_call && HAVE_call_value)
353 emit_call_insn (GEN_CALL_VALUE (valreg,
354 gen_rtx_MEM (FUNCTION_MODE, funexp),
355 rounded_stack_size_rtx, next_arg_reg,
358 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
359 rounded_stack_size_rtx, next_arg_reg,
360 GEN_INT (struct_value_size)));
366 /* Find the call we just emitted. */
367 call_insn = last_call_insn ();
369 /* Put the register usage information there. */
370 add_function_usage_to (call_insn, call_fusage);
372 /* If this is a const call, then set the insn's unchanging bit. */
373 if (ecf_flags & ECF_CONST)
374 RTL_CONST_CALL_P (call_insn) = 1;
376 /* If this is a pure call, then set the insn's unchanging bit. */
377 if (ecf_flags & ECF_PURE)
378 RTL_PURE_CALL_P (call_insn) = 1;
380 /* If this is a const or pure call that may loop forever, set the insn's looping const-or-pure bit. */
381 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
382 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
384 /* Create a nothrow REG_EH_REGION note, if needed. */
385 make_reg_eh_region_note (call_insn, ecf_flags, 0);
387 if (ecf_flags & ECF_NORETURN)
388 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
390 if (ecf_flags & ECF_RETURNS_TWICE)
392 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
393 cfun->calls_setjmp = 1;
396 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
398 /* Record debug information for virtual calls. */
399 if (flag_enable_icf_debug && fndecl == NULL)
400 (*debug_hooks->virtual_call_token) (CALL_EXPR_FN (fntree),
401 INSN_UID (call_insn));
403 /* Restore this now, so that we do defer pops for this call's args
404 if the context of the call as a whole permits. */
405 inhibit_defer_pop = old_inhibit_defer_pop;
410 CALL_INSN_FUNCTION_USAGE (call_insn)
411 = gen_rtx_EXPR_LIST (VOIDmode,
412 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
413 CALL_INSN_FUNCTION_USAGE (call_insn));
414 rounded_stack_size -= n_popped;
415 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
416 stack_pointer_delta -= n_popped;
418 /* If popping is needed, stack realignment must use DRAP. */
419 if (SUPPORTS_STACK_ALIGNMENT)
420 crtl->need_drap = true;
423 if (!ACCUMULATE_OUTGOING_ARGS)
425 /* If returning from the subroutine does not automatically pop the args,
426 we need an instruction to pop them sooner or later.
427 Perhaps do it now; perhaps just record how much space to pop later.
429 If returning from the subroutine does pop the args, indicate that the
430 stack pointer will be changed. */
432 if (rounded_stack_size != 0)
434 if (ecf_flags & ECF_NORETURN)
435 /* Just pretend we did the pop. */
436 stack_pointer_delta -= rounded_stack_size;
437 else if (flag_defer_pop && inhibit_defer_pop == 0
438 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
439 pending_stack_adjust += rounded_stack_size;
441 adjust_stack (rounded_stack_size_rtx);
444 /* When we accumulate outgoing args, we must avoid any stack manipulations.
445 Restore the stack pointer to its original value now. Usually
446 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
447 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
448 popping variants of functions exist as well.
450 ??? We may optimize similar to defer_pop above, but it is
451 probably not worthwhile.
453 ??? It will be worthwhile to enable combine_stack_adjustments even for
456 anti_adjust_stack (GEN_INT (n_popped));
459 /* Determine if the function identified by FNDECL is one with
460 special properties we wish to know about.
462 For example, if the function might return more than one time (setjmp), then
463 set RETURNS_TWICE to a nonzero value.
465 Similarly set NORETURN if the function is in the longjmp family.
467 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
468 space from the stack such as alloca. */
471 special_function_p (const_tree fndecl, int flags)
473 if (fndecl && DECL_NAME (fndecl)
474 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
475 /* Exclude functions not at the file scope, or not `extern',
476 since they are not the magic functions we would otherwise
478 FIXME: this should be handled with attributes, not with this
479 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
480 because you can declare fork() inside a function if you
482 && (DECL_CONTEXT (fndecl) == NULL_TREE
483 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
484 && TREE_PUBLIC (fndecl))
486 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
487 const char *tname = name;
489 /* We assume that alloca will always be called by name. It
490 makes no sense to pass it as a pointer-to-function to
491 anything that does not understand its behavior. */
492 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && ! strcmp (name, "alloca"))
495 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && ! strcmp (name, "__builtin_alloca"))))
498 flags |= ECF_MAY_BE_ALLOCA;
500 /* Disregard prefix _, __, __x or __builtin_. */
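/* Illustrative examples of the stripping below (assumed names): "__builtin_longjmp"
   is compared as "longjmp" and "_setjmp" as "setjmp", so the prefixed spellings
   receive the same ECF_* flags as the plain ones. */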
505 && !strncmp (name + 3, "uiltin_", 7))
507 else if (name[1] == '_' && name[2] == 'x')
509 else if (name[1] == '_')
518 && (! strcmp (tname, "setjmp")
519 || ! strcmp (tname, "setjmp_syscall")))
521 && ! strcmp (tname, "sigsetjmp"))
523 && ! strcmp (tname, "savectx")))
524 flags |= ECF_RETURNS_TWICE;
527 && ! strcmp (tname, "siglongjmp"))
528 flags |= ECF_NORETURN;
530 else if ((tname[0] == 'q' && tname[1] == 's'
531 && ! strcmp (tname, "qsetjmp"))
532 || (tname[0] == 'v' && tname[1] == 'f'
533 && ! strcmp (tname, "vfork"))
534 || (tname[0] == 'g' && tname[1] == 'e'
535 && !strcmp (tname, "getcontext")))
536 flags |= ECF_RETURNS_TWICE;
538 else if (tname[0] == 'l' && tname[1] == 'o'
539 && ! strcmp (tname, "longjmp"))
540 flags |= ECF_NORETURN;
546 /* Return nonzero when FNDECL represents a call to setjmp. */
549 setjmp_call_p (const_tree fndecl)
551 if (DECL_IS_RETURNS_TWICE (fndecl))
552 return ECF_RETURNS_TWICE;
553 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
557 /* Return true if STMT is an alloca call. */
560 gimple_alloca_call_p (const_gimple stmt)
564 if (!is_gimple_call (stmt))
567 fndecl = gimple_call_fndecl (stmt);
568 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
574 /* Return true when EXP contains an alloca call. */
577 alloca_call_p (const_tree exp)
579 if (TREE_CODE (exp) == CALL_EXPR
580 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
581 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
582 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
583 & ECF_MAY_BE_ALLOCA))
588 /* Detect flags (function attributes) from the function decl or type node. */
591 flags_from_decl_or_type (const_tree exp)
597 /* The function exp may have the `malloc' attribute. */
598 if (DECL_IS_MALLOC (exp))
601 /* The function exp may have the `returns_twice' attribute. */
602 if (DECL_IS_RETURNS_TWICE (exp))
603 flags |= ECF_RETURNS_TWICE;
605 /* Process the pure and const attributes. */
606 if (TREE_READONLY (exp))
608 if (DECL_PURE_P (exp))
610 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
611 flags |= ECF_LOOPING_CONST_OR_PURE;
613 if (DECL_IS_NOVOPS (exp))
615 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
618 if (TREE_NOTHROW (exp))
619 flags |= ECF_NOTHROW;
621 flags = special_function_p (exp, flags);
623 else if (TYPE_P (exp) && TYPE_READONLY (exp))
626 if (TREE_THIS_VOLATILE (exp))
628 flags |= ECF_NORETURN;
629 if (flags & (ECF_CONST|ECF_PURE))
630 flags |= ECF_LOOPING_CONST_OR_PURE;
636 /* Detect flags from a CALL_EXPR. */
639 call_expr_flags (const_tree t)
642 tree decl = get_callee_fndecl (t);
645 flags = flags_from_decl_or_type (decl);
648 t = TREE_TYPE (CALL_EXPR_FN (t));
649 if (t && TREE_CODE (t) == POINTER_TYPE)
650 flags = flags_from_decl_or_type (TREE_TYPE (t));
658 /* Precompute all register parameters as described by ARGS, storing values
659 into fields within the ARGS array.
661 NUM_ACTUALS indicates the total number of elements in the ARGS array.
663 Set REG_PARM_SEEN if we encounter a register parameter. */
666 precompute_register_parameters (int num_actuals, struct arg_data *args,
673 for (i = 0; i < num_actuals; i++)
674 if (args[i].reg != 0 && ! args[i].pass_on_stack)
678 if (args[i].value == 0)
681 args[i].value = expand_normal (args[i].tree_value);
682 preserve_temp_slots (args[i].value);
686 /* If the value is a non-legitimate constant, force it into a
687 pseudo now. TLS symbols sometimes need a call to resolve. */
688 if (CONSTANT_P (args[i].value)
689 && !LEGITIMATE_CONSTANT_P (args[i].value))
690 args[i].value = force_reg (args[i].mode, args[i].value);
692 /* If we are to promote the function arg to a wider mode,
695 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
697 = convert_modes (args[i].mode,
698 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
699 args[i].value, args[i].unsignedp);
701 /* If we're going to have to load the value by parts, pull the
702 parts into pseudos. The part extraction process can involve
703 non-trivial computation. */
704 if (GET_CODE (args[i].reg) == PARALLEL)
706 tree type = TREE_TYPE (args[i].tree_value);
707 args[i].parallel_value
708 = emit_group_load_into_temps (args[i].reg, args[i].value,
709 type, int_size_in_bytes (type));
712 /* If the value is expensive, and we are inside an appropriately
713 short loop, put the value into a pseudo and then put the pseudo
716 For small register classes, also do this if this call uses
717 register parameters. This is to avoid reload conflicts while
718 loading the parameter registers. */
720 else if ((! (REG_P (args[i].value)
721 || (GET_CODE (args[i].value) == SUBREG
722 && REG_P (SUBREG_REG (args[i].value)))))
723 && args[i].mode != BLKmode
724 && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
727 && targetm.small_register_classes_for_mode_p (args[i].mode))
729 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
733 #ifdef REG_PARM_STACK_SPACE
735 /* The argument list is the property of the called routine and it
736 may clobber it. If the fixed area has been used for previous
737 parameters, we must save and restore it. */
740 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
745 /* Compute the boundary of the area that needs to be saved, if any. */
746 high = reg_parm_stack_space;
747 #ifdef ARGS_GROW_DOWNWARD
750 if (high > highest_outgoing_arg_in_use)
751 high = highest_outgoing_arg_in_use;
753 for (low = 0; low < high; low++)
754 if (stack_usage_map[low] != 0)
757 enum machine_mode save_mode;
762 while (stack_usage_map[--high] == 0)
766 *high_to_save = high;
768 num_to_save = high - low + 1;
769 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
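/* For example, saving 4 bytes asks for a 32-bit integer mode (SImode on most
   targets); if no integer mode of exactly that size exists, mode_for_size
   returns BLKmode and the block-move path below is used instead. (Illustrative
   note; the chosen mode is target-dependent.) */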
771 /* If we don't have the required alignment, must do this
773 if ((low & (MIN (GET_MODE_SIZE (save_mode),
774 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
777 #ifdef ARGS_GROW_DOWNWARD
782 stack_area = gen_rtx_MEM (save_mode,
783 memory_address (save_mode,
784 plus_constant (argblock,
787 set_mem_align (stack_area, PARM_BOUNDARY);
788 if (save_mode == BLKmode)
790 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
791 emit_block_move (validize_mem (save_area), stack_area,
792 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
796 save_area = gen_reg_rtx (save_mode);
797 emit_move_insn (save_area, stack_area);
807 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
809 enum machine_mode save_mode = GET_MODE (save_area);
813 #ifdef ARGS_GROW_DOWNWARD
814 delta = -high_to_save;
818 stack_area = gen_rtx_MEM (save_mode,
819 memory_address (save_mode,
820 plus_constant (argblock, delta)));
821 set_mem_align (stack_area, PARM_BOUNDARY);
823 if (save_mode != BLKmode)
824 emit_move_insn (stack_area, save_area);
826 emit_block_move (stack_area, validize_mem (save_area),
827 GEN_INT (high_to_save - low_to_save + 1),
830 #endif /* REG_PARM_STACK_SPACE */
832 /* If any elements in ARGS refer to parameters that are to be passed in
833 registers, but not in memory, and whose alignment does not permit a
834 direct copy into registers, copy the values into a group of pseudos
835 which we will later copy into the appropriate hard registers.
837 Pseudos for each unaligned argument will be stored into the array
838 args[argnum].aligned_regs. The caller is responsible for deallocating
839 the aligned_regs array if it is nonzero. */
842 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
846 for (i = 0; i < num_actuals; i++)
847 if (args[i].reg != 0 && ! args[i].pass_on_stack
848 && args[i].mode == BLKmode
849 && MEM_P (args[i].value)
850 && (MEM_ALIGN (args[i].value)
851 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
853 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
854 int endian_correction = 0;
858 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
859 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
863 args[i].n_aligned_regs
864 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
867 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
869 /* Structures smaller than a word are normally aligned to the
870 least significant byte. On a BYTES_BIG_ENDIAN machine,
871 this means we must skip the empty high order bytes when
872 calculating the bit offset. */
873 if (bytes < UNITS_PER_WORD
874 #ifdef BLOCK_REG_PADDING
875 && (BLOCK_REG_PADDING (args[i].mode,
876 TREE_TYPE (args[i].tree_value), 1)
882 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
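/* Worked example (illustrative): a 3-byte argument on a 32-bit big-endian
   word gives endian_correction == 32 - 24 == 8, so store_bit_field below
   places the 24 significant bits at bit offset 8 instead of 0. */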
884 for (j = 0; j < args[i].n_aligned_regs; j++)
886 rtx reg = gen_reg_rtx (word_mode);
887 rtx word = operand_subword_force (args[i].value, j, BLKmode);
888 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
890 args[i].aligned_regs[j] = reg;
891 word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
892 word_mode, word_mode);
894 /* There is no need to restrict this code to loading items
895 in TYPE_ALIGN sized hunks. The bitfield instructions can
896 load up entire word sized registers efficiently.
898 ??? This may not be needed anymore.
899 We used to emit a clobber here but that doesn't let later
900 passes optimize the instructions we emit. By storing 0 into
901 the register, later passes know that the first AND to zero out the
902 bitfield being set in the register is unnecessary. The store
903 of 0 will be deleted as will at least the first AND. */
905 emit_move_insn (reg, const0_rtx);
907 bytes -= bitsize / BITS_PER_UNIT;
908 store_bit_field (reg, bitsize, endian_correction, word_mode,
914 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
917 NUM_ACTUALS is the total number of parameters.
919 N_NAMED_ARGS is the total number of named arguments.
921 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
924 FNDECL is the tree code for the target of this call (if known)
926 ARGS_SO_FAR holds state needed by the target to know where to place
929 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
930 for arguments which are passed in registers.
932 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
933 and may be modified by this routine.
935 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
936 flags which may be modified by this routine.
938 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
939 that requires allocation of stack space.
941 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
942 the thunked-to function. */
945 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
946 struct arg_data *args,
947 struct args_size *args_size,
948 int n_named_args ATTRIBUTE_UNUSED,
949 tree exp, tree struct_value_addr_value,
950 tree fndecl, tree fntype,
951 CUMULATIVE_ARGS *args_so_far,
952 int reg_parm_stack_space,
953 rtx *old_stack_level, int *old_pending_adj,
954 int *must_preallocate, int *ecf_flags,
955 bool *may_tailcall, bool call_from_thunk_p)
957 location_t loc = EXPR_LOCATION (exp);
958 /* 1 if scanning parms front to back, -1 if scanning back to front. */
961 /* Count arg position in order args appear. */
966 args_size->constant = 0;
969 /* In this loop, we consider args in the order they are written.
970 We fill up ARGS from the front or from the back if necessary
971 so that in any case the first arg to be pushed ends up at the front. */
973 if (PUSH_ARGS_REVERSED)
975 i = num_actuals - 1, inc = -1;
976 /* In this case, we must reverse the order of the args
977 so that we compute and push the last arg first. */
984 /* First fill in the actual arguments in the ARGS array, splitting
985 complex arguments if necessary. */
988 call_expr_arg_iterator iter;
991 if (struct_value_addr_value)
993 args[j].tree_value = struct_value_addr_value;
996 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
998 tree argtype = TREE_TYPE (arg);
999 if (targetm.calls.split_complex_arg
1001 && TREE_CODE (argtype) == COMPLEX_TYPE
1002 && targetm.calls.split_complex_arg (argtype))
1004 tree subtype = TREE_TYPE (argtype);
1005 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1007 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1010 args[j].tree_value = arg;
1015 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1016 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1018 tree type = TREE_TYPE (args[i].tree_value);
1020 enum machine_mode mode;
1022 /* Replace erroneous argument with constant zero. */
1023 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1024 args[i].tree_value = integer_zero_node, type = integer_type_node;
1026 /* If TYPE is a transparent union or record, pass things the way
1027 we would pass the first field of the union or record. We have
1028 already verified that the modes are the same. */
1029 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1030 && TYPE_TRANSPARENT_AGGR (type))
1031 type = TREE_TYPE (first_field (type));
1033 /* Decide where to pass this arg.
1035 args[i].reg is nonzero if all or part is passed in registers.
1037 args[i].partial is nonzero if part but not all is passed in registers,
1038 and the exact value says how many bytes are passed in registers.
1040 args[i].pass_on_stack is nonzero if the argument must at least be
1041 computed on the stack. It may then be loaded back into registers
1042 if args[i].reg is nonzero.
1044 These decisions are driven by the FUNCTION_... macros and must agree
1045 with those made by function.c. */
1047 /* See if this argument should be passed by invisible reference. */
1048 if (pass_by_reference (args_so_far, TYPE_MODE (type),
1049 type, argpos < n_named_args))
1055 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1056 type, argpos < n_named_args);
1058 /* If we're compiling a thunk, pass through invisible references
1059 instead of making a copy. */
1060 if (call_from_thunk_p
1062 && !TREE_ADDRESSABLE (type)
1063 && (base = get_base_address (args[i].tree_value))
1064 && TREE_CODE (base) != SSA_NAME
1065 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1067 /* We can't use sibcalls if a callee-copied argument is
1068 stored in the current function's frame. */
1069 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1070 *may_tailcall = false;
1072 args[i].tree_value = build_fold_addr_expr_loc (loc,
1073 args[i].tree_value);
1074 type = TREE_TYPE (args[i].tree_value);
1076 if (*ecf_flags & ECF_CONST)
1077 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1081 /* We make a copy of the object and pass the address to the
1082 function being called. */
1085 if (!COMPLETE_TYPE_P (type)
1086 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1087 || (flag_stack_check == GENERIC_STACK_CHECK
1088 && compare_tree_int (TYPE_SIZE_UNIT (type),
1089 STACK_CHECK_MAX_VAR_SIZE) > 0))
1091 /* This is a variable-sized object. Make space on the stack
1093 rtx size_rtx = expr_size (args[i].tree_value);
1095 if (*old_stack_level == 0)
1097 emit_stack_save (SAVE_BLOCK, old_stack_level);
1098 *old_pending_adj = pending_stack_adjust;
1099 pending_stack_adjust = 0;
1102 /* We can pass TRUE as the 4th argument because we just
1103 saved the stack pointer and will restore it right after
1105 copy = allocate_dynamic_stack_space (size_rtx,
1109 copy = gen_rtx_MEM (BLKmode, copy);
1110 set_mem_attributes (copy, type, 1);
1113 copy = assign_temp (type, 0, 1, 0);
1115 store_expr (args[i].tree_value, copy, 0, false);
1117 /* Just change the const function to pure and then let
1118 the next test clear the pure based on
1120 if (*ecf_flags & ECF_CONST)
1122 *ecf_flags &= ~ECF_CONST;
1123 *ecf_flags |= ECF_PURE;
1126 if (!callee_copies && *ecf_flags & ECF_PURE)
1127 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1130 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1131 type = TREE_TYPE (args[i].tree_value);
1132 *may_tailcall = false;
1136 unsignedp = TYPE_UNSIGNED (type);
1137 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1138 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1140 args[i].unsignedp = unsignedp;
1141 args[i].mode = mode;
1143 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1144 argpos < n_named_args);
1146 /* If this is a sibling call and the machine has register windows, the
1147 register window has to be unwound before calling the routine, so
1148 arguments have to go into the incoming registers. */
1149 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1150 args[i].tail_call_reg
1151 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1152 argpos < n_named_args);
1154 args[i].tail_call_reg = args[i].reg;
1158 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1159 argpos < n_named_args);
1161 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1163 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1164 it means that we are to pass this arg in the register(s) designated
1165 by the PARALLEL, but also to pass it in the stack. */
1166 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1167 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1168 args[i].pass_on_stack = 1;
1170 /* If this is an addressable type, we must preallocate the stack
1171 since we must evaluate the object into its final location.
1173 If this is to be passed in both registers and the stack, it is simpler
1175 if (TREE_ADDRESSABLE (type)
1176 || (args[i].pass_on_stack && args[i].reg != 0))
1177 *must_preallocate = 1;
1179 /* Compute the stack-size of this argument. */
1180 if (args[i].reg == 0 || args[i].partial != 0
1181 || reg_parm_stack_space > 0
1182 || args[i].pass_on_stack)
1183 locate_and_pad_parm (mode, type,
1184 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1189 args[i].pass_on_stack ? 0 : args[i].partial,
1190 fndecl, args_size, &args[i].locate);
1191 #ifdef BLOCK_REG_PADDING
1193 /* The argument is passed entirely in registers. See at which
1194 end it should be padded. */
1195 args[i].locate.where_pad =
1196 BLOCK_REG_PADDING (mode, type,
1197 int_size_in_bytes (type) <= UNITS_PER_WORD);
1200 /* Update ARGS_SIZE, the total stack space for args so far. */
1202 args_size->constant += args[i].locate.size.constant;
1203 if (args[i].locate.size.var)
1204 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1206 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1207 have been used, etc. */
1209 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1210 type, argpos < n_named_args);
1214 /* Update ARGS_SIZE to contain the total size for the argument block.
1215 Return the original constant component of the argument block's size.
1217 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1218 for arguments passed in registers. */
1221 compute_argument_block_size (int reg_parm_stack_space,
1222 struct args_size *args_size,
1223 tree fndecl ATTRIBUTE_UNUSED,
1224 tree fntype ATTRIBUTE_UNUSED,
1225 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1227 int unadjusted_args_size = args_size->constant;
1229 /* For accumulate outgoing args mode we don't need to align, since the frame
1230 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1231 backends from generating misaligned frame sizes. */
1232 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1233 preferred_stack_boundary = STACK_BOUNDARY;
1235 /* Compute the actual size of the argument block required. The variable
1236 and constant sizes must be combined, the size may have to be rounded,
1237 and there may be a minimum required size. */
1241 args_size->var = ARGS_SIZE_TREE (*args_size);
1242 args_size->constant = 0;
1244 preferred_stack_boundary /= BITS_PER_UNIT;
1245 if (preferred_stack_boundary > 1)
1247 /* We don't handle this case yet. To handle it correctly we have
1248 to add the delta, round and subtract the delta.
1249 Currently no machine description requires this support. */
1250 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1251 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1254 if (reg_parm_stack_space > 0)
1257 = size_binop (MAX_EXPR, args_size->var,
1258 ssize_int (reg_parm_stack_space));
1260 /* The area corresponding to register parameters is not to be counted in
1261 the size of the block we need. So make the adjustment. */
1262 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1264 = size_binop (MINUS_EXPR, args_size->var,
1265 ssize_int (reg_parm_stack_space));
1270 preferred_stack_boundary /= BITS_PER_UNIT;
1271 if (preferred_stack_boundary < 1)
1272 preferred_stack_boundary = 1;
1273 args_size->constant = (((args_size->constant
1274 + stack_pointer_delta
1275 + preferred_stack_boundary - 1)
1276 / preferred_stack_boundary
1277 * preferred_stack_boundary)
1278 - stack_pointer_delta);
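/* Worked example with illustrative numbers: args_size->constant == 20,
   stack_pointer_delta == 4 and a 16-byte preferred_stack_boundary give
   ((20 + 4 + 15) / 16) * 16 - 4 == 28, so pushing 28 bytes on top of the
   existing 4-byte delta leaves the stack pointer 16-byte aligned. */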
1280 args_size->constant = MAX (args_size->constant,
1281 reg_parm_stack_space);
1283 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1284 args_size->constant -= reg_parm_stack_space;
1286 return unadjusted_args_size;
1289 /* Precompute parameters as needed for a function call.
1291 FLAGS is mask of ECF_* constants.
1293 NUM_ACTUALS is the number of arguments.
1295 ARGS is an array containing information for each argument; this
1296 routine fills in the INITIAL_VALUE and VALUE fields for each
1297 precomputed argument. */
1300 precompute_arguments (int num_actuals, struct arg_data *args)
1304 /* If this is a libcall, then precompute all arguments so that we do not
1305 get extraneous instructions emitted as part of the libcall sequence. */
1307 /* If we preallocated the stack space, and some arguments must be passed
1308 on the stack, then we must precompute any parameter which contains a
1309 function call which will store arguments on the stack.
1310 Otherwise, evaluating the parameter may clobber previous parameters
1311 which have already been stored into the stack. (We have code to avoid
1312 such a case by saving the outgoing stack arguments, but it results in
1314 if (!ACCUMULATE_OUTGOING_ARGS)
1317 for (i = 0; i < num_actuals; i++)
1320 enum machine_mode mode;
1322 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1325 /* If this is an addressable type, we cannot pre-evaluate it. */
1326 type = TREE_TYPE (args[i].tree_value);
1327 gcc_assert (!TREE_ADDRESSABLE (type));
1329 args[i].initial_value = args[i].value
1330 = expand_normal (args[i].tree_value);
1332 mode = TYPE_MODE (type);
1333 if (mode != args[i].mode)
1335 int unsignedp = args[i].unsignedp;
1337 = convert_modes (args[i].mode, mode,
1338 args[i].value, args[i].unsignedp);
1340 /* CSE will replace this only if it contains args[i].value
1341 pseudo, so convert it down to the declared mode using
1343 if (REG_P (args[i].value)
1344 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1345 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1347 args[i].initial_value
1348 = gen_lowpart_SUBREG (mode, args[i].value);
1349 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1350 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1357 /* Given the current state of MUST_PREALLOCATE and information about
1358 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1359 compute and return the final value for MUST_PREALLOCATE. */
1362 finalize_must_preallocate (int must_preallocate, int num_actuals,
1363 struct arg_data *args, struct args_size *args_size)
1365 /* See if we have or want to preallocate stack space.
1367 If we would have to push a partially-in-regs parm
1368 before other stack parms, preallocate stack space instead.
1370 If the size of some parm is not a multiple of the required stack
1371 alignment, we must preallocate.
1373 If the total size of arguments that would otherwise create a copy in
1374 a temporary (such as a CALL) is more than half the total argument list
1375 size, preallocation is faster.
1377 Another reason to preallocate is if we have a machine (like the m88k)
1378 where stack alignment is required to be maintained between every
1379 pair of insns, not just when the call is made. However, we assume here
1380 that such machines either do not have push insns (and hence preallocation
1381 would occur anyway) or the problem is taken care of with
1384 if (! must_preallocate)
1386 int partial_seen = 0;
1387 int copy_to_evaluate_size = 0;
1390 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1392 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1394 else if (partial_seen && args[i].reg == 0)
1395 must_preallocate = 1;
1397 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1398 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1399 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1400 || TREE_CODE (args[i].tree_value) == COND_EXPR
1401 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1402 copy_to_evaluate_size
1403 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1406 if (copy_to_evaluate_size * 2 >= args_size->constant
1407 && args_size->constant > 0)
1408 must_preallocate = 1;
1410 return must_preallocate;
1413 /* If we preallocated stack space, compute the address of each argument
1414 and store it into the ARGS array.
1416 We need not ensure it is a valid memory address here; it will be
1417 validized when it is used.
1419 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1422 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1426 rtx arg_reg = argblock;
1427 int i, arg_offset = 0;
1429 if (GET_CODE (argblock) == PLUS)
1430 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1432 for (i = 0; i < num_actuals; i++)
1434 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1435 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1437 unsigned int align, boundary;
1438 unsigned int units_on_stack = 0;
1439 enum machine_mode partial_mode = VOIDmode;
1441 /* Skip this parm if it will not be passed on the stack. */
1442 if (! args[i].pass_on_stack
1444 && args[i].partial == 0)
1447 if (CONST_INT_P (offset))
1448 addr = plus_constant (arg_reg, INTVAL (offset));
1450 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1452 addr = plus_constant (addr, arg_offset);
1454 if (args[i].partial != 0)
1456 /* Only part of the parameter is being passed on the stack.
1457 Generate a simple memory reference of the correct size. */
1458 units_on_stack = args[i].locate.size.constant;
1459 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1461 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1462 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1466 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1467 set_mem_attributes (args[i].stack,
1468 TREE_TYPE (args[i].tree_value), 1);
1470 align = BITS_PER_UNIT;
1471 boundary = args[i].locate.boundary;
1472 if (args[i].locate.where_pad != downward)
1474 else if (CONST_INT_P (offset))
1476 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1477 align = align & -align;
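/* Illustrative example: a slot at byte offset 6 within a 64-bit aligned
   block gives 6 * 8 | 64 == 112, and 112 & -112 == 16, i.e. only 16-bit
   (2-byte) alignment can be assumed for the slot. */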
1479 set_mem_align (args[i].stack, align);
1481 if (CONST_INT_P (slot_offset))
1482 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1484 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1486 addr = plus_constant (addr, arg_offset);
1488 if (args[i].partial != 0)
1490 /* Only part of the parameter is being passed on the stack.
1491 Generate a simple memory reference of the correct size.
1493 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1494 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1498 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1499 set_mem_attributes (args[i].stack_slot,
1500 TREE_TYPE (args[i].tree_value), 1);
1502 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1504 /* Function incoming arguments may overlap with sibling call
1505 outgoing arguments and we cannot allow reordering of reads
1506 from function arguments with stores to outgoing arguments
1507 of sibling calls. */
1508 set_mem_alias_set (args[i].stack, 0);
1509 set_mem_alias_set (args[i].stack_slot, 0);
1514 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1515 in a call instruction.
1517 FNDECL is the tree node for the target function. For an indirect call
1518 FNDECL will be NULL_TREE.
1520 ADDR is the operand 0 of CALL_EXPR for this call. */
1523 rtx_for_function_call (tree fndecl, tree addr)
1527 /* Get the function to call, in the form of RTL. */
1530 /* If this is the first use of the function, see if we need to
1531 make an external definition for it. */
1532 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1534 assemble_external (fndecl);
1535 TREE_USED (fndecl) = 1;
1538 /* Get a SYMBOL_REF rtx for the function address. */
1539 funexp = XEXP (DECL_RTL (fndecl), 0);
1542 /* Generate an rtx (probably a pseudo-register) for the address. */
1545 funexp = expand_normal (addr);
1546 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1551 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
1554 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1555 or NULL_RTX if none has been scanned yet. */
1557 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1558 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1559 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1560 with fixed offset, or PC if this is with variable or unknown offset. */
1561 VEC(rtx, heap) *cache;
1562 } internal_arg_pointer_exp_state;
1564 static rtx internal_arg_pointer_based_exp (rtx, bool);
1566 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1567 the tail call sequence, starting with the first insn that hasn't been
1568 scanned yet, and note for each pseudo on the LHS whether it is based
1569 on crtl->args.internal_arg_pointer or not, and what offset from
1570 that pointer it has. */
1573 internal_arg_pointer_based_exp_scan (void)
1575 rtx insn, scan_start = internal_arg_pointer_exp_state.scan_start;
1577 if (scan_start == NULL_RTX)
1578 insn = get_insns ();
1580 insn = NEXT_INSN (scan_start);
1584 rtx set = single_set (insn);
1585 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1588 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1589 /* Punt on pseudos set multiple times. */
1590 if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache)
1591 && (VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx)
1595 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1596 if (val != NULL_RTX)
1599 >= VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1600 VEC_safe_grow_cleared (rtx, heap,
1601 internal_arg_pointer_exp_state.cache,
1603 VEC_replace (rtx, internal_arg_pointer_exp_state.cache,
1607 if (NEXT_INSN (insn) == NULL_RTX)
1609 insn = NEXT_INSN (insn);
1612 internal_arg_pointer_exp_state.scan_start = scan_start;
1615 /* Helper function for internal_arg_pointer_based_exp, called through
1616 for_each_rtx. Return 1 if *LOC is a register based on
1617 crtl->args.internal_arg_pointer. Return -1 if *LOC is not based on it
1618 and the subexpressions need not be examined. Otherwise return 0. */
1621 internal_arg_pointer_based_exp_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1623 if (REG_P (*loc) && internal_arg_pointer_based_exp (*loc, false) != NULL_RTX)
1630 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1631 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1632 it with fixed offset, or PC if this is with variable or unknown offset.
1633 TOPLEVEL is true if the function is invoked at the topmost level. */
1636 internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
1638 if (CONSTANT_P (rtl))
1641 if (rtl == crtl->args.internal_arg_pointer)
1644 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1647 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1649 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1650 if (val == NULL_RTX || val == pc_rtx)
1652 return plus_constant (val, INTVAL (XEXP (rtl, 1)));
1655 /* When called at the topmost level, scan pseudo assignments in between the
1656 last scanned instruction in the tail call sequence and the latest insn
1657 in that sequence. */
1659 internal_arg_pointer_based_exp_scan ();
1663 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
1664 if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1665 return VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx);
1670 if (for_each_rtx (&rtl, internal_arg_pointer_based_exp_1, NULL))
1676 /* Return true if and only if SIZE storage units (usually bytes)
1677 starting from address ADDR overlap with already clobbered argument
1678 area. This function is used to determine if we should give up a
1682 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1687 if (sbitmap_empty_p (stored_args_map))
1689 val = internal_arg_pointer_based_exp (addr, true);
1690 if (val == NULL_RTX)
1692 else if (val == pc_rtx)
1696 #ifdef STACK_GROWS_DOWNWARD
1697 i -= crtl->args.pretend_args_size;
1699 i += crtl->args.pretend_args_size;
1702 #ifdef ARGS_GROW_DOWNWARD
1707 unsigned HOST_WIDE_INT k;
1709 for (k = 0; k < size; k++)
1710 if (i + k < stored_args_map->n_bits
1711 && TEST_BIT (stored_args_map, i + k))
1718 /* Do the register loads required for any wholly-register parms or any
1719 parms which are passed both on the stack and in a register. Their
1720 expressions were already evaluated.
1722 Mark all register-parms as living through the call, putting these USE
1723 insns in the CALL_INSN_FUNCTION_USAGE field.
1725 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1726 checking, setting *SIBCALL_FAILURE if appropriate. */
1729 load_register_parameters (struct arg_data *args, int num_actuals,
1730 rtx *call_fusage, int flags, int is_sibcall,
1731 int *sibcall_failure)
1735 for (i = 0; i < num_actuals; i++)
1737 rtx reg = ((flags & ECF_SIBCALL)
1738 ? args[i].tail_call_reg : args[i].reg);
1741 int partial = args[i].partial;
1744 rtx before_arg = get_last_insn ();
1745 /* Set non-negative if we must move a word at a time, even if
1746 just one word (e.g., partial == 4 && mode == DFmode). Set
1747 to -1 if we just use a normal move insn. This value can be
1748 zero if the argument is a zero size structure. */
1750 if (GET_CODE (reg) == PARALLEL)
1754 gcc_assert (partial % UNITS_PER_WORD == 0);
1755 nregs = partial / UNITS_PER_WORD;
1757 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1759 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1760 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
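/* E.g. (illustrative) a 10-byte BLKmode argument on a target with 4-byte
   words needs (10 + 3) / 4 == 3 word-sized registers. */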
1763 size = GET_MODE_SIZE (args[i].mode);
1765 /* Handle calls that pass values in multiple non-contiguous
1766 locations. The Irix 6 ABI has examples of this. */
1768 if (GET_CODE (reg) == PARALLEL)
1769 emit_group_move (reg, args[i].parallel_value);
1771 /* If simple case, just do move. If normal partial, store_one_arg
1772 has already loaded the register for us. In all other cases,
1773 load the register(s) from memory. */
1775 else if (nregs == -1)
1777 emit_move_insn (reg, args[i].value);
1778 #ifdef BLOCK_REG_PADDING
1779 /* Handle the case where we have a value that needs shifting
1780 up to the msb, e.g. a QImode value and we're padding
1781 upward on a BYTES_BIG_ENDIAN machine. */
1782 if (size < UNITS_PER_WORD
1783 && (args[i].locate.where_pad
1784 == (BYTES_BIG_ENDIAN ? upward : downward)))
1787 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
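/* E.g. (illustrative) a 1-byte QImode value with 4-byte words gives
   shift == (4 - 1) * 8 == 24, moving the byte into the most significant
   position of the word. */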
1789 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1790 report the whole reg as used. Strictly speaking, the
1791 call only uses SIZE bytes at the msb end, but it doesn't
1792 seem worth generating rtl to say that. */
1793 reg = gen_rtx_REG (word_mode, REGNO (reg));
1794 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1795 build_int_cst (NULL_TREE, shift),
1798 emit_move_insn (reg, x);
1803 /* If we have pre-computed the values to put in the registers in
1804 the case of non-aligned structures, copy them in now. */
1806 else if (args[i].n_aligned_regs != 0)
1807 for (j = 0; j < args[i].n_aligned_regs; j++)
1808 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1809 args[i].aligned_regs[j]);
1811 else if (partial == 0 || args[i].pass_on_stack)
1813 rtx mem = validize_mem (args[i].value);
1815 /* Check for overlap with already clobbered argument area,
1816 providing that this has non-zero size. */
1819 || mem_overlaps_already_clobbered_arg_p
1820 (XEXP (args[i].value, 0), size)))
1821 *sibcall_failure = 1;
1823 /* Handle a BLKmode that needs shifting. */
1824 if (nregs == 1 && size < UNITS_PER_WORD
1825 #ifdef BLOCK_REG_PADDING
1826 && args[i].locate.where_pad == downward
1832 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1833 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1834 rtx x = gen_reg_rtx (word_mode);
1835 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1836 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1839 emit_move_insn (x, tem);
1840 x = expand_shift (dir, word_mode, x,
1841 build_int_cst (NULL_TREE, shift),
1844 emit_move_insn (ri, x);
1847 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1850 /* When a parameter is a block, and perhaps in other cases, it is
1851 possible that it did a load from an argument slot that was
1852 already clobbered. */
1854 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1855 *sibcall_failure = 1;
1857 /* Handle calls that pass values in multiple non-contiguous
1858 locations. The Irix 6 ABI has examples of this. */
1859 if (GET_CODE (reg) == PARALLEL)
1860 use_group_regs (call_fusage, reg);
1861 else if (nregs == -1)
1862 use_reg (call_fusage, reg);
1864 use_regs (call_fusage, REGNO (reg), nregs);
1869 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1870 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1871 bytes, then we would need to push some additional bytes to pad the
1872 arguments. So, we compute an adjustment to the stack pointer for an
1873 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1874 bytes. Then, when the arguments are pushed the stack will be perfectly
1875 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1876 be popped after the call. Returns the adjustment. */
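/* A worked example with assumed numbers: with a 16-byte boundary,
   stack_pointer_delta == 0, unadjusted_args_size == 8 and
   pending_stack_adjust == 32, the adjustment computed below is 24; popping
   24 bytes now and then pushing the 8 bytes of arguments leaves the stack
   16-byte aligned, and ARGS_SIZE->CONSTANT becomes 32 - 24 + 8 == 16. */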
1879 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1880 struct args_size *args_size,
1881 unsigned int preferred_unit_stack_boundary)
1883 /* The number of bytes to pop so that the stack will be
1884 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1885 HOST_WIDE_INT adjustment;
1886 /* The alignment of the stack after the arguments are pushed, if we
1887 just pushed the arguments without adjusting the stack here. */
1888 unsigned HOST_WIDE_INT unadjusted_alignment;
1890 unadjusted_alignment
1891 = ((stack_pointer_delta + unadjusted_args_size)
1892 % preferred_unit_stack_boundary);
1894 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1895 as possible -- leaving just enough left to cancel out the
1896 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1897 PENDING_STACK_ADJUST is non-negative, and congruent to
1898 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1900 /* Begin by trying to pop all the bytes. */
1901 unadjusted_alignment
1902 = (unadjusted_alignment
1903 - (pending_stack_adjust % preferred_unit_stack_boundary));
1904 adjustment = pending_stack_adjust;
1905 /* Push enough additional bytes that the stack will be aligned
1906 after the arguments are pushed. */
1907 if (preferred_unit_stack_boundary > 1)
1909 if (unadjusted_alignment > 0)
1910 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1912 adjustment += unadjusted_alignment;
1915 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1916 bytes after the call. The right number is the entire
1917 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1918 by the arguments in the first place. */
1920 = pending_stack_adjust - adjustment + unadjusted_args_size;
1925 /* Scan expression X and return nonzero if it dereferences any argument
1926 slot we have already clobbered with tail call arguments (as noted in
1928 the stored_args_map bitmap); return zero otherwise. */
1932 check_sibcall_argument_overlap_1 (rtx x)
1941 code = GET_CODE (x);
1944 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1945 GET_MODE_SIZE (GET_MODE (x)));
1947 /* Scan all subexpressions. */
1948 fmt = GET_RTX_FORMAT (code);
1949 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1953 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1956 else if (*fmt == 'E')
1958 for (j = 0; j < XVECLEN (x, i); j++)
1959 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1966 /* Scan the sequence after INSN and return nonzero if it dereferences
1967 any argument slot we have already clobbered with tail call arguments
1968 (as noted in the stored_args_map bitmap); return zero otherwise.
1969 If MARK_STORED_ARGS_MAP is nonzero, afterwards add ARG's stack slots to
1970 the stored_args_map bitmap (when ARG is passed in a register,
1971 MARK_STORED_ARGS_MAP should be 0). */
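/* Purely illustrative example: consider a tail call that swaps its own
   incoming arguments,

	int f (int a, int b) { return g (b, a); }

   On a target that passes these arguments on the stack, storing g's first
   argument clobbers the slot holding f's incoming A; if a later insn in the
   sequence then loads from that slot to set up g's second argument, the
   routines above report the overlap and the call falls back to a normal
   (non-sibling) call.  */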
1974 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1978 if (insn == NULL_RTX)
1979 insn = get_insns ();
1981 insn = NEXT_INSN (insn);
1983 for (; insn; insn = NEXT_INSN (insn))
1985 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1988 if (mark_stored_args_map)
1990 #ifdef ARGS_GROW_DOWNWARD
1991 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1993 low = arg->locate.slot_offset.constant;
1996 for (high = low + arg->locate.size.constant; low < high; low++)
1997 SET_BIT (stored_args_map, low);
1999 return insn != NULL_RTX;
2002 /* Given that a function returns a value of mode MODE at the most
2003 significant end of hard register VALUE, shift VALUE left or right
2004 as specified by LEFT_P. Return true if some action was needed. */
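/* Example with hypothetical widths: if an SImode (32-bit) value is returned
   in the most significant end of a 64-bit hard register, SHIFT below is
   64 - 32 == 32, and with LEFT_P false the register is shifted right
   (arithmetically) by 32 bits so the value lands in the least significant
   half.  */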
2007 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2009 HOST_WIDE_INT shift;
2011 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2012 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2016 /* Use ashr rather than lshr for right shifts. This is for the benefit
2017 of the MIPS port, which requires SImode values to be sign-extended
2018 when stored in 64-bit registers. */
2019 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2020 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2025 /* If X is a likely-spilled register value, copy it to a pseudo
2026 register and return that register. Return X otherwise. */
2029 avoid_likely_spilled_reg (rtx x)
2034 && HARD_REGISTER_P (x)
2035 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2037 /* Make sure that we generate a REG rather than a CONCAT.
2038 Moves into CONCATs can need nontrivial instructions,
2039 and the whole point of this function is to avoid
2040 using the hard register directly in such a situation. */
2041 generating_concat_p = 0;
2042 new_rtx = gen_reg_rtx (GET_MODE (x));
2043 generating_concat_p = 1;
2044 emit_move_insn (new_rtx, x);
2050 /* Generate all the code for a CALL_EXPR exp
2051 and return an rtx for its value.
2052 Store the value in TARGET (specified as an rtx) if convenient.
2053 If the value is stored in TARGET then TARGET is returned.
2054 If IGNORE is nonzero, then we ignore the value of the function call. */
2057 expand_call (tree exp, rtx target, int ignore)
2059 /* Nonzero if we are currently expanding a call. */
2060 static int currently_expanding_call = 0;
2062 /* RTX for the function to be called. */
2064 /* Sequence of insns to perform a normal "call". */
2065 rtx normal_call_insns = NULL_RTX;
2066 /* Sequence of insns to perform a tail "call". */
2067 rtx tail_call_insns = NULL_RTX;
2068 /* Data type of the function. */
2070 tree type_arg_types;
2072 /* Declaration of the function being called,
2073 or 0 if the function is computed (not known by name). */
2075 /* The type of the function being called. */
2077 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2080 /* Register in which non-BLKmode value will be returned,
2081 or 0 if no value or if value is BLKmode. */
2083 /* Address where we should return a BLKmode value;
2084 0 if value not BLKmode. */
2085 rtx structure_value_addr = 0;
2086 /* Nonzero if that address is being passed by treating it as
2087 an extra, implicit first parameter. Otherwise,
2088 it is passed by being copied directly into struct_value_rtx. */
2089 int structure_value_addr_parm = 0;
2090 /* Holds the value of implicit argument for the struct value. */
2091 tree structure_value_addr_value = NULL_TREE;
2092 /* Size of aggregate value wanted, or zero if none wanted
2093 or if we are using the non-reentrant PCC calling convention
2094 or expecting the value in registers. */
2095 HOST_WIDE_INT struct_value_size = 0;
2096 /* Nonzero if called function returns an aggregate in memory PCC style,
2097 by returning the address of where to find it. */
2098 int pcc_struct_value = 0;
2099 rtx struct_value = 0;
2101 /* Number of actual parameters in this call, including struct value addr. */
2103 /* Number of named args. Args after this are anonymous ones
2104 and they must all go on the stack. */
2106 /* Number of complex actual arguments that need to be split. */
2107 int num_complex_actuals = 0;
2109 /* Vector of information about each argument.
2110 Arguments are numbered in the order they will be pushed,
2111 not the order they are written. */
2112 struct arg_data *args;
2114 /* Total size in bytes of all the stack-parms scanned so far. */
2115 struct args_size args_size;
2116 struct args_size adjusted_args_size;
2117 /* Size of arguments before any adjustments (such as rounding). */
2118 int unadjusted_args_size;
2119 /* Data on reg parms scanned so far. */
2120 CUMULATIVE_ARGS args_so_far;
2121 /* Nonzero if a reg parm has been scanned. */
2123 /* Nonzero if this is an indirect function call. */
2125 /* Nonzero if we must avoid push-insns in the args for this call.
2126 If stack space is allocated for register parameters, but not by the
2127 caller, then it is preallocated in the fixed part of the stack frame.
2128 So the entire argument block must then be preallocated (i.e., we
2129 ignore PUSH_ROUNDING in that case). */
2131 int must_preallocate = !PUSH_ARGS;
2133 /* Size of the stack reserved for parameter registers. */
2134 int reg_parm_stack_space = 0;
2136 /* Address of space preallocated for stack parms
2137 (on machines that lack push insns), or 0 if space not preallocated. */
2140 /* Mask of ECF_ flags. */
2142 #ifdef REG_PARM_STACK_SPACE
2143 /* Define the boundary of the register parm stack space that needs to be
2145 int low_to_save, high_to_save;
2146 rtx save_area = 0; /* Place that it is saved */
2149 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2150 char *initial_stack_usage_map = stack_usage_map;
2151 char *stack_usage_map_buf = NULL;
2153 int old_stack_allocated;
2155 /* State variables to track stack modifications. */
2156 rtx old_stack_level = 0;
2157 int old_stack_arg_under_construction = 0;
2158 int old_pending_adj = 0;
2159 int old_inhibit_defer_pop = inhibit_defer_pop;
2161 /* Some stack pointer alterations we make are performed via
2162 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2163 which we then also need to save/restore along the way. */
2164 int old_stack_pointer_delta = 0;
2167 tree addr = CALL_EXPR_FN (exp);
2169 /* The alignment of the stack, in bits. */
2170 unsigned HOST_WIDE_INT preferred_stack_boundary;
2171 /* The alignment of the stack, in bytes. */
2172 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2173 /* The static chain value to use for this call. */
2174 rtx static_chain_value;
2175 /* See if this is a "nothrow" function call. */
2176 if (TREE_NOTHROW (exp))
2177 flags |= ECF_NOTHROW;
2179 /* See if we can find a DECL-node for the actual function, and get the
2180 function attributes (flags) from the function decl or type node. */
2181 fndecl = get_callee_fndecl (exp);
2184 fntype = TREE_TYPE (fndecl);
2185 flags |= flags_from_decl_or_type (fndecl);
2189 fntype = TREE_TYPE (TREE_TYPE (addr));
2190 flags |= flags_from_decl_or_type (fntype);
2192 rettype = TREE_TYPE (exp);
2194 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2196 /* Warn if this value is an aggregate type,
2197 regardless of which calling convention we are using for it. */
2198 if (AGGREGATE_TYPE_P (rettype))
2199 warning (OPT_Waggregate_return, "function call has aggregate value");
2201 /* If the result of a non-looping pure or const function call is
2202 ignored (or void), and none of its arguments are volatile, we can
2203 avoid expanding the call and just evaluate the arguments for
2205 if ((flags & (ECF_CONST | ECF_PURE))
2206 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2207 && (ignore || target == const0_rtx
2208 || TYPE_MODE (rettype) == VOIDmode))
2210 bool volatilep = false;
2212 call_expr_arg_iterator iter;
2214 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2215 if (TREE_THIS_VOLATILE (arg))
2223 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2224 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
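/* Source-level illustration (hypothetical declaration): given
	extern int sq (int) __attribute__ ((const));
   a statement like sq (x); whose result is unused reaches this point and
   emits no call at all; each argument is expanded only for whatever side
   effects it may have.  */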
2229 #ifdef REG_PARM_STACK_SPACE
2230 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2233 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2234 && reg_parm_stack_space > 0 && PUSH_ARGS)
2235 must_preallocate = 1;
2237 /* Set up a place to return a structure. */
2239 /* Cater to broken compilers. */
2240 if (aggregate_value_p (exp, fntype))
2242 /* This call returns a big structure. */
2243 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2245 #ifdef PCC_STATIC_STRUCT_RETURN
2247 pcc_struct_value = 1;
2249 #else /* not PCC_STATIC_STRUCT_RETURN */
2251 struct_value_size = int_size_in_bytes (rettype);
2253 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2254 structure_value_addr = XEXP (target, 0);
2257 /* For variable-sized objects, we must be called with a target
2258 specified. If we were to allocate space on the stack here,
2259 we would have no way of knowing when to free it. */
2260 rtx d = assign_temp (rettype, 0, 1, 1);
2262 mark_temp_addr_taken (d);
2263 structure_value_addr = XEXP (d, 0);
2267 #endif /* not PCC_STATIC_STRUCT_RETURN */
2270 /* Figure out the amount to which the stack should be aligned. */
2271 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2274 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2275 /* Without automatic stack alignment, we can't increase preferred
2276 stack boundary. With automatic stack alignment, it is
2277 unnecessary since unless we can guarantee that all callers will
2278 align the outgoing stack properly, callee has to align its
2281 && i->preferred_incoming_stack_boundary
2282 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2283 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2286 /* Operand 0 is a pointer-to-function; get the type of the function. */
2287 funtype = TREE_TYPE (addr);
2288 gcc_assert (POINTER_TYPE_P (funtype));
2289 funtype = TREE_TYPE (funtype);
2291 /* Count whether there are actual complex arguments that need to be split
2292 into their real and imaginary parts. Munge the type_arg_types
2293 appropriately here as well. */
2294 if (targetm.calls.split_complex_arg)
2296 call_expr_arg_iterator iter;
2298 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2300 tree type = TREE_TYPE (arg);
2301 if (type && TREE_CODE (type) == COMPLEX_TYPE
2302 && targetm.calls.split_complex_arg (type))
2303 num_complex_actuals++;
2305 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2308 type_arg_types = TYPE_ARG_TYPES (funtype);
2310 if (flags & ECF_MAY_BE_ALLOCA)
2311 cfun->calls_alloca = 1;
2313 /* If struct_value_rtx is 0, it means pass the address
2314 as if it were an extra parameter. Put the argument expression
2315 in structure_value_addr_value. */
2316 if (structure_value_addr && struct_value == 0)
2318 /* If structure_value_addr is a REG other than
2319 virtual_outgoing_args_rtx, we can always use it. If it
2320 is not a REG, we must always copy it into a register.
2321 If it is virtual_outgoing_args_rtx, we must copy it to another
2322 register in some cases. */
2323 rtx temp = (!REG_P (structure_value_addr)
2324 || (ACCUMULATE_OUTGOING_ARGS
2325 && stack_arg_under_construction
2326 && structure_value_addr == virtual_outgoing_args_rtx)
2327 ? copy_addr_to_reg (convert_memory_address
2328 (Pmode, structure_value_addr))
2329 : structure_value_addr);
2331 structure_value_addr_value =
2332 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2333 structure_value_addr_parm = 1;
2336 /* Count the arguments and set NUM_ACTUALS. */
2338 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2340 /* Compute number of named args.
2341 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2343 if (type_arg_types != 0)
2345 = (list_length (type_arg_types)
2346 /* Count the struct value address, if it is passed as a parm. */
2347 + structure_value_addr_parm);
2349 /* If we know nothing, treat all args as named. */
2350 n_named_args = num_actuals;
2352 /* Start updating where the next arg would go.
2354 On some machines (such as the PA) indirect calls have a different
2355 calling convention than normal calls. The fourth argument in
2356 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2358 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2360 /* Now possibly adjust the number of named args.
2361 Normally, don't include the last named arg if anonymous args follow.
2362 We do include the last named arg if
2363 targetm.calls.strict_argument_naming() returns nonzero.
2364 (If no anonymous args follow, the result of list_length is actually
2365 one too large. This is harmless.)
2367 If targetm.calls.pretend_outgoing_varargs_named() returns
2368 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2369 this machine will be able to place unnamed args that were passed
2370 in registers into the stack. So treat all args as named. This
2371 allows the insns emitted for a specific argument list to be
2372 independent of the function declaration.
2374 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2375 we do not have any reliable way to pass unnamed args in
2376 registers, so we must force them into memory. */
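/* Illustrative example: for a hypothetical prototyped variadic callee
	void log_msg (const char *fmt, ...);
   TYPE_ARG_TYPES contains a single entry (the pointer type) with no
   terminating void node, so the raw count above is 1 (plus one if the
   struct value address is passed as a parm); whether that last named
   argument stays counted depends on the two target hooks tested below.  */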
2378 if (type_arg_types != 0
2379 && targetm.calls.strict_argument_naming (&args_so_far))
2381 else if (type_arg_types != 0
2382 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2383 /* Don't include the last named arg. */
2386 /* Treat all args as named. */
2387 n_named_args = num_actuals;
2389 /* Make a vector to hold all the information about each arg. */
2390 args = XALLOCAVEC (struct arg_data, num_actuals);
2391 memset (args, 0, num_actuals * sizeof (struct arg_data));
2393 /* Build up entries in the ARGS array, compute the size of the
2394 arguments into ARGS_SIZE, etc. */
2395 initialize_argument_information (num_actuals, args, &args_size,
2397 structure_value_addr_value, fndecl, fntype,
2398 &args_so_far, reg_parm_stack_space,
2399 &old_stack_level, &old_pending_adj,
2400 &must_preallocate, &flags,
2401 &try_tail_call, CALL_FROM_THUNK_P (exp));
2404 must_preallocate = 1;
2406 /* Now make final decision about preallocating stack space. */
2407 must_preallocate = finalize_must_preallocate (must_preallocate,
2411 /* If the structure value address will reference the stack pointer, we
2412 must stabilize it. We don't need to do this if we know that we are
2413 not going to adjust the stack pointer in processing this call. */
2415 if (structure_value_addr
2416 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2417 || reg_mentioned_p (virtual_outgoing_args_rtx,
2418 structure_value_addr))
2420 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2421 structure_value_addr = copy_to_reg (structure_value_addr);
2423 /* Tail calls can make things harder to debug, and we've traditionally
2424 pushed these optimizations into -O2. Don't try if we're already
2425 expanding a call, as that means we're an argument. Don't try if
2426 there are cleanups, as we know there's code to follow the call. */
2428 if (currently_expanding_call++ != 0
2429 || !flag_optimize_sibling_calls
2431 || dbg_cnt (tail_call) == false)
2434 /* Other reasons for tail call optimization to fail. */
2436 #ifdef HAVE_sibcall_epilogue
2437 !HAVE_sibcall_epilogue
2442 /* Doing sibling call optimization needs some work, since
2443 structure_value_addr can be allocated on the stack.
2444 It does not seem worth the effort since few optimizable
2445 sibling calls will return a structure. */
2446 || structure_value_addr != NULL_RTX
2447 #ifdef REG_PARM_STACK_SPACE
2448 /* If the outgoing reg parm stack space changes, we cannot do a sibcall. */
2449 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2450 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2451 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2453 /* Check whether the target is able to optimize the call
2455 || !targetm.function_ok_for_sibcall (fndecl, exp)
2456 /* Functions that do not return exactly once may not be sibcall
2458 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2459 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2460 /* If the called function is nested in the current one, it might access
2461 some of the caller's arguments, but could clobber them beforehand if
2462 the argument areas are shared. */
2463 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2464 /* If this function requires more stack slots than the current
2465 function, we cannot change it into a sibling call.
2466 crtl->args.pretend_args_size is not part of the
2467 stack allocated by our caller. */
2468 || args_size.constant > (crtl->args.size
2469 - crtl->args.pretend_args_size)
2470 /* If the callee pops its own arguments, then it must pop exactly
2471 the same number of arguments as the current function. */
2472 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2473 != targetm.calls.return_pops_args (current_function_decl,
2474 TREE_TYPE (current_function_decl),
2476 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2479 /* Check if caller and callee disagree in promotion of function
2483 enum machine_mode caller_mode, caller_promoted_mode;
2484 enum machine_mode callee_mode, callee_promoted_mode;
2485 int caller_unsignedp, callee_unsignedp;
2486 tree caller_res = DECL_RESULT (current_function_decl);
2488 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2489 caller_mode = DECL_MODE (caller_res);
2490 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2491 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2492 caller_promoted_mode
2493 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2495 TREE_TYPE (current_function_decl), 1);
2496 callee_promoted_mode
2497 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2500 if (caller_mode != VOIDmode
2501 && (caller_promoted_mode != callee_promoted_mode
2502 || ((caller_mode != caller_promoted_mode
2503 || callee_mode != callee_promoted_mode)
2504 && (caller_unsignedp != callee_unsignedp
2505 || GET_MODE_BITSIZE (caller_mode)
2506 < GET_MODE_BITSIZE (callee_mode)))))
2510 /* Ensure current function's preferred stack boundary is at least
2511 what we need. Stack alignment may also increase preferred stack
2513 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2514 crtl->preferred_stack_boundary = preferred_stack_boundary;
2516 preferred_stack_boundary = crtl->preferred_stack_boundary;
2518 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2520 /* We want to make two insn chains; one for a sibling call, the other
2521 for a normal call. We will select one of the two chains after
2522 initial RTL generation is complete. */
2523 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2525 int sibcall_failure = 0;
2526 /* We want to emit any pending stack adjustments before the tail
2527 recursion "call". That way we know any adjustment after the tail
2528 recursion call can be ignored if we indeed use the tail
2530 int save_pending_stack_adjust = 0;
2531 int save_stack_pointer_delta = 0;
2533 rtx before_call, next_arg_reg, after_args;
2537 /* State variables we need to save and restore between
2539 save_pending_stack_adjust = pending_stack_adjust;
2540 save_stack_pointer_delta = stack_pointer_delta;
2543 flags &= ~ECF_SIBCALL;
2545 flags |= ECF_SIBCALL;
2547 /* Other state variables that we must reinitialize each time
2548 through the loop (that are not initialized by the loop itself). */
2552 /* Start a new sequence for the normal call case.
2554 From this point on, if the sibling call fails, we want to set
2555 sibcall_failure instead of continuing the loop. */
2558 /* Don't let pending stack adjusts add up to too much.
2559 Also, do all pending adjustments now if there is any chance
2560 this might be a call to alloca or if we are expanding a sibling
2562 Also do the adjustments before a throwing call, otherwise
2563 exception handling can fail; PR 19225. */
2564 if (pending_stack_adjust >= 32
2565 || (pending_stack_adjust > 0
2566 && (flags & ECF_MAY_BE_ALLOCA))
2567 || (pending_stack_adjust > 0
2568 && flag_exceptions && !(flags & ECF_NOTHROW))
2570 do_pending_stack_adjust ();
2572 /* Precompute any arguments as needed. */
2574 precompute_arguments (num_actuals, args);
2576 /* Now we are about to start emitting insns that can be deleted
2577 if a libcall is deleted. */
2578 if (pass && (flags & ECF_MALLOC))
2581 if (pass == 0 && crtl->stack_protect_guard)
2582 stack_protect_epilogue ();
2584 adjusted_args_size = args_size;
2585 /* Compute the actual size of the argument block required. The variable
2586 and constant sizes must be combined, the size may have to be rounded,
2587 and there may be a minimum required size. When generating a sibcall
2588 pattern, do not round up, since we'll be re-using whatever space our
2590 unadjusted_args_size
2591 = compute_argument_block_size (reg_parm_stack_space,
2592 &adjusted_args_size,
2595 : preferred_stack_boundary));
2597 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2599 /* The argument block when performing a sibling call is the
2600 incoming argument block. */
2603 argblock = crtl->args.internal_arg_pointer;
2605 #ifdef STACK_GROWS_DOWNWARD
2606 = plus_constant (argblock, crtl->args.pretend_args_size);
2608 = plus_constant (argblock, -crtl->args.pretend_args_size);
2610 stored_args_map = sbitmap_alloc (args_size.constant);
2611 sbitmap_zero (stored_args_map);
2614 /* If we have no actual push instructions, or shouldn't use them,
2615 make space for all args right now. */
2616 else if (adjusted_args_size.var != 0)
2618 if (old_stack_level == 0)
2620 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2621 old_stack_pointer_delta = stack_pointer_delta;
2622 old_pending_adj = pending_stack_adjust;
2623 pending_stack_adjust = 0;
2624 /* stack_arg_under_construction says whether a stack arg is
2625 being constructed at the old stack level. Pushing the stack
2626 gets a clean outgoing argument block. */
2627 old_stack_arg_under_construction = stack_arg_under_construction;
2628 stack_arg_under_construction = 0;
2630 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2631 if (flag_stack_usage)
2632 current_function_has_unbounded_dynamic_stack_size = 1;
2636 /* Note that we must go through the motions of allocating an argument
2637 block even if the size is zero because we may be storing args
2638 in the area reserved for register arguments, which may be part of
2641 int needed = adjusted_args_size.constant;
2643 /* Store the maximum argument space used. It will be pushed by
2644 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2647 if (needed > crtl->outgoing_args_size)
2648 crtl->outgoing_args_size = needed;
2650 if (must_preallocate)
2652 if (ACCUMULATE_OUTGOING_ARGS)
2654 /* Since the stack pointer will never be pushed, it is
2655 possible for the evaluation of a parm to clobber
2656 something we have already written to the stack.
2657 Since most function calls on RISC machines do not use
2658 the stack, this is uncommon, but must work correctly.
2660 Therefore, we save any area of the stack that was already
2661 written and that we are using. Here we set up to do this
2662 by making a new stack usage map from the old one. The
2663 actual save will be done by store_one_arg.
2665 Another approach might be to try to reorder the argument
2666 evaluations to avoid this conflicting stack usage. */
2668 /* Since we will be writing into the entire argument area,
2669 the map must be allocated for its entire size, not just
2670 the part that is the responsibility of the caller. */
2671 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2672 needed += reg_parm_stack_space;
2674 #ifdef ARGS_GROW_DOWNWARD
2675 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2678 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2681 if (stack_usage_map_buf)
2682 free (stack_usage_map_buf);
2683 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2684 stack_usage_map = stack_usage_map_buf;
2686 if (initial_highest_arg_in_use)
2687 memcpy (stack_usage_map, initial_stack_usage_map,
2688 initial_highest_arg_in_use);
2690 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2691 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2692 (highest_outgoing_arg_in_use
2693 - initial_highest_arg_in_use));
2696 /* The address of the outgoing argument list must not be
2697 copied to a register here, because argblock would be left
2698 pointing to the wrong place after the call to
2699 allocate_dynamic_stack_space below. */
2701 argblock = virtual_outgoing_args_rtx;
2705 if (inhibit_defer_pop == 0)
2707 /* Try to reuse some or all of the pending_stack_adjust
2708 to get this space. */
2710 = (combine_pending_stack_adjustment_and_call
2711 (unadjusted_args_size,
2712 &adjusted_args_size,
2713 preferred_unit_stack_boundary));
2715 /* combine_pending_stack_adjustment_and_call computes
2716 an adjustment before the arguments are allocated.
2717 Account for them and see whether or not the stack
2718 needs to go up or down. */
2719 needed = unadjusted_args_size - needed;
2723 /* We're releasing stack space. */
2724 /* ??? We can avoid any adjustment at all if we're
2725 already aligned. FIXME. */
2726 pending_stack_adjust = -needed;
2727 do_pending_stack_adjust ();
2731 /* We need to allocate space. We'll do that in
2732 push_block below. */
2733 pending_stack_adjust = 0;
2736 /* Special case this because overhead of `push_block' in
2737 this case is non-trivial. */
2739 argblock = virtual_outgoing_args_rtx;
2742 argblock = push_block (GEN_INT (needed), 0, 0);
2743 #ifdef ARGS_GROW_DOWNWARD
2744 argblock = plus_constant (argblock, needed);
2748 /* We only really need to call `copy_to_reg' in the case
2749 where push insns are going to be used to pass ARGBLOCK
2750 to a function call in ARGS. In that case, the stack
2751 pointer changes value from the allocation point to the
2752 call point, and hence the value of
2753 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2754 as well always do it. */
2755 argblock = copy_to_reg (argblock);
2760 if (ACCUMULATE_OUTGOING_ARGS)
2762 /* The save/restore code in store_one_arg handles all
2763 cases except one: a constructor call (including a C
2764 function returning a BLKmode struct) to initialize
2766 if (stack_arg_under_construction)
2769 = GEN_INT (adjusted_args_size.constant
2770 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2771 : TREE_TYPE (fndecl))) ? 0
2772 : reg_parm_stack_space));
2773 if (old_stack_level == 0)
2775 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2776 old_stack_pointer_delta = stack_pointer_delta;
2777 old_pending_adj = pending_stack_adjust;
2778 pending_stack_adjust = 0;
2779 /* stack_arg_under_construction says whether a stack
2780 arg is being constructed at the old stack level.
2781 Pushing the stack gets a clean outgoing argument
2783 old_stack_arg_under_construction
2784 = stack_arg_under_construction;
2785 stack_arg_under_construction = 0;
2786 /* Make a new map for the new argument list. */
2787 if (stack_usage_map_buf)
2788 free (stack_usage_map_buf);
2789 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2790 stack_usage_map = stack_usage_map_buf;
2791 highest_outgoing_arg_in_use = 0;
2793 /* We can pass TRUE as the 4th argument because we just
2794 saved the stack pointer and will restore it right after
2796 allocate_dynamic_stack_space (push_size, 0,
2797 BIGGEST_ALIGNMENT, true);
2800 /* If argument evaluation might modify the stack pointer,
2801 copy the address of the argument list to a register. */
2802 for (i = 0; i < num_actuals; i++)
2803 if (args[i].pass_on_stack)
2805 argblock = copy_addr_to_reg (argblock);
2810 compute_argument_addresses (args, argblock, num_actuals);
2812 /* If we push args individually in reverse order, perform stack alignment
2813 before the first push (the last arg). */
2814 if (PUSH_ARGS_REVERSED && argblock == 0
2815 && adjusted_args_size.constant != unadjusted_args_size)
2817 /* When the stack adjustment is pending, we get better code
2818 by combining the adjustments. */
2819 if (pending_stack_adjust
2820 && ! inhibit_defer_pop)
2822 pending_stack_adjust
2823 = (combine_pending_stack_adjustment_and_call
2824 (unadjusted_args_size,
2825 &adjusted_args_size,
2826 preferred_unit_stack_boundary));
2827 do_pending_stack_adjust ();
2829 else if (argblock == 0)
2830 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2831 - unadjusted_args_size));
2833 /* Now that the stack is properly aligned, pops can't safely
2834 be deferred during the evaluation of the arguments. */
2837 /* Record the maximum pushed stack space size. We need to delay
2838 doing it until now to take into account the optimization done
2839 by combine_pending_stack_adjustment_and_call. */
2840 if (flag_stack_usage
2841 && !ACCUMULATE_OUTGOING_ARGS
2843 && adjusted_args_size.var == 0)
2845 int pushed = adjusted_args_size.constant + pending_stack_adjust;
2846 if (pushed > current_function_pushed_stack_size)
2847 current_function_pushed_stack_size = pushed;
2850 funexp = rtx_for_function_call (fndecl, addr);
2852 /* Figure out the register where the value, if any, will come back. */
2854 if (TYPE_MODE (rettype) != VOIDmode
2855 && ! structure_value_addr)
2857 if (pcc_struct_value)
2858 valreg = hard_function_value (build_pointer_type (rettype),
2859 fndecl, NULL, (pass == 0));
2861 valreg = hard_function_value (rettype, fndecl, fntype,
2864 /* If VALREG is a PARALLEL whose first member has a zero
2865 offset, use that. This is for targets such as m68k that
2866 return the same value in multiple places. */
2867 if (GET_CODE (valreg) == PARALLEL)
2869 rtx elem = XVECEXP (valreg, 0, 0);
2870 rtx where = XEXP (elem, 0);
2871 rtx offset = XEXP (elem, 1);
2872 if (offset == const0_rtx
2873 && GET_MODE (where) == GET_MODE (valreg))
2878 /* Precompute all register parameters. It isn't safe to compute anything
2879 once we have started filling any specific hard regs. */
2880 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2882 if (CALL_EXPR_STATIC_CHAIN (exp))
2883 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2885 static_chain_value = 0;
2887 #ifdef REG_PARM_STACK_SPACE
2888 /* Save the fixed argument area if it's part of the caller's frame and
2889 is clobbered by argument setup for this call. */
2890 if (ACCUMULATE_OUTGOING_ARGS && pass)
2891 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2892 &low_to_save, &high_to_save);
2895 /* Now store (and compute if necessary) all non-register parms.
2896 These come before register parms, since they can require block-moves,
2897 which could clobber the registers used for register parms.
2898 Parms which have partial registers are not stored here,
2899 but we do preallocate space here if they want that. */
2901 for (i = 0; i < num_actuals; i++)
2903 if (args[i].reg == 0 || args[i].pass_on_stack)
2905 rtx before_arg = get_last_insn ();
2907 if (store_one_arg (&args[i], argblock, flags,
2908 adjusted_args_size.var != 0,
2909 reg_parm_stack_space)
2911 && check_sibcall_argument_overlap (before_arg,
2913 sibcall_failure = 1;
2916 if (((flags & ECF_CONST)
2917 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
2919 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2920 gen_rtx_USE (VOIDmode,
2925 /* If we have a parm that is passed in registers but not in memory
2926 and whose alignment does not permit a direct copy into registers,
2927 make a group of pseudos that correspond to each register that we
2929 if (STRICT_ALIGNMENT)
2930 store_unaligned_arguments_into_pseudos (args, num_actuals);
2932 /* Now store any partially-in-registers parm.
2933 This is the last place a block-move can happen. */
2935 for (i = 0; i < num_actuals; i++)
2936 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2938 rtx before_arg = get_last_insn ();
2940 if (store_one_arg (&args[i], argblock, flags,
2941 adjusted_args_size.var != 0,
2942 reg_parm_stack_space)
2944 && check_sibcall_argument_overlap (before_arg,
2946 sibcall_failure = 1;
2949 /* If we pushed args in forward order, perform stack alignment
2950 after pushing the last arg. */
2951 if (!PUSH_ARGS_REVERSED && argblock == 0)
2952 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2953 - unadjusted_args_size));
2955 /* If register arguments require space on the stack and stack space
2956 was not preallocated, allocate stack space here for arguments
2957 passed in registers. */
2958 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2959 && !ACCUMULATE_OUTGOING_ARGS
2960 && must_preallocate == 0 && reg_parm_stack_space > 0)
2961 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2963 /* Pass the function the address in which to return a
2965 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2967 structure_value_addr
2968 = convert_memory_address (Pmode, structure_value_addr);
2969 emit_move_insn (struct_value,
2971 force_operand (structure_value_addr,
2974 if (REG_P (struct_value))
2975 use_reg (&call_fusage, struct_value);
2978 after_args = get_last_insn ();
2979 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
2980 &call_fusage, reg_parm_seen, pass == 0);
2982 load_register_parameters (args, num_actuals, &call_fusage, flags,
2983 pass == 0, &sibcall_failure);
2985 /* Save a pointer to the last insn before the call, so that we can
2986 later safely search backwards to find the CALL_INSN. */
2987 before_call = get_last_insn ();
2989 /* Set up next argument register. For sibling calls on machines
2990 with register windows this should be the incoming register. */
2992 next_arg_reg = targetm.calls.function_incoming_arg (&args_so_far,
2997 next_arg_reg = targetm.calls.function_arg (&args_so_far,
2998 VOIDmode, void_type_node,
3001 /* All arguments and registers used for the call must be set up by
3004 /* Stack must be properly aligned now. */
3006 || !(stack_pointer_delta % preferred_unit_stack_boundary));
3008 /* Generate the actual call instruction. */
3009 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3010 adjusted_args_size.constant, struct_value_size,
3011 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3012 flags, & args_so_far);
3014 /* If the call setup or the call itself overlaps with anything
3015 of the argument setup we probably clobbered our call address.
3016 In that case we can't do sibcalls. */
3018 && check_sibcall_argument_overlap (after_args, 0, 0))
3019 sibcall_failure = 1;
3021 /* If a non-BLKmode value is returned at the most significant end
3022 of a register, shift the register right by the appropriate amount
3023 and update VALREG accordingly. BLKmode values are handled by the
3024 group load/store machinery below. */
3025 if (!structure_value_addr
3026 && !pcc_struct_value
3027 && TYPE_MODE (rettype) != BLKmode
3028 && targetm.calls.return_in_msb (rettype))
3030 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3031 sibcall_failure = 1;
3032 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3035 if (pass && (flags & ECF_MALLOC))
3037 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3040 /* The return value from a malloc-like function is a pointer. */
3041 if (TREE_CODE (rettype) == POINTER_TYPE)
3042 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3044 emit_move_insn (temp, valreg);
3046 /* The return value from a malloc-like function can not alias
3048 last = get_last_insn ();
3049 add_reg_note (last, REG_NOALIAS, temp);
3051 /* Write out the sequence. */
3052 insns = get_insns ();
3058 /* For calls to `setjmp', etc., inform
3059 function.c:setjmp_warnings that it should complain if
3060 nonvolatile values are live. For functions that cannot
3061 return, inform flow that control does not fall through. */
3063 if ((flags & ECF_NORETURN) || pass == 0)
3065 /* The barrier must be emitted
3066 immediately after the CALL_INSN. Some ports emit more
3067 than just a CALL_INSN above, so we must search for it here. */
3069 rtx last = get_last_insn ();
3070 while (!CALL_P (last))
3072 last = PREV_INSN (last);
3073 /* There was no CALL_INSN? */
3074 gcc_assert (last != before_call);
3077 emit_barrier_after (last);
3079 /* Stack adjustments after a noreturn call are dead code.
3080 However when NO_DEFER_POP is in effect, we must preserve
3081 stack_pointer_delta. */
3082 if (inhibit_defer_pop == 0)
3084 stack_pointer_delta = old_stack_allocated;
3085 pending_stack_adjust = 0;
3089 /* If value type not void, return an rtx for the value. */
3091 if (TYPE_MODE (rettype) == VOIDmode
3093 target = const0_rtx;
3094 else if (structure_value_addr)
3096 if (target == 0 || !MEM_P (target))
3099 = gen_rtx_MEM (TYPE_MODE (rettype),
3100 memory_address (TYPE_MODE (rettype),
3101 structure_value_addr));
3102 set_mem_attributes (target, rettype, 1);
3105 else if (pcc_struct_value)
3107 /* This is the special C++ case where we need to
3108 know what the true target was. We take care to
3109 never use this value more than once in one expression. */
3110 target = gen_rtx_MEM (TYPE_MODE (rettype),
3111 copy_to_reg (valreg));
3112 set_mem_attributes (target, rettype, 1);
3114 /* Handle calls that return values in multiple non-contiguous locations.
3115 The Irix 6 ABI has examples of this. */
3116 else if (GET_CODE (valreg) == PARALLEL)
3120 /* This will only be assigned once, so it can be readonly. */
3121 tree nt = build_qualified_type (rettype,
3122 (TYPE_QUALS (rettype)
3123 | TYPE_QUAL_CONST));
3125 target = assign_temp (nt, 0, 1, 1);
3128 if (! rtx_equal_p (target, valreg))
3129 emit_group_store (target, valreg, rettype,
3130 int_size_in_bytes (rettype));
3132 /* We can not support sibling calls for this case. */
3133 sibcall_failure = 1;
3136 && GET_MODE (target) == TYPE_MODE (rettype)
3137 && GET_MODE (target) == GET_MODE (valreg))
3139 bool may_overlap = false;
3141 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3142 reg to a plain register. */
3143 if (!REG_P (target) || HARD_REGISTER_P (target))
3144 valreg = avoid_likely_spilled_reg (valreg);
3146 /* If TARGET is a MEM in the argument area, and we have
3147 saved part of the argument area, then we can't store
3148 directly into TARGET as it may get overwritten when we
3149 restore the argument save area below. Don't work too
3150 hard though and simply force TARGET to a register if it
3151 is a MEM; the optimizer is quite likely to sort it out. */
3152 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3153 for (i = 0; i < num_actuals; i++)
3154 if (args[i].save_area)
3161 target = copy_to_reg (valreg);
3164 /* TARGET and VALREG cannot be equal at this point
3165 because the latter would not have
3166 REG_FUNCTION_VALUE_P true, while the former would if
3167 it were referring to the same register.
3169 If they refer to the same register, this move will be
3170 a no-op, except when function inlining is being
3172 emit_move_insn (target, valreg);
3174 /* If we are setting a MEM, this code must be executed.
3175 Since it is emitted after the call insn, sibcall
3176 optimization cannot be performed in that case. */
3178 sibcall_failure = 1;
3181 else if (TYPE_MODE (rettype) == BLKmode)
3184 if (GET_MODE (val) != BLKmode)
3185 val = avoid_likely_spilled_reg (val);
3186 target = copy_blkmode_from_reg (target, val, rettype);
3188 /* We can not support sibling calls for this case. */
3189 sibcall_failure = 1;
3192 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3194 /* If we promoted this return value, make the proper SUBREG.
3195 TARGET might be const0_rtx here, so be careful. */
3197 && TYPE_MODE (rettype) != BLKmode
3198 && GET_MODE (target) != TYPE_MODE (rettype))
3200 tree type = rettype;
3201 int unsignedp = TYPE_UNSIGNED (type);
3203 enum machine_mode pmode;
3205 /* Ensure we promote as expected, and get the new unsignedness. */
3206 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3208 gcc_assert (GET_MODE (target) == pmode);
3210 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3211 && (GET_MODE_SIZE (GET_MODE (target))
3212 > GET_MODE_SIZE (TYPE_MODE (type))))
3214 offset = GET_MODE_SIZE (GET_MODE (target))
3215 - GET_MODE_SIZE (TYPE_MODE (type));
3216 if (! BYTES_BIG_ENDIAN)
3217 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3218 else if (! WORDS_BIG_ENDIAN)
3219 offset %= UNITS_PER_WORD;
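/* Tracing the arithmetic above with made-up sizes: if the promoted register
   is 8 bytes, the type is 2 bytes and UNITS_PER_WORD is 4, OFFSET starts at
   8 - 2 == 6; with big-endian words but little-endian bytes it becomes
   (6 / 4) * 4 == 4, with little-endian words but big-endian bytes it becomes
   6 % 4 == 2, and when both are big-endian it stays 6.  */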
3222 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3223 SUBREG_PROMOTED_VAR_P (target) = 1;
3224 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3227 /* If size of args is variable or this was a constructor call for a stack
3228 argument, restore saved stack-pointer value. */
3230 if (old_stack_level)
3232 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3233 stack_pointer_delta = old_stack_pointer_delta;
3234 pending_stack_adjust = old_pending_adj;
3235 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3236 stack_arg_under_construction = old_stack_arg_under_construction;
3237 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3238 stack_usage_map = initial_stack_usage_map;
3239 sibcall_failure = 1;
3241 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3243 #ifdef REG_PARM_STACK_SPACE
3245 restore_fixed_argument_area (save_area, argblock,
3246 high_to_save, low_to_save);
3249 /* If we saved any argument areas, restore them. */
3250 for (i = 0; i < num_actuals; i++)
3251 if (args[i].save_area)
3253 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3255 = gen_rtx_MEM (save_mode,
3256 memory_address (save_mode,
3257 XEXP (args[i].stack_slot, 0)));
3259 if (save_mode != BLKmode)
3260 emit_move_insn (stack_area, args[i].save_area);
3262 emit_block_move (stack_area, args[i].save_area,
3263 GEN_INT (args[i].locate.size.constant),
3264 BLOCK_OP_CALL_PARM);
3267 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3268 stack_usage_map = initial_stack_usage_map;
3271 /* If this was alloca, record the new stack level for nonlocal gotos.
3272 Check for the handler slots since we might not have a save area
3273 for non-local gotos. */
3275 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3276 update_nonlocal_goto_save_area ();
3278 /* Free up storage we no longer need. */
3279 for (i = 0; i < num_actuals; ++i)
3280 if (args[i].aligned_regs)
3281 free (args[i].aligned_regs);
3283 insns = get_insns ();
3288 tail_call_insns = insns;
3290 /* Restore the pending stack adjustment now that we have
3291 finished generating the sibling call sequence. */
3293 pending_stack_adjust = save_pending_stack_adjust;
3294 stack_pointer_delta = save_stack_pointer_delta;
3296 /* Prepare arg structure for next iteration. */
3297 for (i = 0; i < num_actuals; i++)
3300 args[i].aligned_regs = 0;
3304 sbitmap_free (stored_args_map);
3305 internal_arg_pointer_exp_state.scan_start = NULL_RTX;
3306 VEC_free (rtx, heap, internal_arg_pointer_exp_state.cache);
3310 normal_call_insns = insns;
3312 /* Verify that we've deallocated all the stack we used. */
3313 gcc_assert ((flags & ECF_NORETURN)
3314 || (old_stack_allocated
3315 == stack_pointer_delta - pending_stack_adjust));
3318 /* If something prevents making this a sibling call,
3319 zero out the sequence. */
3320 if (sibcall_failure)
3321 tail_call_insns = NULL_RTX;
3326 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3327 arguments too, as argument area is now clobbered by the call. */
3328 if (tail_call_insns)
3330 emit_insn (tail_call_insns);
3331 crtl->tail_call_emit = true;
3334 emit_insn (normal_call_insns);
3336 currently_expanding_call--;
3338 if (stack_usage_map_buf)
3339 free (stack_usage_map_buf);
3344 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3345 this function's incoming arguments.
3347 At the start of RTL generation we know the only REG_EQUIV notes
3348 in the rtl chain are those for incoming arguments, so we can look
3349 for REG_EQUIV notes between the start of the function and the
3350 NOTE_INSN_FUNCTION_BEG.
3352 This is (slight) overkill. We could keep track of the highest
3353 argument we clobber and be more selective in removing notes, but it
3354 does not seem to be worth the effort. */
3357 fixup_tail_calls (void)
3361 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3365 /* There are never REG_EQUIV notes for the incoming arguments
3366 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3368 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3371 note = find_reg_note (insn, REG_EQUIV, 0);
3373 remove_note (insn, note);
3374 note = find_reg_note (insn, REG_EQUIV, 0);
3379 /* Traverse a list of TYPES and expand all complex types into their
3382 split_complex_types (tree types)
3386 /* Before allocating memory, check for the common case of no complex. */
3387 for (p = types; p; p = TREE_CHAIN (p))
3389 tree type = TREE_VALUE (p);
3390 if (TREE_CODE (type) == COMPLEX_TYPE
3391 && targetm.calls.split_complex_arg (type))
3397 types = copy_list (types);
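/* Illustrative example: when the target splits complex arguments, a type
   list such as (complex double, int) is rewritten in place by the loop
   below to (double, double, int) -- the original node keeps the component
   type for the real part and a new node is linked in for the imaginary
   part.  */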
3399 for (p = types; p; p = TREE_CHAIN (p))
3401 tree complex_type = TREE_VALUE (p);
3403 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3404 && targetm.calls.split_complex_arg (complex_type))
3408 /* Rewrite complex type with component type. */
3409 TREE_VALUE (p) = TREE_TYPE (complex_type);
3410 next = TREE_CHAIN (p);
3412 /* Add another component type for the imaginary part. */
3413 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3414 TREE_CHAIN (p) = imag;
3415 TREE_CHAIN (imag) = next;
3417 /* Skip the newly created node. */
3425 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3426 The RETVAL parameter specifies whether the return value needs to be saved;
3427 the other parameters are documented in the emit_library_call function below. */
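/* A typical caller passes the libcall address followed by (value, mode)
   pairs, for example (illustrative only):

	emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
			   dest, Pmode,
			   const0_rtx, TYPE_MODE (integer_type_node),
			   size, TYPE_MODE (sizetype));

   where DEST and SIZE are rtx operands; those varargs arrive here as P.  */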
3430 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3431 enum libcall_type fn_type,
3432 enum machine_mode outmode, int nargs, va_list p)
3434 /* Total size in bytes of all the stack-parms scanned so far. */
3435 struct args_size args_size;
3436 /* Size of arguments before any adjustments (such as rounding). */
3437 struct args_size original_args_size;
3440 /* TODO: choose the correct decl type of orgfun. Sadly this information
3441 isn't present here, so we default to the native calling ABI. */
3442 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3443 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3447 CUMULATIVE_ARGS args_so_far;
3451 enum machine_mode mode;
3454 struct locate_and_pad_arg_data locate;
3458 int old_inhibit_defer_pop = inhibit_defer_pop;
3459 rtx call_fusage = 0;
3462 int pcc_struct_value = 0;
3463 int struct_value_size = 0;
3465 int reg_parm_stack_space = 0;
3468 tree tfom; /* type_for_mode (outmode, 0) */
3470 #ifdef REG_PARM_STACK_SPACE
3471 /* Define the boundary of the register parm stack space that needs to be
3473 int low_to_save = 0, high_to_save = 0;
3474 rtx save_area = 0; /* Place that it is saved. */
3477 /* Size of the stack reserved for parameter registers. */
3478 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3479 char *initial_stack_usage_map = stack_usage_map;
3480 char *stack_usage_map_buf = NULL;
3482 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3484 #ifdef REG_PARM_STACK_SPACE
3485 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3488 /* By default, library functions can not throw. */
3489 flags = ECF_NOTHROW;
3502 flags |= ECF_NORETURN;
3505 flags = ECF_NORETURN;
3507 case LCT_RETURNS_TWICE:
3508 flags = ECF_RETURNS_TWICE;
3513 /* Ensure current function's preferred stack boundary is at least
3515 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3516 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3518 /* If this kind of value comes back in memory,
3519 decide where in memory it should come back. */
3520 if (outmode != VOIDmode)
3522 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3523 if (aggregate_value_p (tfom, 0))
3525 #ifdef PCC_STATIC_STRUCT_RETURN
3527 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3528 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3529 pcc_struct_value = 1;
3531 value = gen_reg_rtx (outmode);
3532 #else /* not PCC_STATIC_STRUCT_RETURN */
3533 struct_value_size = GET_MODE_SIZE (outmode);
3534 if (value != 0 && MEM_P (value))
3537 mem_value = assign_temp (tfom, 0, 1, 1);
3539 /* This call returns a big structure. */
3540 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3544 tfom = void_type_node;
3546 /* ??? Unfinished: must pass the memory address as an argument. */
3548 /* Copy all the libcall-arguments out of the varargs data
3549 and into a vector ARGVEC.
3551 Compute how to pass each argument. We only support a very small subset
3552 of the full argument passing conventions to limit complexity here since
3553 library functions shouldn't have many args. */
3555 argvec = XALLOCAVEC (struct arg, nargs + 1);
3556 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3558 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3559 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3561 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3564 args_size.constant = 0;
3571 /* If there's a structure value address to be passed,
3572 either pass it in the special place, or pass it as an extra argument. */
3573 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3575 rtx addr = XEXP (mem_value, 0);
3579 /* Make sure it is a reasonable operand for a move or push insn. */
3580 if (!REG_P (addr) && !MEM_P (addr)
3581 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3582 addr = force_operand (addr, NULL_RTX);
3584 argvec[count].value = addr;
3585 argvec[count].mode = Pmode;
3586 argvec[count].partial = 0;
3588 argvec[count].reg = targetm.calls.function_arg (&args_so_far,
3589 Pmode, NULL_TREE, true);
3590 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3591 NULL_TREE, 1) == 0);
3593 locate_and_pad_parm (Pmode, NULL_TREE,
3594 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3597 argvec[count].reg != 0,
3599 0, NULL_TREE, &args_size, &argvec[count].locate);
3601 if (argvec[count].reg == 0 || argvec[count].partial != 0
3602 || reg_parm_stack_space > 0)
3603 args_size.constant += argvec[count].locate.size.constant;
3605 targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
3610 for (; count < nargs; count++)
3612 rtx val = va_arg (p, rtx);
3613 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3615 /* We cannot convert the arg value to the mode the library wants here;
3616 must do it earlier where we know the signedness of the arg. */
3617 gcc_assert (mode != BLKmode
3618 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3620 /* Make sure it is a reasonable operand for a move or push insn. */
3621 if (!REG_P (val) && !MEM_P (val)
3622 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3623 val = force_operand (val, NULL_RTX);
3625 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3629 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3631 /* If this was a CONST function, it is now PURE since it now
3633 if (flags & ECF_CONST)
3635 flags &= ~ECF_CONST;
3639 if (MEM_P (val) && !must_copy)
3643 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3645 emit_move_insn (slot, val);
3648 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3649 gen_rtx_USE (VOIDmode, slot),
3652 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3653 gen_rtx_CLOBBER (VOIDmode,
3658 val = force_operand (XEXP (slot, 0), NULL_RTX);
3661 argvec[count].value = val;
3662 argvec[count].mode = mode;
3664 argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
3667 argvec[count].partial
3668 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3670 locate_and_pad_parm (mode, NULL_TREE,
3671 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3674 argvec[count].reg != 0,
3676 argvec[count].partial,
3677 NULL_TREE, &args_size, &argvec[count].locate);
3679 gcc_assert (!argvec[count].locate.size.var);
3681 if (argvec[count].reg == 0 || argvec[count].partial != 0
3682 || reg_parm_stack_space > 0)
3683 args_size.constant += argvec[count].locate.size.constant;
3685 targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
3688 /* If this machine requires an external definition for library
3689 functions, write one out. */
3690 assemble_external_libcall (fun);
3692 original_args_size = args_size;
3693 args_size.constant = (((args_size.constant
3694 + stack_pointer_delta
3698 - stack_pointer_delta);
3700 args_size.constant = MAX (args_size.constant,
3701 reg_parm_stack_space);
3703 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3704 args_size.constant -= reg_parm_stack_space;
3706 if (args_size.constant > crtl->outgoing_args_size)
3707 crtl->outgoing_args_size = args_size.constant;
3709 if (flag_stack_usage && !ACCUMULATE_OUTGOING_ARGS)
3711 int pushed = args_size.constant + pending_stack_adjust;
3712 if (pushed > current_function_pushed_stack_size)
3713 current_function_pushed_stack_size = pushed;
3716 if (ACCUMULATE_OUTGOING_ARGS)
3718 /* Since the stack pointer will never be pushed, it is possible for
3719 the evaluation of a parm to clobber something we have already
3720 written to the stack. Since most function calls on RISC machines
3721 do not use the stack, this is uncommon, but must work correctly.
3723 Therefore, we save any area of the stack that was already written
3724 and that we are using. Here we set up to do this by making a new
3725 stack usage map from the old one.
3727 Another approach might be to try to reorder the argument
3728 evaluations to avoid this conflicting stack usage. */
3730 needed = args_size.constant;
3732 /* Since we will be writing into the entire argument area, the
3733 map must be allocated for its entire size, not just the part that
3734 is the responsibility of the caller. */
3735 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3736 needed += reg_parm_stack_space;
3738 #ifdef ARGS_GROW_DOWNWARD
3739 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3742 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3745 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3746 stack_usage_map = stack_usage_map_buf;
3748 if (initial_highest_arg_in_use)
3749 memcpy (stack_usage_map, initial_stack_usage_map,
3750 initial_highest_arg_in_use);
3752 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3753 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3754 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3757 /* We must be careful to use virtual regs before they're instantiated,
3758 and real regs afterwards. Loop optimization, for example, can create
3759 new libcalls after we've instantiated the virtual regs, and if we
3760 use virtuals anyway, they won't match the rtl patterns. */
3762 if (virtuals_instantiated)
3763 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3765 argblock = virtual_outgoing_args_rtx;
3770 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3773 /* If we push args individually in reverse order, perform stack alignment
3774 before the first push (the last arg). */
3775 if (argblock == 0 && PUSH_ARGS_REVERSED)
3776 anti_adjust_stack (GEN_INT (args_size.constant
3777 - original_args_size.constant));
3779 if (PUSH_ARGS_REVERSED)
3780 {
3781 inc = -1;
3782 argnum = nargs - 1;
3783 }
3784 else
3785 {
3786 inc = 1;
3787 argnum = 0;
3788 }
3790 #ifdef REG_PARM_STACK_SPACE
3791 if (ACCUMULATE_OUTGOING_ARGS)
3793 /* The argument list is the property of the called routine and it
3794 may clobber it. If the fixed area has been used for previous
3795 parameters, we must save and restore it. */
3796 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3797 &low_to_save, &high_to_save);
3801 /* Push the args that need to be pushed. */
3803 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3804 are to be pushed. */
3805 for (count = 0; count < nargs; count++, argnum += inc)
3807 enum machine_mode mode = argvec[argnum].mode;
3808 rtx val = argvec[argnum].value;
3809 rtx reg = argvec[argnum].reg;
3810 int partial = argvec[argnum].partial;
3811 unsigned int parm_align = argvec[argnum].locate.boundary;
3812 int lower_bound = 0, upper_bound = 0, i;
3814 if (! (reg != 0 && partial == 0))
3816 if (ACCUMULATE_OUTGOING_ARGS)
3818 /* If this is being stored into a pre-allocated, fixed-size,
3819 stack area, save any previous data at that location. */
3821 #ifdef ARGS_GROW_DOWNWARD
3822 /* stack_slot is negative, but we want to index stack_usage_map
3823 with positive values. */
3824 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3825 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3826 #else
3827 lower_bound = argvec[argnum].locate.slot_offset.constant;
3828 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3829 #endif
3831 i = lower_bound;
3832 /* Don't worry about things in the fixed argument area;
3833 it has already been saved. */
3834 if (i < reg_parm_stack_space)
3835 i = reg_parm_stack_space;
3836 while (i < upper_bound && stack_usage_map[i] == 0)
3837 i++;
3839 if (i < upper_bound)
3840 {
3841 /* We need to make a save area. */
3842 unsigned int size
3843 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3844 enum machine_mode save_mode
3845 = mode_for_size (size, MODE_INT, 1);
3846 rtx adr
3847 = plus_constant (argblock,
3848 argvec[argnum].locate.offset.constant);
3849 rtx stack_area
3850 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3852 if (save_mode == BLKmode)
3853 {
3854 argvec[argnum].save_area
3855 = assign_stack_temp (BLKmode,
3856 argvec[argnum].locate.size.constant,
3857 0);
3859 emit_block_move (validize_mem (argvec[argnum].save_area),
3860 stack_area,
3861 GEN_INT (argvec[argnum].locate.size.constant),
3862 BLOCK_OP_CALL_PARM);
3863 }
3864 else
3865 {
3866 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3868 emit_move_insn (argvec[argnum].save_area, stack_area);
3869 }
3873 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
3874 partial, reg, 0, argblock,
3875 GEN_INT (argvec[argnum].locate.offset.constant),
3876 reg_parm_stack_space,
3877 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3879 /* Now mark the segment we just used. */
3880 if (ACCUMULATE_OUTGOING_ARGS)
3881 for (i = lower_bound; i < upper_bound; i++)
3882 stack_usage_map[i] = 1;
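/* Marking these bytes means that a later argument of this same libcall which
   lands on them will take the save-area path above instead of silently
   overwriting data that is still needed.  */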
3886 if ((flags & ECF_CONST)
3887 || ((flags & ECF_PURE) && ACCUMULATE_OUTGOING_ARGS))
3888 {
3889 rtx use;
3891 /* Indicate argument access so that alias.c knows that these
3892 values are live. */
3893 if (argblock)
3894 use = plus_constant (argblock,
3895 argvec[argnum].locate.offset.constant);
3896 else
3897 /* When arguments are pushed, trying to tell alias.c where
3898 exactly this argument is won't work, because the
3899 auto-increment causes confusion. So we merely indicate
3900 that we access something with a known mode somewhere on
3901 the stack. */
3902 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3903 gen_rtx_SCRATCH (Pmode));
3904 use = gen_rtx_MEM (argvec[argnum].mode, use);
3905 use = gen_rtx_USE (VOIDmode, use);
3906 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3907 }
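/* The USE recorded here ends up in CALL_INSN_FUNCTION_USAGE.  In the
   pushed-arguments case it looks roughly like
     (use (mem:M (plus:P (reg virtual-outgoing-args) (scratch:P))))
   (a sketch only; the exact RTL depends on the target and on mode M).  */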
3911 /* If we pushed args in forward order, perform stack alignment
3912 after pushing the last arg. */
3913 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3914 anti_adjust_stack (GEN_INT (args_size.constant
3915 - original_args_size.constant));
3917 if (PUSH_ARGS_REVERSED)
3918 argnum = nargs - 1;
3919 else
3920 argnum = 0;
3922 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3924 /* Now load any reg parms into their regs. */
3926 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3927 are to be pushed. */
3928 for (count = 0; count < nargs; count++, argnum += inc)
3930 enum machine_mode mode = argvec[argnum].mode;
3931 rtx val = argvec[argnum].value;
3932 rtx reg = argvec[argnum].reg;
3933 int partial = argvec[argnum].partial;
3935 /* Handle calls that pass values in multiple non-contiguous
3936 locations. The PA64 has examples of this for library calls. */
3937 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3938 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3939 else if (reg != 0 && partial == 0)
3940 emit_move_insn (reg, val);
3945 /* Any regs containing parms remain in use through the call. */
3946 for (count = 0; count < nargs; count++)
3947 {
3948 rtx reg = argvec[count].reg;
3949 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3950 use_group_regs (&call_fusage, reg);
3951 else if (reg != 0)
3952 {
3953 int partial = argvec[count].partial;
3954 if (partial)
3955 {
3956 int nregs;
3957 gcc_assert (partial % UNITS_PER_WORD == 0);
3958 nregs = partial / UNITS_PER_WORD;
3959 use_regs (&call_fusage, REGNO (reg), nregs);
3960 }
3961 else
3962 use_reg (&call_fusage, reg);
3963 }
3964 }
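/* Recording each argument register in CALL_FUSAGE keeps it live up to the
   call insn, so later RTL passes do not delete the register loads just
   emitted as if they were dead stores.  */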
3966 /* Pass the function the address in which to return a structure value. */
3967 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3968 {
3969 emit_move_insn (struct_value,
3970 force_reg (Pmode,
3971 force_operand (XEXP (mem_value, 0),
3972 NULL_RTX)));
3973 if (REG_P (struct_value))
3974 use_reg (&call_fusage, struct_value);
3975 }
3977 /* Don't allow popping to be deferred, since then
3978 cse'ing of library calls could delete a call and leave the pop. */
3979 NO_DEFER_POP;
3980 valreg = (mem_value == 0 && outmode != VOIDmode
3981 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3983 /* Stack must be properly aligned now. */
3984 gcc_assert (!(stack_pointer_delta
3985 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3987 before_call = get_last_insn ();
3989 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3990 will set inhibit_defer_pop to that value. */
3991 /* The return type is needed to decide how many bytes the function pops.
3992 Signedness plays no role in that, so for simplicity, we pretend it's
3993 always signed. We also assume that the list of arguments passed has
3994 no impact, so we pretend it is unknown. */
3996 emit_call_1 (fun, NULL,
3997 get_identifier (XSTR (orgfun, 0)),
3998 build_function_type (tfom, NULL_TREE),
3999 original_args_size.constant, args_size.constant,
4000 struct_value_size,
4001 targetm.calls.function_arg (&args_so_far,
4002 VOIDmode, void_type_node, true),
4003 valreg,
4004 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4006 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4007 that it should complain if nonvolatile values are live. For
4008 functions that cannot return, inform flow that control does not
4009 fall through. */
4011 if (flags & ECF_NORETURN)
4013 /* The barrier note must be emitted
4014 immediately after the CALL_INSN. Some ports emit more than
4015 just a CALL_INSN above, so we must search for it here. */
4017 rtx last = get_last_insn ();
4018 while (!CALL_P (last))
4019 {
4020 last = PREV_INSN (last);
4021 /* There was no CALL_INSN? */
4022 gcc_assert (last != before_call);
4023 }
4025 emit_barrier_after (last);
4026 }
4028 /* Now restore inhibit_defer_pop to its actual original value. */
4029 OK_DEFER_POP;
4033 /* Copy the value to the right place. */
4034 if (outmode != VOIDmode && retval)
4035 {
4036 if (mem_value)
4037 {
4038 if (value == 0)
4039 value = mem_value;
4040 if (value != mem_value)
4041 emit_move_insn (value, mem_value);
4042 }
4043 else if (GET_CODE (valreg) == PARALLEL)
4044 {
4045 if (value == 0)
4046 value = gen_reg_rtx (outmode);
4047 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4048 }
4049 else
4050 {
4051 /* Convert to the proper mode if a promotion has been active. */
4052 if (GET_MODE (valreg) != outmode)
4054 int unsignedp = TYPE_UNSIGNED (tfom);
4056 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4057 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4058 == GET_MODE (valreg));
4059 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4060 }
4062 if (value != 0)
4063 emit_move_insn (value, valreg);
4069 if (ACCUMULATE_OUTGOING_ARGS)
4071 #ifdef REG_PARM_STACK_SPACE
4072 if (save_area)
4073 restore_fixed_argument_area (save_area, argblock,
4074 high_to_save, low_to_save);
4075 #endif
4077 /* If we saved any argument areas, restore them. */
4078 for (count = 0; count < nargs; count++)
4079 if (argvec[count].save_area)
4081 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4082 rtx adr = plus_constant (argblock,
4083 argvec[count].locate.offset.constant);
4084 rtx stack_area = gen_rtx_MEM (save_mode,
4085 memory_address (save_mode, adr));
4087 if (save_mode == BLKmode)
4088 emit_block_move (stack_area,
4089 validize_mem (argvec[count].save_area),
4090 GEN_INT (argvec[count].locate.size.constant),
4091 BLOCK_OP_CALL_PARM);
4092 else
4093 emit_move_insn (stack_area, argvec[count].save_area);
4096 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4097 stack_usage_map = initial_stack_usage_map;
4100 if (stack_usage_map_buf)
4101 free (stack_usage_map_buf);
4107 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4108 (emitting the queue unless NO_QUEUE is nonzero),
4109 for a value of mode OUTMODE,
4110 with NARGS different arguments, passed as alternating rtx values
4111 and machine_modes to convert them to.
4113 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4114 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4115 other types of library calls. */
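/* Illustrative sketch only: FN is assumed to be a SYMBOL_REF for the routine
   and OP0/OP1 pseudos already in SImode; the varargs are consumed pairwise as
   (rtx value, enum machine_mode):

     emit_library_call (fn, LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);  */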
4117 void
4118 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4119 enum machine_mode outmode, int nargs, ...)
4120 {
4121 va_list p;
4123 va_start (p, nargs);
4124 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4125 va_end (p);
4126 }
4128 /* Like emit_library_call except that an extra argument, VALUE,
4129 comes second and says where to store the result.
4130 (If VALUE is zero, this function chooses a convenient way
4131 to return the value.)
4133 This function returns an rtx for where the value is to be found.
4134 If VALUE is nonzero, VALUE is returned. */
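/* Illustrative sketch only, with the same assumed FN/OP0/OP1 as above:

     rtx res = emit_library_call_value (fn, NULL_RTX, LCT_CONST, SImode,
                                        2, op0, SImode, op1, SImode);

   With VALUE == NULL_RTX the result comes back in whatever rtx this function
   chose, typically the target's function-value register.  */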
4137 emit_library_call_value (rtx orgfun, rtx value,
4138 enum libcall_type fn_type,
4139 enum machine_mode outmode, int nargs, ...)
4144 va_start (p, nargs);
4145 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4146 nargs, p);
4152 /* Store a single argument for a function call
4153 into the register or memory area where it must be passed.
4154 *ARG describes the argument value and where to pass it.
4156 ARGBLOCK is the address of the stack-block for all the arguments,
4157 or 0 on a machine where arguments are pushed individually.
4159 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4160 so must be careful about how the stack is used.
4162 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4163 argument stack. This is used, when ACCUMULATE_OUTGOING_ARGS, to indicate
4164 that we need not worry about saving and restoring the stack.
4166 FNDECL is the declaration of the function we are calling.
4168 Return nonzero if this arg should cause sibcall failure,
4169 zero otherwise. */
4172 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4173 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4175 tree pval = arg->tree_value;
4176 rtx reg = 0;
4177 int partial = 0;
4178 int used = 0;
4179 int i, lower_bound = 0, upper_bound = 0;
4180 int sibcall_failure = 0;
4182 if (TREE_CODE (pval) == ERROR_MARK)
4183 return 1;
4185 /* Push a new temporary level for any temporaries we make for
4186 this argument. */
4187 push_temp_slots ();
4189 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4191 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4192 save any previous data at that location. */
4193 if (argblock && ! variable_size && arg->stack)
4195 #ifdef ARGS_GROW_DOWNWARD
4196 /* stack_slot is negative, but we want to index stack_usage_map
4197 with positive values. */
4198 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4199 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4200 else
4201 upper_bound = 0;
4203 lower_bound = upper_bound - arg->locate.size.constant;
4204 #else
4205 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4206 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4207 else
4208 lower_bound = 0;
4210 upper_bound = lower_bound + arg->locate.size.constant;
4211 #endif
4213 i = lower_bound;
4214 /* Don't worry about things in the fixed argument area;
4215 it has already been saved. */
4216 if (i < reg_parm_stack_space)
4217 i = reg_parm_stack_space;
4218 while (i < upper_bound && stack_usage_map[i] == 0)
4219 i++;
4221 if (i < upper_bound)
4223 /* We need to make a save area. */
4224 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4225 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4226 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4227 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4229 if (save_mode == BLKmode)
4231 tree ot = TREE_TYPE (arg->tree_value);
4232 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4233 | TYPE_QUAL_CONST));
4235 arg->save_area = assign_temp (nt, 0, 1, 1);
4236 preserve_temp_slots (arg->save_area);
4237 emit_block_move (validize_mem (arg->save_area), stack_area,
4238 GEN_INT (arg->locate.size.constant),
4239 BLOCK_OP_CALL_PARM);
4240 }
4241 else
4242 {
4243 arg->save_area = gen_reg_rtx (save_mode);
4244 emit_move_insn (arg->save_area, stack_area);
4250 /* If this isn't going to be placed on both the stack and in registers,
4251 set up the register and number of words. */
4252 if (! arg->pass_on_stack)
4254 if (flags & ECF_SIBCALL)
4255 reg = arg->tail_call_reg;
4256 else
4257 reg = arg->reg;
4258 partial = arg->partial;
4259 }
4261 /* Being passed entirely in a register. We shouldn't be called in
4262 this case. */
4263 gcc_assert (reg == 0 || partial != 0);
4265 /* If this arg needs special alignment, don't load the registers
4266 here. */
4267 if (arg->n_aligned_regs != 0)
4268 reg = 0;
4270 /* If this is being passed partially in a register, we can't evaluate
4271 it directly into its stack slot. Otherwise, we can. */
4272 if (arg->value == 0)
4274 /* stack_arg_under_construction is nonzero if a function argument is
4275 being evaluated directly into the outgoing argument list and
4276 expand_call must take special action to preserve the argument list
4277 if it is called recursively.
4279 For scalar function arguments stack_usage_map is sufficient to
4280 determine which stack slots must be saved and restored. Scalar
4281 arguments in general have pass_on_stack == 0.
4283 If this argument is initialized by a function which takes the
4284 address of the argument (a C++ constructor or a C function
4285 returning a BLKmode structure), then stack_usage_map is
4286 insufficient and expand_call must push the stack around the
4287 function call. Such arguments have pass_on_stack == 1.
4289 Note that it is always safe to set stack_arg_under_construction,
4290 but this generates suboptimal code if set when not needed. */
4292 if (arg->pass_on_stack)
4293 stack_arg_under_construction++;
4295 arg->value = expand_expr (pval,
4296 (partial
4297 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4298 ? NULL_RTX : arg->stack,
4299 VOIDmode, EXPAND_STACK_PARM);
4301 /* If we are promoting the object (or if for any other reason the mode
4302 doesn't agree), convert it to the expected mode. */
4304 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4305 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4306 arg->value, arg->unsignedp);
4308 if (arg->pass_on_stack)
4309 stack_arg_under_construction--;
4312 /* Check for overlap with already clobbered argument area. */
4313 if ((flags & ECF_SIBCALL)
4314 && MEM_P (arg->value)
4315 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4316 arg->locate.size.constant))
4317 sibcall_failure = 1;
4319 /* Don't allow anything left on stack from computation
4320 of argument to alloca. */
4321 if (flags & ECF_MAY_BE_ALLOCA)
4322 do_pending_stack_adjust ();
4324 if (arg->value == arg->stack)
4325 /* If the value is already in the stack slot, we are done. */
4326 ;
4327 else if (arg->mode != BLKmode)
4329 int size;
4330 unsigned int parm_align;
4332 /* Argument is a scalar, not entirely passed in registers.
4333 (If part is passed in registers, arg->partial says how much
4334 and emit_push_insn will take care of putting it there.)
4336 Push it, and if its size is less than the
4337 amount of space allocated to it,
4338 also bump stack pointer by the additional space.
4339 Note that in C the default argument promotions
4340 will prevent such mismatches. */
4342 size = GET_MODE_SIZE (arg->mode);
4343 /* Compute how much space the push instruction will push.
4344 On many machines, pushing a byte will advance the stack
4345 pointer by a halfword. */
4346 #ifdef PUSH_ROUNDING
4347 size = PUSH_ROUNDING (size);
4348 #endif
4349 used = size;
4351 /* Compute how much space the argument should get:
4352 round up to a multiple of the alignment for arguments. */
4353 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4354 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4355 / (PARM_BOUNDARY / BITS_PER_UNIT))
4356 * (PARM_BOUNDARY / BITS_PER_UNIT));
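/* Worked example (illustrative numbers only): with PARM_BOUNDARY == 32,
   i.e. 4-byte argument slots, a 1-byte argument left at size == 1 gets
   used = ((1 + 4 - 1) / 4) * 4 = 4, so the 3 bytes of slot padding are
   counted as part of the space this argument occupies.  */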
4358 /* Compute the alignment of the pushed argument. */
4359 parm_align = arg->locate.boundary;
4360 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4362 int pad = used - size;
4363 if (pad)
4364 {
4365 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4366 parm_align = MIN (parm_align, pad_align);
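/* (pad & -pad) isolates the lowest set bit of PAD, i.e. the largest power of
   two dividing it.  Illustration: used == 8 and size == 6 give pad == 2, so
   pad_align == 16 bits and a 32-bit parm_align would be lowered to 16.  */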
4370 /* This isn't already where we want it on the stack, so put it there.
4371 This can either be done with push or copy insns. */
4372 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4373 parm_align, partial, reg, used - size, argblock,
4374 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4375 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4377 /* Unless this is a partially-in-register argument, the argument is now
4378 in the stack. */
4379 if (partial == 0)
4380 arg->value = arg->stack;
4384 /* BLKmode, at least partly to be pushed. */
4386 unsigned int parm_align;
4387 int excess;
4388 rtx size_rtx;
4390 /* Pushing a nonscalar.
4391 If part is passed in registers, PARTIAL says how much
4392 and emit_push_insn will take care of putting it there. */
4394 /* Round its size up to a multiple
4395 of the allocation unit for arguments. */
4397 if (arg->locate.size.var != 0)
4398 {
4399 excess = 0;
4400 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4401 }
4402 else
4403 {
4404 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4405 for BLKmode is careful to avoid it. */
4406 excess = (arg->locate.size.constant
4407 - int_size_in_bytes (TREE_TYPE (pval))
4408 + partial);
4409 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4410 NULL_RTX, TYPE_MODE (sizetype),
4411 EXPAND_NORMAL);
4412 }
4414 parm_align = arg->locate.boundary;
4416 /* When an argument is padded down, the block is aligned to
4417 PARM_BOUNDARY, but the actual argument isn't. */
4418 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4420 if (arg->locate.size.var)
4421 parm_align = BITS_PER_UNIT;
4422 else if (excess)
4423 {
4424 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4425 parm_align = MIN (parm_align, excess_align);
4429 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4431 /* emit_push_insn might not work properly if arg->value and
4432 argblock + arg->locate.offset areas overlap. */
4433 rtx x = arg->value;
4434 int i = 0;
4436 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4437 || (GET_CODE (XEXP (x, 0)) == PLUS
4438 && XEXP (XEXP (x, 0), 0) ==
4439 crtl->args.internal_arg_pointer
4440 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4442 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4443 i = INTVAL (XEXP (XEXP (x, 0), 1));
4445 /* expand_call should ensure this. */
4446 gcc_assert (!arg->locate.offset.var
4447 && arg->locate.size.var == 0
4448 && CONST_INT_P (size_rtx));
4450 if (arg->locate.offset.constant > i)
4452 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4453 sibcall_failure = 1;
4455 else if (arg->locate.offset.constant < i)
4457 /* Use arg->locate.size.constant instead of size_rtx
4458 because we only care about the part of the argument
4459 on the stack. */
4460 if (i < (arg->locate.offset.constant
4461 + arg->locate.size.constant))
4462 sibcall_failure = 1;
4463 }
4464 else
4465 {
4466 /* Even though they appear to be at the same location,
4467 if part of the outgoing argument is in registers,
4468 they aren't really at the same location. Check for
4469 this by making sure that the incoming size is the
4470 same as the outgoing size. */
4471 if (arg->locate.size.constant != INTVAL (size_rtx))
4472 sibcall_failure = 1;
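/* All three cases above ask the same question: does the sibcall argument's
   source, which lives in the caller's incoming argument area, overlap the
   outgoing slot it is being pushed to?  If emit_push_insn could clobber its
   own source, the tail call is abandoned via sibcall_failure.  */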
4477 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4478 parm_align, partial, reg, excess, argblock,
4479 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4480 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4482 /* Unless this is a partially-in-register argument, the argument is now
4483 in the stack.
4485 ??? Unlike the case above, in which we want the actual
4486 address of the data, so that we can load it directly into a
4487 register, here we want the address of the stack slot, so that
4488 it's properly aligned for word-by-word copying or something
4489 like that. It's not clear that this is always correct. */
4490 if (partial == 0)
4491 arg->value = arg->stack_slot;
4494 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4496 tree type = TREE_TYPE (arg->tree_value);
4497 arg->parallel_value
4498 = emit_group_load_into_temps (arg->reg, arg->value, type,
4499 int_size_in_bytes (type));
4502 /* Mark all slots this store used. */
4503 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4504 && argblock && ! variable_size && arg->stack)
4505 for (i = lower_bound; i < upper_bound; i++)
4506 stack_usage_map[i] = 1;
4508 /* Once we have pushed something, pops can't safely
4509 be deferred during the rest of the arguments. */
4510 NO_DEFER_POP;
4512 /* Free any temporary slots made in processing this argument. Show
4513 that we might have taken the address of something and pushed that
4514 as a stack slot. */
4515 preserve_temp_slots (NULL_RTX);
4516 free_temp_slots ();
4517 pop_temp_slots ();
4519 return sibcall_failure;
4520 }
4522 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4525 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4526 const_tree type)
4531 /* If the type has variable size... */
4532 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4533 return true;
4535 /* If the type is marked as addressable (it is required
4536 to be constructed into the stack)... */
4537 if (TREE_ADDRESSABLE (type))
4538 return true;
4540 return false;
4541 }
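/* For example, a C99 variable-length array type has a TYPE_SIZE that is not
   an INTEGER_CST, so it always takes the in-stack path above.  */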
4543 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4544 takes trailing padding of a structure into account. */
4545 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4548 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4553 /* If the type has variable size... */
4554 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4555 return true;
4557 /* If the type is marked as addressable (it is required
4558 to be constructed into the stack)... */
4559 if (TREE_ADDRESSABLE (type))
4560 return true;
4562 /* If the padding and mode of the type is such that a copy into
4563 a register would put it into the wrong part of the register. */
4564 if (mode == BLKmode
4565 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4566 && (FUNCTION_ARG_PADDING (mode, type)
4567 == (BYTES_BIG_ENDIAN ? upward : downward)))
4568 return true;
4570 return false;
4571 }