/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "langhooks.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
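
/* For example, on a hypothetical target where PREFERRED_STACK_BOUNDARY
   is 128 and BITS_PER_UNIT is 8, STACK_BYTES evaluates to 16, i.e. the
   outgoing argument block is kept 16-byte aligned.  */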
/* Data structure and subroutines used within expand_call.  */

  /* Tree node for this argument.  */
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  /* Initially-computed RTL value for argument; only for const functions.  */
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  /* Place that this stack area has been saved, if needed.  */
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;
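
/* Illustrative use of the map (hypothetical layout): if a 4-byte argument
   has been stored at outgoing offset 8, then stack_usage_map[8] through
   stack_usage_map[11] are nonzero, and expanding a nested call must not
   reuse those bytes for its own arguments.  */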
/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));

#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);

  if (static_chain_value != 0)
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
  if (HAVE_call_pop && HAVE_call_value_pop)
      rtx n_pop = GEN_INT (n_popped);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
      emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                         gen_rtx_MEM (FUNCTION_MODE, funexp),
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX));
      emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                   rounded_stack_size_rtx, next_arg_reg,
                                   struct_value_size_rtx));

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, funexp),
                                      rounded_stack_size_rtx, next_arg_reg,
      emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                rounded_stack_size_rtx, next_arg_reg,
                                struct_value_size_rtx));

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for a "pure" function call.  */
  if (ecf_flags & ECF_PURE)
          gen_rtx_USE (VOIDmode,
                       gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));

      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

      CALL_INSN_FUNCTION_USAGE (call_insn)
        = gen_rtx_EXPR_LIST (VOIDmode,
                             gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                             CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
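
      /* Illustrative example (hypothetical numbers): for a 24-byte rounded
         argument block where the callee pops N_POPPED == 8 bytes itself,
         only 24 - 8 == 16 bytes remain for the caller to release, and
         stack_pointer_delta shrinks by the 8 bytes already popped.  */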
  if (!ACCUMULATE_OUTGOING_ARGS)
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
            adjust_stack (rounded_stack_size_rtx);

  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
    anti_adjust_stack (GEN_INT (n_popped));

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
          if (name[1] == '_' && name[2] == 'x')
          else if (name[1] == '_')

          && (! strcmp (tname, "setjmp")
              || ! strcmp (tname, "setjmp_syscall")))
          && ! strcmp (tname, "sigsetjmp"))
          && ! strcmp (tname, "savectx")))
        flags |= ECF_RETURNS_TWICE;

          && ! strcmp (tname, "siglongjmp"))
        flags |= ECF_NORETURN;

      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && ! strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
          & ECF_MAY_BE_ALLOCA))

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
  const_tree type = exp;

      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))

      if (DECL_IS_NOVOPS (exp))

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))

      flags = special_function_p (exp, flags);
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
  tree decl = get_callee_fndecl (t);

    flags = flags_from_decl_or_type (decl);

      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
        if (args[i].value == 0)
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */
        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
        enum machine_mode save_mode;

        while (stack_usage_map[--high] == 0)

        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))

#ifdef ARGS_GROW_DOWNWARD

        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
  enum machine_mode save_mode = GET_MODE (save_area);

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;

  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
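
            /* Illustrative example (hypothetical sizes): with
               UNITS_PER_WORD == 4, a 10-byte unaligned argument needs
               (10 + 4 - 1) / 4 == 3 word-sized pseudos; the last one is
               only partially filled.  */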
        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
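
        /* Illustrative example (hypothetical target): for a 2-byte
           structure with BITS_PER_WORD == 32, endian_correction is
           32 - 2 * 8 == 16, the number of empty high order bits to skip
           when computing the bit offset of the store.  */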
        for (j = 0; j < args[i].n_aligned_regs; j++)
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know the first AND that zeroes out
               the bitfield being set in the register is unnecessary.  The
               store of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */

  /* Count arg position in order args appear.  */

  args_size->constant = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
    call_expr_arg_iterator iter;

    if (struct_value_addr_value)
        args[j].tree_value = struct_value_addr_value;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        tree argtype = TREE_TYPE (arg);
        if (targetm.calls.split_complex_arg
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
            tree subtype = TREE_TYPE (argtype);
            arg = save_expr (arg);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          args[j].tree_value = arg;

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
      tree type = TREE_TYPE (args[i].tree_value);
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              && !TREE_ADDRESSABLE (type)
              && (base = get_base_address (args[i].tree_value))
              && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);

              /* We make a copy of the object and pass the address to the
                 function being called.  */

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

                *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
                *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
      args[i].tail_call_reg = args[i].reg;

        = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                           argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);

      if (reg_parm_stack_space > 0)
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (!OUTGOING_REG_PARM_STACK_SPACE)
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
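
      /* Worked example (hypothetical values): with args_size->constant == 20,
         stack_pointer_delta == 4 and a 16-byte boundary, the expression
         above yields ((20 + 4 + 15) / 16) * 16 - 4 == 28, so that the stack
         pointer is again 16-byte aligned once the 28 bytes are pushed.  */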
      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (!OUTGOING_REG_PARM_STACK_SPACE)
        args_size->constant -= reg_parm_stack_space;

  return unadjusted_args_size;

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)

  for (i = 0; i < num_actuals; i++)
      enum machine_mode mode;

      if ((flags & ECF_LIBCALL_BLOCK) == 0
          && TREE_CODE (args[i].tree_value) != CALL_EXPR)

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;

  return must_preallocate;

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          unsigned int align, boundary;
          unsigned int units_on_stack = 0;
          enum machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].partial == 0)

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, GEN_INT (units_on_stack));

              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);

          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
          else if (GET_CODE (offset) == CONST_INT)
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
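
          /* Worked example (hypothetical values): for a 4-byte offset and a
             64-bit slot boundary, align is (32 | 64) == 96, and
             96 & -96 == 32; the lowest set bit gives the alignment in bits
             that the offset provably preserves.  */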
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);

          if (args[i].partial != 0)
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));

              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);

          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
  /* Get the function to call, in the form of RTL.  */
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);

    /* Generate an rtx (probably a pseudo-register) for the address.  */
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */

/* Return true if and only if SIZE storage units (usually bytes)
   starting from address ADDR overlap with already clobbered argument
   area.  This function is used to determine if we should give up a
   sibcall.  */

static bool
mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
  if (addr == current_function_internal_arg_pointer)
  else if (GET_CODE (addr) == PLUS
           && XEXP (addr, 0) == current_function_internal_arg_pointer
           && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    i = INTVAL (XEXP (addr, 1));
  /* Return true for arg pointer based indexed addressing.  */
  else if (GET_CODE (addr) == PLUS
           && (XEXP (addr, 0) == current_function_internal_arg_pointer
               || XEXP (addr, 1) == current_function_internal_arg_pointer))

#ifdef ARGS_GROW_DOWNWARD

      unsigned HOST_WIDE_INT k;

      for (k = 0; k < size; k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
  for (i = 0; i < num_actuals; i++)
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
          int partial = args[i].partial;

          rtx before_arg = get_last_insn ();
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */

          if (GET_CODE (reg) == PARALLEL)
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  E.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
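
                  /* Illustrative example (hypothetical modes): a QImode
                     value passed in a register on a target with
                     UNITS_PER_WORD == 4 needs
                     shift == (4 - 1) * 8 == 24 to move the byte up to
                     the msb end of the word.  */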
                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                    emit_move_insn (reg, x);

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
              rtx mem = validize_mem (args[i].value);

              /* Check for overlap with already clobbered argument area.  */
                  && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
                *sibcall_failure = 1;

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward

                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                    emit_move_insn (ri, x);

                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
            use_regs (call_fusage, REGNO (reg), nregs);

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
        adjustment += unadjusted_alignment;

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
    = pending_stack_adjust - adjustment + unadjusted_args_size;
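
  /* Worked example (hypothetical values): with pending_stack_adjust == 4,
     stack_pointer_delta == 4, unadjusted_args_size == 8 and a 16-byte
     boundary, unadjusted_alignment starts as (4 + 8) % 16 == 12 and becomes
     12 - (4 % 16) == 8, so adjustment is 4 - (16 - 8) == -4 (push 4 filler
     bytes); pushing the 8 argument bytes then leaves the stack 16-byte
     aligned, and args_size->constant becomes 4 - (-4) + 8 == 16 bytes to
     pop after the call.  */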
/* Scan X expression if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).
   Return nonzero if X expression dereferences such argument slots,
   zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
  code = GET_CODE (x);

    return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
                                                 GET_MODE_SIZE (GET_MODE (x)));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
        if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
      else if (*fmt == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))

/* Scan sequence after INSN if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for ARG to
   stored_args_map bitmap afterwards (when ARG is a register
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if sequence after
   INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
  if (insn == NULL_RTX)
    insn = get_insns ();
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))

  if (mark_stored_args_map)
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      low = arg->locate.slot_offset.constant;

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
  return insn != NULL_RTX;

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */

static bool
shift_return_value (enum machine_mode mode, bool left_p, rtx value)
  HOST_WIDE_INT shift;

  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
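
  /* Illustrative example (hypothetical modes): an SImode value returned at
     the most significant end of a 64-bit hard register needs
     shift == 64 - 32 == 32 before it can be used as an ordinary SImode
     value; LEFT_P selects the direction of the correction.  */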
  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
                           value, GEN_INT (shift), value, 1, OPTAB_WIDEN))

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
      && HARD_REGISTER_P (x)
      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (x))))
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new, x);

/* Generate all the code for a CALL_EXPR exp
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* RTX for the function to be called.  */
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a tail "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  /* The type of the function being called.  */
  bool try_tail_call = CALL_EXPR_TAILCALL (exp);

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Holds the value of implicit argument for the struct value.  */
  tree structure_value_addr_value = NULL_TREE;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  /* Number of complex actual arguments that need to be split.  */
  int num_complex_actuals = 0;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */

  /* Mask of ECF_ flags.  */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved.  */

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  char *stack_usage_map_buf = NULL;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  tree p = CALL_EXPR_FN (exp);
  tree addr = CALL_EXPR_FN (exp);

  /* The alignment of the stack, in bits.  */
  unsigned HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
  /* The static chain value to use for this call.  */
  rtx static_chain_value;
  /* See if this is a "nothrow" function call.  */
2002 if (TREE_NOTHROW (exp))
2003 flags |= ECF_NOTHROW;
2005 /* See if we can find a DECL-node for the actual function, and get the
2006 function attributes (flags) from the function decl or type node. */
2007 fndecl = get_callee_fndecl (exp);
2010 fntype = TREE_TYPE (fndecl);
2011 flags |= flags_from_decl_or_type (fndecl);
2015 fntype = TREE_TYPE (TREE_TYPE (p));
2016 flags |= flags_from_decl_or_type (fntype);
2019 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2021 /* Warn if this value is an aggregate type,
2022 regardless of which calling convention we are using for it. */
2023 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2024 warning (OPT_Waggregate_return, "function call has aggregate value");
2026 /* If the result of a pure or const function call is ignored (or void),
2027 and none of its arguments are volatile, we can avoid expanding the
2028 call and just evaluate the arguments for side-effects. */
2029 if ((flags & (ECF_CONST | ECF_PURE))
2030 && (ignore || target == const0_rtx
2031 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2033 bool volatilep = false;
2035 call_expr_arg_iterator iter;
2037 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2038 if (TREE_THIS_VOLATILE (arg))
2046 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2047 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2052 #ifdef REG_PARM_STACK_SPACE
2053 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2056 if (!OUTGOING_REG_PARM_STACK_SPACE && reg_parm_stack_space > 0 && PUSH_ARGS)
2057 must_preallocate = 1;
2059 /* Set up a place to return a structure. */
2061 /* Cater to broken compilers. */
2062 if (aggregate_value_p (exp, fndecl))
2064 /* This call returns a big structure. */
2065 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2067 #ifdef PCC_STATIC_STRUCT_RETURN
2069 pcc_struct_value = 1;
2071 #else /* not PCC_STATIC_STRUCT_RETURN */
2073 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2075 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2076 structure_value_addr = XEXP (target, 0);
2079 /* For variable-sized objects, we must be called with a target
2080 specified. If we were to allocate space on the stack here,
2081 we would have no way of knowing when to free it. */
2082 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2084 mark_temp_addr_taken (d);
2085 structure_value_addr = XEXP (d, 0);
2089 #endif /* not PCC_STATIC_STRUCT_RETURN */
2092 /* Figure out the amount to which the stack should be aligned. */
2093 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2096 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2097 if (i && i->preferred_incoming_stack_boundary)
2098 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2101 /* Operand 0 is a pointer-to-function; get the type of the function. */
2102 funtype = TREE_TYPE (addr);
2103 gcc_assert (POINTER_TYPE_P (funtype));
2104 funtype = TREE_TYPE (funtype);
2106 /* Count whether there are actual complex arguments that need to be split
2107 into their real and imaginary parts. Munge the type_arg_types
2108 appropriately here as well. */
2109 if (targetm.calls.split_complex_arg)
2111 call_expr_arg_iterator iter;
2113 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2115 tree type = TREE_TYPE (arg);
2116 if (type && TREE_CODE (type) == COMPLEX_TYPE
2117 && targetm.calls.split_complex_arg (type))
2118 num_complex_actuals++;
2120 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2123 type_arg_types = TYPE_ARG_TYPES (funtype);
2125 if (flags & ECF_MAY_BE_ALLOCA)
2126 current_function_calls_alloca = 1;
2128 /* If struct_value_rtx is 0, it means pass the address
2129 as if it were an extra parameter. Put the argument expression
2130 in structure_value_addr_value. */
2131 if (structure_value_addr && struct_value == 0)
2133 /* If structure_value_addr is a REG other than
2134 virtual_outgoing_args_rtx, we can use always use it. If it
2135 is not a REG, we must always copy it into a register.
2136 If it is virtual_outgoing_args_rtx, we must copy it to another
2137 register in some cases. */
2138 rtx temp = (!REG_P (structure_value_addr)
2139 || (ACCUMULATE_OUTGOING_ARGS
2140 && stack_arg_under_construction
2141 && structure_value_addr == virtual_outgoing_args_rtx)
2142 ? copy_addr_to_reg (convert_memory_address
2143 (Pmode, structure_value_addr))
2144 : structure_value_addr);
2146 structure_value_addr_value =
2147 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2148 structure_value_addr_parm = 1;
2151 /* Count the arguments and set NUM_ACTUALS. */
2153 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2155 /* Compute number of named args.
2156 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2158 if (type_arg_types != 0)
2160 = (list_length (type_arg_types)
2161 /* Count the struct value address, if it is passed as a parm. */
2162 + structure_value_addr_parm);
2164 /* If we know nothing, treat all args as named. */
2165 n_named_args = num_actuals;
2167 /* Start updating where the next arg would go.
2169 On some machines (such as the PA) indirect calls have a different
2170 calling convention than normal calls. The fourth argument in
2171 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2173 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2175 /* Now possibly adjust the number of named args.
2176 Normally, don't include the last named arg if anonymous args follow.
2177 We do include the last named arg if
2178 targetm.calls.strict_argument_naming() returns nonzero.
2179 (If no anonymous args follow, the result of list_length is actually
2180 one too large. This is harmless.)
2182 If targetm.calls.pretend_outgoing_varargs_named() returns
2183 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2184 this machine will be able to place unnamed args that were passed
2185 in registers into the stack. So treat all args as named. This
2186 allows the insns emitting for a specific argument list to be
2187 independent of the function declaration.
2189 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2190 we do not have any reliable way to pass unnamed args in
2191 registers, so we must force them into memory. */
2193 if (type_arg_types != 0
2194 && targetm.calls.strict_argument_naming (&args_so_far))
2196 else if (type_arg_types != 0
2197 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2198 /* Don't include the last named arg. */
2201 /* Treat all args as named. */
2202 n_named_args = num_actuals;
2204 /* Make a vector to hold all the information about each arg. */
2205 args = alloca (num_actuals * sizeof (struct arg_data));
2206 memset (args, 0, num_actuals * sizeof (struct arg_data));
2208 /* Build up entries in the ARGS array, compute the size of the
2209 arguments into ARGS_SIZE, etc. */
2210 initialize_argument_information (num_actuals, args, &args_size,
2212 structure_value_addr_value, fndecl,
2213 &args_so_far, reg_parm_stack_space,
2214 &old_stack_level, &old_pending_adj,
2215 &must_preallocate, &flags,
2216 &try_tail_call, CALL_FROM_THUNK_P (exp));
2220 /* If this function requires a variable-sized argument list, don't
2221 try to make a cse'able block for this call. We may be able to
2222 do this eventually, but it is too complicated to keep track of
2223 what insns go in the cse'able block and which don't. */
2225 flags &= ~ECF_LIBCALL_BLOCK;
2226 must_preallocate = 1;
2229 /* Now make final decision about preallocating stack space. */
2230 must_preallocate = finalize_must_preallocate (must_preallocate,
2234 /* If the structure value address will reference the stack pointer, we
2235 must stabilize it. We don't need to do this if we know that we are
2236 not going to adjust the stack pointer in processing this call. */
2238 if (structure_value_addr
2239 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2240 || reg_mentioned_p (virtual_outgoing_args_rtx,
2241 structure_value_addr))
2243 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2244 structure_value_addr = copy_to_reg (structure_value_addr);
2246 /* Tail calls can make things harder to debug, and we've traditionally
2247 pushed these optimizations into -O2. Don't try if we're already
2248 expanding a call, as that means we're an argument. Don't try if
2249 there's cleanups, as we know there's code to follow the call. */
2251 if (currently_expanding_call++ != 0
2252 || !flag_optimize_sibling_calls
2254 || lookup_stmt_eh_region (exp) >= 0
2255 || dbg_cnt (tail_call) == false)
2258 /* Rest of purposes for tail call optimizations to fail. */
2260 #ifdef HAVE_sibcall_epilogue
2261 !HAVE_sibcall_epilogue
2266 /* Doing sibling call optimization needs some work, since
2267 structure_value_addr can be allocated on the stack.
2268 It does not seem worth the effort since few optimizable
2269 sibling calls will return a structure. */
2270 || structure_value_addr != NULL_RTX
2271 /* Check whether the target is able to optimize the call
2273 || !targetm.function_ok_for_sibcall (fndecl, exp)
2274 /* Functions that do not return exactly once may not be sibcall
2276 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2277 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2278 /* If the called function is nested in the current one, it might access
2279 some of the caller's arguments, but could clobber them beforehand if
2280 the argument areas are shared. */
2281 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2282 /* If this function requires more stack slots than the current
2283 function, we cannot change it into a sibling call.
2284 current_function_pretend_args_size is not part of the
2285 stack allocated by our caller. */
2286 || args_size.constant > (current_function_args_size
2287 - current_function_pretend_args_size)
2288 /* If the callee pops its own arguments, then it must pop exactly
2289 the same number of arguments as the current function. */
2290 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2291 != RETURN_POPS_ARGS (current_function_decl,
2292 TREE_TYPE (current_function_decl),
2293 current_function_args_size))
2294 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2297 /* Ensure current function's preferred stack boundary is at least
2298 what we need. We don't have to increase alignment for recursive
2300 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2301 && fndecl != current_function_decl)
2302 cfun->preferred_stack_boundary = preferred_stack_boundary;
2303 if (fndecl == current_function_decl)
2304 cfun->recursive_call_emit = true;
2306 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2308 /* We want to make two insn chains; one for a sibling call, the other
2309 for a normal call. We will select one of the two chains after
2310 initial RTL generation is complete. */
2311 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2313 int sibcall_failure = 0;
2314 /* We want to emit any pending stack adjustments before the tail
2315 recursion "call". That way we know any adjustment after the tail
2316 recursion call can be ignored if we indeed use the tail
2318 int save_pending_stack_adjust = 0;
2319 int save_stack_pointer_delta = 0;
2321 rtx before_call, next_arg_reg, after_args;
2325 /* State variables we need to save and restore between
2327 save_pending_stack_adjust = pending_stack_adjust;
2328 save_stack_pointer_delta = stack_pointer_delta;
2331 flags &= ~ECF_SIBCALL;
2333 flags |= ECF_SIBCALL;
2335 /* Other state variables that we must reinitialize each time
2336 through the loop (that are not initialized by the loop itself). */
2340 /* Start a new sequence for the normal call case.
2342 From this point on, if the sibling call fails, we want to set
2343 sibcall_failure instead of continuing the loop. */
2346 /* Don't let pending stack adjusts add up to too much.
2347 Also, do all pending adjustments now if there is any chance
2348 this might be a call to alloca or if we are expanding a sibling
2350 Also do the adjustments before a throwing call, otherwise
2351 exception handling can fail; PR 19225. */
2352 if (pending_stack_adjust >= 32
2353 || (pending_stack_adjust > 0
2354 && (flags & ECF_MAY_BE_ALLOCA))
2355 || (pending_stack_adjust > 0
2356 && flag_exceptions && !(flags & ECF_NOTHROW))
2358 do_pending_stack_adjust ();
2360 /* When calling a const function, we must pop the stack args right away,
2361 so that the pop is deleted or moved with the call. */
2362 if (pass && (flags & ECF_LIBCALL_BLOCK))
2365 /* Precompute any arguments as needed. */
2367 precompute_arguments (flags, num_actuals, args);
2369 /* Now we are about to start emitting insns that can be deleted
2370 if a libcall is deleted. */
2371 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2374 if (pass == 0 && cfun->stack_protect_guard)
2375 stack_protect_epilogue ();
2377 adjusted_args_size = args_size;
2378 /* Compute the actual size of the argument block required. The variable
2379 and constant sizes must be combined, the size may have to be rounded,
2380 and there may be a minimum required size. When generating a sibcall
2381 pattern, do not round up, since we'll be re-using whatever space our
2383 unadjusted_args_size
2384 = compute_argument_block_size (reg_parm_stack_space,
2385 &adjusted_args_size,
2387 : preferred_stack_boundary));
2389 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2391 /* The argument block when performing a sibling call is the
2392 incoming argument block. */
2395 argblock = virtual_incoming_args_rtx;
2397 #ifdef STACK_GROWS_DOWNWARD
2398 = plus_constant (argblock, current_function_pretend_args_size);
2400 = plus_constant (argblock, -current_function_pretend_args_size);
2402 stored_args_map = sbitmap_alloc (args_size.constant);
2403 sbitmap_zero (stored_args_map);
2406 /* If we have no actual push instructions, or shouldn't use them,
2407 make space for all args right now. */
2408 else if (adjusted_args_size.var != 0)
2410 if (old_stack_level == 0)
2412 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2413 old_stack_pointer_delta = stack_pointer_delta;
2414 old_pending_adj = pending_stack_adjust;
2415 pending_stack_adjust = 0;
2416 /* stack_arg_under_construction says whether a stack arg is
2417 being constructed at the old stack level. Pushing the stack
2418 gets a clean outgoing argument block. */
2419 old_stack_arg_under_construction = stack_arg_under_construction;
2420 stack_arg_under_construction = 0;
2422 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2426 /* Note that we must go through the motions of allocating an argument
2427 block even if the size is zero because we may be storing args
2428 in the area reserved for register arguments, which may be part of
2431 int needed = adjusted_args_size.constant;
2433 /* Store the maximum argument space used. It will be pushed by
2434 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2437 if (needed > current_function_outgoing_args_size)
2438 current_function_outgoing_args_size = needed;
2440 if (must_preallocate)
2442 if (ACCUMULATE_OUTGOING_ARGS)
2444 /* Since the stack pointer will never be pushed, it is
2445 possible for the evaluation of a parm to clobber
2446 something we have already written to the stack.
2447 Since most function calls on RISC machines do not use
2448 the stack, this is uncommon, but must work correctly.
2450 Therefore, we save any area of the stack that was already
2451 written and that we are using. Here we set up to do this
2452 by making a new stack usage map from the old one. The
2453 actual save will be done by store_one_arg.
2455 Another approach might be to try to reorder the argument
2456 evaluations to avoid this conflicting stack usage. */
2458 /* Since we will be writing into the entire argument area,
2459 the map must be allocated for its entire size, not just
2460 the part that is the responsibility of the caller. */
2461 if (!OUTGOING_REG_PARM_STACK_SPACE)
2462 needed += reg_parm_stack_space;
2464 #ifdef ARGS_GROW_DOWNWARD
2465 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2468 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2471 if (stack_usage_map_buf)
2472 free (stack_usage_map_buf);
2473 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2474 stack_usage_map = stack_usage_map_buf;
2476 if (initial_highest_arg_in_use)
2477 memcpy (stack_usage_map, initial_stack_usage_map,
2478 initial_highest_arg_in_use);
2480 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2481 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2482 (highest_outgoing_arg_in_use
2483 - initial_highest_arg_in_use));
2486 /* The address of the outgoing argument list must not be
2487 copied to a register here, because argblock would be left
2488 pointing to the wrong place after the call to
2489 allocate_dynamic_stack_space below. */
2491 argblock = virtual_outgoing_args_rtx;
2495 if (inhibit_defer_pop == 0)
2497 /* Try to reuse some or all of the pending_stack_adjust
2498 to get this space. */
2500 = (combine_pending_stack_adjustment_and_call
2501 (unadjusted_args_size,
2502 &adjusted_args_size,
2503 preferred_unit_stack_boundary));
2505 /* combine_pending_stack_adjustment_and_call computes
2506 an adjustment before the arguments are allocated.
2507 Account for them and see whether or not the stack
2508 needs to go up or down. */
2509 needed = unadjusted_args_size - needed;
2513 /* We're releasing stack space. */
2514 /* ??? We can avoid any adjustment at all if we're
2515 already aligned. FIXME. */
2516 pending_stack_adjust = -needed;
2517 do_pending_stack_adjust ();
2521 /* We need to allocate space. We'll do that in
2522 push_block below. */
2523 pending_stack_adjust = 0;
2526 /* Special case this because overhead of `push_block' in
2527 this case is non-trivial. */
2529 argblock = virtual_outgoing_args_rtx;
2532 argblock = push_block (GEN_INT (needed), 0, 0);
2533 #ifdef ARGS_GROW_DOWNWARD
2534 argblock = plus_constant (argblock, needed);
2538 /* We only really need to call `copy_to_reg' in the case
2539 where push insns are going to be used to pass ARGBLOCK
2540 to a function call in ARGS. In that case, the stack
2541 pointer changes value from the allocation point to the
2542 call point, and hence the value of
2543 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2544 as well always do it. */
2545 argblock = copy_to_reg (argblock);
2550 if (ACCUMULATE_OUTGOING_ARGS)
2552 /* The save/restore code in store_one_arg handles all
2553 cases except one: a constructor call (including a C
2554 function returning a BLKmode struct) to initialize
2556 if (stack_arg_under_construction)
2559 = GEN_INT (adjusted_args_size.constant
2560 + (OUTGOING_REG_PARM_STACK_SPACE ? 0
2561 : reg_parm_stack_space));
2562 if (old_stack_level == 0)
2564 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2566 old_stack_pointer_delta = stack_pointer_delta;
2567 old_pending_adj = pending_stack_adjust;
2568 pending_stack_adjust = 0;
2569 /* stack_arg_under_construction says whether a stack
2570 arg is being constructed at the old stack level.
2571 Pushing the stack gets a clean outgoing argument
2573 old_stack_arg_under_construction
2574 = stack_arg_under_construction;
2575 stack_arg_under_construction = 0;
2576 /* Make a new map for the new argument list. */
2577 if (stack_usage_map_buf)
2578 free (stack_usage_map_buf);
2579 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2580 stack_usage_map = stack_usage_map_buf;
2581 highest_outgoing_arg_in_use = 0;
2583 allocate_dynamic_stack_space (push_size, NULL_RTX,
2587 /* If argument evaluation might modify the stack pointer,
2588 copy the address of the argument list to a register. */
2589 for (i = 0; i < num_actuals; i++)
2590 if (args[i].pass_on_stack)
2592 argblock = copy_addr_to_reg (argblock);
2597 compute_argument_addresses (args, argblock, num_actuals);
2599 /* If we push args individually in reverse order, perform stack alignment
2600 before the first push (the last arg). */
2601 if (PUSH_ARGS_REVERSED && argblock == 0
2602 && adjusted_args_size.constant != unadjusted_args_size)
2604 /* When the stack adjustment is pending, we get better code
2605 by combining the adjustments. */
2606 if (pending_stack_adjust
2607 && ! (flags & ECF_LIBCALL_BLOCK)
2608 && ! inhibit_defer_pop)
2610 pending_stack_adjust
2611 = (combine_pending_stack_adjustment_and_call
2612 (unadjusted_args_size,
2613 &adjusted_args_size,
2614 preferred_unit_stack_boundary));
2615 do_pending_stack_adjust ();
2617 else if (argblock == 0)
2618 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2619 - unadjusted_args_size));
2621 /* Now that the stack is properly aligned, pops can't safely
2622 be deferred during the evaluation of the arguments. */
2625 funexp = rtx_for_function_call (fndecl, addr);
2627 /* Figure out the register where the value, if any, will come back. */
2629 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2630 && ! structure_value_addr)
2632 if (pcc_struct_value)
2633 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2634 fndecl, NULL, (pass == 0));
2636 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2639 /* If VALREG is a PARALLEL whose first member has a zero
2640 offset, use that. This is for targets such as m68k that
2641 return the same value in multiple places. */
2642 if (GET_CODE (valreg) == PARALLEL)
2644 rtx elem = XVECEXP (valreg, 0, 0);
2645 rtx where = XEXP (elem, 0);
2646 rtx offset = XEXP (elem, 1);
2647 if (offset == const0_rtx
2648 && GET_MODE (where) == GET_MODE (valreg))
2653 /* Precompute all register parameters. It isn't safe to compute anything
2654 once we have started filling any specific hard regs. */
2655 precompute_register_parameters (num_actuals, args, ®_parm_seen);
2657 if (CALL_EXPR_STATIC_CHAIN (exp))
2658 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2660 static_chain_value = 0;
2662 #ifdef REG_PARM_STACK_SPACE
2663 /* Save the fixed argument area if it's part of the caller's frame and
2664 is clobbered by argument setup for this call. */
2665 if (ACCUMULATE_OUTGOING_ARGS && pass)
2666 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2667 &low_to_save, &high_to_save);
2670 /* Now store (and compute if necessary) all non-register parms.
2671 These come before register parms, since they can require block-moves,
2672 which could clobber the registers used for register parms.
2673 Parms which have partial registers are not stored here,
2674 but we do preallocate space here if they want that. */
2676 for (i = 0; i < num_actuals; i++)
2677 if (args[i].reg == 0 || args[i].pass_on_stack)
2679 rtx before_arg = get_last_insn ();
2681 if (store_one_arg (&args[i], argblock, flags,
2682 adjusted_args_size.var != 0,
2683 reg_parm_stack_space)
2685 && check_sibcall_argument_overlap (before_arg,
2687 sibcall_failure = 1;
2689 if (flags & ECF_CONST
2691 && args[i].value == args[i].stack)
2692 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2693 gen_rtx_USE (VOIDmode,
2698 /* If we have a parm that is passed in registers but not in memory
2699 and whose alignment does not permit a direct copy into registers,
2700 make a group of pseudos that correspond to each register that we
2702 if (STRICT_ALIGNMENT)
2703 store_unaligned_arguments_into_pseudos (args, num_actuals);
2705 /* Now store any partially-in-registers parm.
2706 This is the last place a block-move can happen. */
2708 for (i = 0; i < num_actuals; i++)
2709 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2711 rtx before_arg = get_last_insn ();
2713 if (store_one_arg (&args[i], argblock, flags,
2714 adjusted_args_size.var != 0,
2715 reg_parm_stack_space)
2717 && check_sibcall_argument_overlap (before_arg,
2719 sibcall_failure = 1;
2722 /* If we pushed args in forward order, perform stack alignment
2723 after pushing the last arg. */
2724 if (!PUSH_ARGS_REVERSED && argblock == 0)
2725 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2726 - unadjusted_args_size));
2728 /* If register arguments require space on the stack and stack space
2729 was not preallocated, allocate stack space here for arguments
2730 passed in registers. */
2731 if (OUTGOING_REG_PARM_STACK_SPACE && !ACCUMULATE_OUTGOING_ARGS
2732 && must_preallocate == 0 && reg_parm_stack_space > 0)
2733 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2735 /* Pass the function the address in which to return a
2737 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2739 structure_value_addr
2740 = convert_memory_address (Pmode, structure_value_addr);
2741 emit_move_insn (struct_value,
2743 force_operand (structure_value_addr,
2746 if (REG_P (struct_value))
2747 use_reg (&call_fusage, struct_value);
2750 after_args = get_last_insn ();
2751 funexp = prepare_call_address (funexp, static_chain_value,
2752 &call_fusage, reg_parm_seen, pass == 0);
2754 load_register_parameters (args, num_actuals, &call_fusage, flags,
2755 pass == 0, &sibcall_failure);
2757 /* Save a pointer to the last insn before the call, so that we can
2758 later safely search backwards to find the CALL_INSN. */
2759 before_call = get_last_insn ();
2761 /* Set up next argument register. For sibling calls on machines
2762 with register windows this should be the incoming register. */
2763 #ifdef FUNCTION_INCOMING_ARG
2765 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2769 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2772 /* All arguments and registers used for the call must be set up by
2775 /* Stack must be properly aligned now. */
2777 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2779 /* Generate the actual call instruction. */
2780 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2781 adjusted_args_size.constant, struct_value_size,
2782 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2783 flags, & args_so_far);
2785 /* If the call setup or the call itself overlaps with anything
2786 of the argument setup we probably clobbered our call address.
2787 In that case we can't do sibcalls. */
2789 && check_sibcall_argument_overlap (after_args, 0, 0))
2790 sibcall_failure = 1;
2792 /* If a non-BLKmode value is returned at the most significant end
2793 of a register, shift the register right by the appropriate amount
2794 and update VALREG accordingly. BLKmode values are handled by the
2795 group load/store machinery below. */
2796 if (!structure_value_addr
2797 && !pcc_struct_value
2798 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2799 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2801 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2802 sibcall_failure = 1;
2803 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2806 /* If call is cse'able, make appropriate pair of reg-notes around it.
2807 Test valreg so we don't crash; may safely ignore `const'
2808 if return type is void. Disable for PARALLEL return values, because
2809 we have no way to move such values into a pseudo register. */
2810 if (pass && (flags & ECF_LIBCALL_BLOCK))
2814 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2816 insns = get_insns ();
2818 /* Expansion of block moves possibly introduced a loop that may
2819 not appear inside libcall block. */
2820 for (insn = insns; insn; insn = NEXT_INSN (insn))
2832 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2834 /* Mark the return value as a pointer if needed. */
2835 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2836 mark_reg_pointer (temp,
2837 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2840 if (flag_unsafe_math_optimizations
2842 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2843 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2844 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2845 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2846 note = gen_rtx_fmt_e (SQRT,
2848 args[0].initial_value);
2851 /* Construct an "equal form" for the value which
2852 mentions all the arguments in order as well as
2853 the function name. */
2854 for (i = 0; i < num_actuals; i++)
2855 note = gen_rtx_EXPR_LIST (VOIDmode,
2856 args[i].initial_value, note);
2857 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2859 if (flags & ECF_PURE)
2860 note = gen_rtx_EXPR_LIST (VOIDmode,
2861 gen_rtx_USE (VOIDmode,
2862 gen_rtx_MEM (BLKmode,
2863 gen_rtx_SCRATCH (VOIDmode))),
2866 emit_libcall_block (insns, temp, valreg, note);
2871 else if (pass && (flags & ECF_MALLOC))
2873 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2876 /* The return value from a malloc-like function is a pointer. */
2877 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2878 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2880 emit_move_insn (temp, valreg);
2882 /* The return value from a malloc-like function can not alias
2884 last = get_last_insn ();
2886 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2888 /* Write out the sequence. */
2889 insns = get_insns ();
2895 /* For calls to `setjmp', etc., inform
2896 function.c:setjmp_warnings that it should complain if
2897 nonvolatile values are live. For functions that cannot
2898 return, inform flow that control does not fall through. */
2900 if ((flags & ECF_NORETURN) || pass == 0)
2902 /* The barrier must be emitted
2903 immediately after the CALL_INSN. Some ports emit more
2904 than just a CALL_INSN above, so we must search for it here. */
2906 rtx last = get_last_insn ();
2907 while (!CALL_P (last))
2909 last = PREV_INSN (last);
2910 /* There was no CALL_INSN? */
2911 gcc_assert (last != before_call);
2914 emit_barrier_after (last);
2916 /* Stack adjustments after a noreturn call are dead code.
2917 However when NO_DEFER_POP is in effect, we must preserve
2918 stack_pointer_delta. */
2919 if (inhibit_defer_pop == 0)
2921 stack_pointer_delta = old_stack_allocated;
2922 pending_stack_adjust = 0;
2926 /* If value type not void, return an rtx for the value. */
2928 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2930 target = const0_rtx;
2931 else if (structure_value_addr)
2933 if (target == 0 || !MEM_P (target))
2936 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2937 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2938 structure_value_addr));
2939 set_mem_attributes (target, exp, 1);
2942 else if (pcc_struct_value)
2944 /* This is the special C++ case where we need to
2945 know what the true target was. We take care to
2946 never use this value more than once in one expression. */
2947 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2948 copy_to_reg (valreg));
2949 set_mem_attributes (target, exp, 1);
2951 /* Handle calls that return values in multiple non-contiguous locations.
2952 The Irix 6 ABI has examples of this. */
2953 else if (GET_CODE (valreg) == PARALLEL)
2957 /* This will only be assigned once, so it can be readonly. */
2958 tree nt = build_qualified_type (TREE_TYPE (exp),
2959 (TYPE_QUALS (TREE_TYPE (exp))
2960 | TYPE_QUAL_CONST));
2962 target = assign_temp (nt, 0, 1, 1);
2965 if (! rtx_equal_p (target, valreg))
2966 emit_group_store (target, valreg, TREE_TYPE (exp),
2967 int_size_in_bytes (TREE_TYPE (exp)));
2969 /* We can not support sibling calls for this case. */
2970 sibcall_failure = 1;
2973 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2974 && GET_MODE (target) == GET_MODE (valreg))
2976 bool may_overlap = false;
2978 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2979 reg to a plain register. */
2980 if (!REG_P (target) || HARD_REGISTER_P (target))
2981 valreg = avoid_likely_spilled_reg (valreg);
2983 /* If TARGET is a MEM in the argument area, and we have
2984 saved part of the argument area, then we can't store
2985 directly into TARGET as it may get overwritten when we
2986 restore the argument save area below. Don't work too
2987 hard though and simply force TARGET to a register if it
2988 is a MEM; the optimizer is quite likely to sort it out. */
2989 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2990 for (i = 0; i < num_actuals; i++)
2991 if (args[i].save_area)
2998 target = copy_to_reg (valreg);
3001 /* TARGET and VALREG cannot be equal at this point
3002 because the latter would not have
3003 REG_FUNCTION_VALUE_P true, while the former would if
3004 it were referring to the same register.
3006 If they refer to the same register, this move will be
3007 a no-op, except when function inlining is being
3009 emit_move_insn (target, valreg);
3011 /* If we are setting a MEM, this code must be executed.
3012 Since it is emitted after the call insn, sibcall
3013 optimization cannot be performed in that case. */
3015 sibcall_failure = 1;
3018 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3020 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3022 /* We can not support sibling calls for this case. */
3023 sibcall_failure = 1;
3026 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3028 if (targetm.calls.promote_function_return(funtype))
3030 /* If we promoted this return value, make the proper SUBREG.
3031 TARGET might be const0_rtx here, so be careful. */
3033 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3034 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3036 tree type = TREE_TYPE (exp);
3037 int unsignedp = TYPE_UNSIGNED (type);
3039 enum machine_mode pmode;
3041 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
3042 /* If we don't promote as expected, something is wrong. */
3043 gcc_assert (GET_MODE (target) == pmode);
3045 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3046 && (GET_MODE_SIZE (GET_MODE (target))
3047 > GET_MODE_SIZE (TYPE_MODE (type))))
3049 offset = GET_MODE_SIZE (GET_MODE (target))
3050 - GET_MODE_SIZE (TYPE_MODE (type));
3051 if (! BYTES_BIG_ENDIAN)
3052 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3053 else if (! WORDS_BIG_ENDIAN)
3054 offset %= UNITS_PER_WORD;
3056 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3057 SUBREG_PROMOTED_VAR_P (target) = 1;
3058 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3062 /* If size of args is variable or this was a constructor call for a stack
3063 argument, restore saved stack-pointer value. */
3065 if (old_stack_level)
3067 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3068 stack_pointer_delta = old_stack_pointer_delta;
3069 pending_stack_adjust = old_pending_adj;
3070 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3071 stack_arg_under_construction = old_stack_arg_under_construction;
3072 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3073 stack_usage_map = initial_stack_usage_map;
3074 sibcall_failure = 1;
3076 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3078 #ifdef REG_PARM_STACK_SPACE
3080 restore_fixed_argument_area (save_area, argblock,
3081 high_to_save, low_to_save);
3084 /* If we saved any argument areas, restore them. */
3085 for (i = 0; i < num_actuals; i++)
3086 if (args[i].save_area)
3088 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3090 = gen_rtx_MEM (save_mode,
3091 memory_address (save_mode,
3092 XEXP (args[i].stack_slot, 0)));
3094 if (save_mode != BLKmode)
3095 emit_move_insn (stack_area, args[i].save_area);
3097 emit_block_move (stack_area, args[i].save_area,
3098 GEN_INT (args[i].locate.size.constant),
3099 BLOCK_OP_CALL_PARM);
3102 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3103 stack_usage_map = initial_stack_usage_map;
3106 /* If this was alloca, record the new stack level for nonlocal gotos.
3107 Check for the handler slots since we might not have a save area
3108 for non-local gotos. */
3110 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3111 update_nonlocal_goto_save_area ();
3113 /* Free up storage we no longer need. */
3114 for (i = 0; i < num_actuals; ++i)
3115 if (args[i].aligned_regs)
3116 free (args[i].aligned_regs);
3118 insns = get_insns ();
3123 tail_call_insns = insns;
3125 /* Restore the pending stack adjustment now that we have
3126 finished generating the sibling call sequence. */
3128 pending_stack_adjust = save_pending_stack_adjust;
3129 stack_pointer_delta = save_stack_pointer_delta;
3131 /* Prepare arg structure for next iteration. */
3132 for (i = 0; i < num_actuals; i++)
3135 args[i].aligned_regs = 0;
3139 sbitmap_free (stored_args_map);
3143 normal_call_insns = insns;
3145 /* Verify that we've deallocated all the stack we used. */
3146 gcc_assert ((flags & ECF_NORETURN)
3147 || (old_stack_allocated
3148 == stack_pointer_delta - pending_stack_adjust));
3151 /* If something prevents making this a sibling call,
3152 zero out the sequence. */
3153 if (sibcall_failure)
3154 tail_call_insns = NULL_RTX;
3159 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3160 arguments too, as argument area is now clobbered by the call. */
3161 if (tail_call_insns)
3163 emit_insn (tail_call_insns);
3164 cfun->tail_call_emit = true;
3167 emit_insn (normal_call_insns);
3169 currently_expanding_call--;
3171 if (stack_usage_map_buf)
3172 free (stack_usage_map_buf);
3177 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3178 this function's incoming arguments.
3180 At the start of RTL generation we know the only REG_EQUIV notes
3181 in the rtl chain are those for incoming arguments, so we can look
3182 for REG_EQUIV notes between the start of the function and the
3183 NOTE_INSN_FUNCTION_BEG.
3185 This is (slight) overkill. We could keep track of the highest
3186 argument we clobber and be more selective in removing notes, but it
3187 does not seem to be worth the effort. */
3190 fixup_tail_calls (void)
3194 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3198 /* There are never REG_EQUIV notes for the incoming arguments
3199 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3201 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3204 note = find_reg_note (insn, REG_EQUIV, 0);
3206 remove_note (insn, note);
3207 note = find_reg_note (insn, REG_EQUIV, 0);
3212 /* Traverse a list of TYPES and expand all complex types into their
3215 split_complex_types (tree types)
3219 /* Before allocating memory, check for the common case of no complex. */
3220 for (p = types; p; p = TREE_CHAIN (p))
3222 tree type = TREE_VALUE (p);
3223 if (TREE_CODE (type) == COMPLEX_TYPE
3224 && targetm.calls.split_complex_arg (type))
3230 types = copy_list (types);
3232 for (p = types; p; p = TREE_CHAIN (p))
3234 tree complex_type = TREE_VALUE (p);
3236 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3237 && targetm.calls.split_complex_arg (complex_type))
3241 /* Rewrite complex type with component type. */
3242 TREE_VALUE (p) = TREE_TYPE (complex_type);
3243 next = TREE_CHAIN (p);
3245 /* Add another component type for the imaginary part. */
3246 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3247 TREE_CHAIN (p) = imag;
3248 TREE_CHAIN (imag) = next;
3250 /* Skip the newly created node. */
3258 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3259 The RETVAL parameter specifies whether return value needs to be saved, other
3260 parameters are documented in the emit_library_call function below. */
3263 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3264 enum libcall_type fn_type,
3265 enum machine_mode outmode, int nargs, va_list p)
3267 /* Total size in bytes of all the stack-parms scanned so far. */
3268 struct args_size args_size;
3269 /* Size of arguments before any adjustments (such as rounding). */
3270 struct args_size original_args_size;
3276 CUMULATIVE_ARGS args_so_far;
3280 enum machine_mode mode;
3283 struct locate_and_pad_arg_data locate;
3287 int old_inhibit_defer_pop = inhibit_defer_pop;
3288 rtx call_fusage = 0;
3291 int pcc_struct_value = 0;
3292 int struct_value_size = 0;
3294 int reg_parm_stack_space = 0;
3297 tree tfom; /* type_for_mode (outmode, 0) */
3299 #ifdef REG_PARM_STACK_SPACE
3300 /* Define the boundary of the register parm stack space that needs to be
3302 int low_to_save, high_to_save;
3303 rtx save_area = 0; /* Place that it is saved. */
3306 /* Size of the stack reserved for parameter registers. */
3307 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3308 char *initial_stack_usage_map = stack_usage_map;
3309 char *stack_usage_map_buf = NULL;
3311 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3313 #ifdef REG_PARM_STACK_SPACE
3314 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3317 /* By default, library functions can not throw. */
3318 flags = ECF_NOTHROW;
3330 case LCT_CONST_MAKE_BLOCK:
3331 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3333 case LCT_PURE_MAKE_BLOCK:
3334 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3337 flags |= ECF_NORETURN;
3340 flags = ECF_NORETURN;
3342 case LCT_RETURNS_TWICE:
3343 flags = ECF_RETURNS_TWICE;
3348 /* Ensure current function's preferred stack boundary is at least
3350 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3351 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3353 /* If this kind of value comes back in memory,
3354 decide where in memory it should come back. */
3355 if (outmode != VOIDmode)
3357 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3358 if (aggregate_value_p (tfom, 0))
3360 #ifdef PCC_STATIC_STRUCT_RETURN
3362 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3363 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3364 pcc_struct_value = 1;
3366 value = gen_reg_rtx (outmode);
3367 #else /* not PCC_STATIC_STRUCT_RETURN */
3368 struct_value_size = GET_MODE_SIZE (outmode);
3369 if (value != 0 && MEM_P (value))
3372 mem_value = assign_temp (tfom, 0, 1, 1);
3374 /* This call returns a big structure. */
3375 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3379 tfom = void_type_node;
3381 /* ??? Unfinished: must pass the memory address as an argument. */
3383 /* Copy all the libcall-arguments out of the varargs data
3384 and into a vector ARGVEC.
3386 Compute how to pass each argument. We only support a very small subset
3387 of the full argument passing conventions to limit complexity here since
3388 library functions shouldn't have many args. */
3390 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3391 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3393 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3394 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3396 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3399 args_size.constant = 0;
3404 /* Now we are about to start emitting insns that can be deleted
3405 if a libcall is deleted. */
3406 if (flags & ECF_LIBCALL_BLOCK)
3411 /* If there's a structure value address to be passed,
3412 either pass it in the special place, or pass it as an extra argument. */
3413 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3415 rtx addr = XEXP (mem_value, 0);
3419 /* Make sure it is a reasonable operand for a move or push insn. */
3420 if (!REG_P (addr) && !MEM_P (addr)
3421 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3422 addr = force_operand (addr, NULL_RTX);
3424 argvec[count].value = addr;
3425 argvec[count].mode = Pmode;
3426 argvec[count].partial = 0;
3428 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3429 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3430 NULL_TREE, 1) == 0);
3432 locate_and_pad_parm (Pmode, NULL_TREE,
3433 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3436 argvec[count].reg != 0,
3438 0, NULL_TREE, &args_size, &argvec[count].locate);
3440 if (argvec[count].reg == 0 || argvec[count].partial != 0
3441 || reg_parm_stack_space > 0)
3442 args_size.constant += argvec[count].locate.size.constant;
3444 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3449 for (; count < nargs; count++)
3451 rtx val = va_arg (p, rtx);
3452 enum machine_mode mode = va_arg (p, enum machine_mode);
3454 /* We cannot convert the arg value to the mode the library wants here;
3455 must do it earlier where we know the signedness of the arg. */
3456 gcc_assert (mode != BLKmode
3457 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3459 /* Make sure it is a reasonable operand for a move or push insn. */
3460 if (!REG_P (val) && !MEM_P (val)
3461 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3462 val = force_operand (val, NULL_RTX);
3464 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3468 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3470 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3471 functions, so we have to pretend this isn't such a function. */
3472 if (flags & ECF_LIBCALL_BLOCK)
3474 rtx insns = get_insns ();
3478 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3480 /* If this was a CONST function, it is now PURE since
3481 it now reads memory. */
3482 if (flags & ECF_CONST)
3484 flags &= ~ECF_CONST;
3488 if (GET_MODE (val) == MEM && !must_copy)
3492 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3494 emit_move_insn (slot, val);
3497 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3498 gen_rtx_USE (VOIDmode, slot),
3501 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3502 gen_rtx_CLOBBER (VOIDmode,
3507 val = force_operand (XEXP (slot, 0), NULL_RTX);
3510 argvec[count].value = val;
3511 argvec[count].mode = mode;
3513 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3515 argvec[count].partial
3516 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3518 locate_and_pad_parm (mode, NULL_TREE,
3519 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3522 argvec[count].reg != 0,
3524 argvec[count].partial,
3525 NULL_TREE, &args_size, &argvec[count].locate);
3527 gcc_assert (!argvec[count].locate.size.var);
3529 if (argvec[count].reg == 0 || argvec[count].partial != 0
3530 || reg_parm_stack_space > 0)
3531 args_size.constant += argvec[count].locate.size.constant;
3533 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3536 /* If this machine requires an external definition for library
3537 functions, write one out. */
3538 assemble_external_libcall (fun);
3540 original_args_size = args_size;
3541 args_size.constant = (((args_size.constant
3542 + stack_pointer_delta
3546 - stack_pointer_delta);
3548 args_size.constant = MAX (args_size.constant,
3549 reg_parm_stack_space);
3551 if (!OUTGOING_REG_PARM_STACK_SPACE)
3552 args_size.constant -= reg_parm_stack_space;
3554 if (args_size.constant > current_function_outgoing_args_size)
3555 current_function_outgoing_args_size = args_size.constant;
3557 if (ACCUMULATE_OUTGOING_ARGS)
3559 /* Since the stack pointer will never be pushed, it is possible for
3560 the evaluation of a parm to clobber something we have already
3561 written to the stack. Since most function calls on RISC machines
3562 do not use the stack, this is uncommon, but must work correctly.
3564 Therefore, we save any area of the stack that was already written
3565 and that we are using. Here we set up to do this by making a new
3566 stack usage map from the old one.
3568 Another approach might be to try to reorder the argument
3569 evaluations to avoid this conflicting stack usage. */
3571 needed = args_size.constant;
3573 /* Since we will be writing into the entire argument area, the
3574 map must be allocated for its entire size, not just the part that
3575 is the responsibility of the caller. */
3576 if (!OUTGOING_REG_PARM_STACK_SPACE)
3577 needed += reg_parm_stack_space;
3579 #ifdef ARGS_GROW_DOWNWARD
3580 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3583 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3586 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3587 stack_usage_map = stack_usage_map_buf;
3589 if (initial_highest_arg_in_use)
3590 memcpy (stack_usage_map, initial_stack_usage_map,
3591 initial_highest_arg_in_use);
3593 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3594 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3595 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3598 /* We must be careful to use virtual regs before they're instantiated,
3599 and real regs afterwards. Loop optimization, for example, can create
3600 new libcalls after we've instantiated the virtual regs, and if we
3601 use virtuals anyway, they won't match the rtl patterns. */
3603 if (virtuals_instantiated)
3604 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3606 argblock = virtual_outgoing_args_rtx;
3611 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3614 /* If we push args individually in reverse order, perform stack alignment
3615 before the first push (the last arg). */
3616 if (argblock == 0 && PUSH_ARGS_REVERSED)
3617 anti_adjust_stack (GEN_INT (args_size.constant
3618 - original_args_size.constant));
3620 if (PUSH_ARGS_REVERSED)
3631 #ifdef REG_PARM_STACK_SPACE
3632 if (ACCUMULATE_OUTGOING_ARGS)
3634 /* The argument list is the property of the called routine and it
3635 may clobber it. If the fixed area has been used for previous
3636 parameters, we must save and restore it. */
3637 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3638 &low_to_save, &high_to_save);
3642 /* Push the args that need to be pushed. */
3644 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3645 are to be pushed. */
3646 for (count = 0; count < nargs; count++, argnum += inc)
3648 enum machine_mode mode = argvec[argnum].mode;
3649 rtx val = argvec[argnum].value;
3650 rtx reg = argvec[argnum].reg;
3651 int partial = argvec[argnum].partial;
3652 int lower_bound = 0, upper_bound = 0, i;
3654 if (! (reg != 0 && partial == 0))
3656 if (ACCUMULATE_OUTGOING_ARGS)
3658 /* If this is being stored into a pre-allocated, fixed-size,
3659 stack area, save any previous data at that location. */
3661 #ifdef ARGS_GROW_DOWNWARD
3662 /* stack_slot is negative, but we want to index stack_usage_map
3663 with positive values. */
3664 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3665 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3667 lower_bound = argvec[argnum].locate.offset.constant;
3668 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3672 /* Don't worry about things in the fixed argument area;
3673 it has already been saved. */
3674 if (i < reg_parm_stack_space)
3675 i = reg_parm_stack_space;
3676 while (i < upper_bound && stack_usage_map[i] == 0)
3679 if (i < upper_bound)
3681 /* We need to make a save area. */
3683 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3684 enum machine_mode save_mode
3685 = mode_for_size (size, MODE_INT, 1);
3687 = plus_constant (argblock,
3688 argvec[argnum].locate.offset.constant);
3690 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3692 if (save_mode == BLKmode)
3694 argvec[argnum].save_area
3695 = assign_stack_temp (BLKmode,
3696 argvec[argnum].locate.size.constant,
3699 emit_block_move (validize_mem (argvec[argnum].save_area),
3701 GEN_INT (argvec[argnum].locate.size.constant),
3702 BLOCK_OP_CALL_PARM);
3706 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3708 emit_move_insn (argvec[argnum].save_area, stack_area);
3713 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3714 partial, reg, 0, argblock,
3715 GEN_INT (argvec[argnum].locate.offset.constant),
3716 reg_parm_stack_space,
3717 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3719 /* Now mark the segment we just used. */
3720 if (ACCUMULATE_OUTGOING_ARGS)
3721 for (i = lower_bound; i < upper_bound; i++)
3722 stack_usage_map[i] = 1;
3726 if (flags & ECF_CONST)
3730 /* Indicate argument access so that alias.c knows that these
3733 use = plus_constant (argblock,
3734 argvec[argnum].locate.offset.constant);
3736 /* When arguments are pushed, trying to tell alias.c where
3737 exactly this argument is won't work, because the
3738 auto-increment causes confusion. So we merely indicate
3739 that we access something with a known mode somewhere on
3741 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3742 gen_rtx_SCRATCH (Pmode));
3743 use = gen_rtx_MEM (argvec[argnum].mode, use);
3744 use = gen_rtx_USE (VOIDmode, use);
3745 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3750 /* If we pushed args in forward order, perform stack alignment
3751 after pushing the last arg. */
3752 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3753 anti_adjust_stack (GEN_INT (args_size.constant
3754 - original_args_size.constant));
3756 if (PUSH_ARGS_REVERSED)
3761 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3763 /* Now load any reg parms into their regs. */
3765 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3766 are to be pushed. */
3767 for (count = 0; count < nargs; count++, argnum += inc)
3769 enum machine_mode mode = argvec[argnum].mode;
3770 rtx val = argvec[argnum].value;
3771 rtx reg = argvec[argnum].reg;
3772 int partial = argvec[argnum].partial;
3774 /* Handle calls that pass values in multiple non-contiguous
3775 locations. The PA64 has examples of this for library calls. */
3776 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3777 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3778 else if (reg != 0 && partial == 0)
3779 emit_move_insn (reg, val);
3784 /* Any regs containing parms remain in use through the call. */
3785 for (count = 0; count < nargs; count++)
3787 rtx reg = argvec[count].reg;
3788 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3789 use_group_regs (&call_fusage, reg);
3792 int partial = argvec[count].partial;
3796 gcc_assert (partial % UNITS_PER_WORD == 0);
3797 nregs = partial / UNITS_PER_WORD;
3798 use_regs (&call_fusage, REGNO (reg), nregs);
3801 use_reg (&call_fusage, reg);
3805 /* Pass the function the address in which to return a structure value. */
3806 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3808 emit_move_insn (struct_value,
3810 force_operand (XEXP (mem_value, 0),
3812 if (REG_P (struct_value))
3813 use_reg (&call_fusage, struct_value);
3816 /* Don't allow popping to be deferred, since then
3817 cse'ing of library calls could delete a call and leave the pop. */
3819 valreg = (mem_value == 0 && outmode != VOIDmode
3820 ? hard_libcall_value (outmode) : NULL_RTX);
3822 /* Stack must be properly aligned now. */
3823 gcc_assert (!(stack_pointer_delta
3824 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3826 before_call = get_last_insn ();
3828 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3829 will set inhibit_defer_pop to that value. */
3830 /* The return type is needed to decide how many bytes the function pops.
3831 Signedness plays no role in that, so for simplicity, we pretend it's
3832 always signed. We also assume that the list of arguments passed has
3833 no impact, so we pretend it is unknown. */
3835 emit_call_1 (fun, NULL,
3836 get_identifier (XSTR (orgfun, 0)),
3837 build_function_type (tfom, NULL_TREE),
3838 original_args_size.constant, args_size.constant,
3840 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3842 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3844 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
3845 that it should complain if nonvolatile values are live. For
3846 functions that cannot return, inform flow that control does not
3849 if (flags & ECF_NORETURN)
3851 /* The barrier note must be emitted
3852 immediately after the CALL_INSN. Some ports emit more than
3853 just a CALL_INSN above, so we must search for it here. */
3855 rtx last = get_last_insn ();
3856 while (!CALL_P (last))
3858 last = PREV_INSN (last);
3859 /* There was no CALL_INSN? */
3860 gcc_assert (last != before_call);
3863 emit_barrier_after (last);
3866 /* Now restore inhibit_defer_pop to its actual original value. */
3869 /* If call is cse'able, make appropriate pair of reg-notes around it.
3870 Test valreg so we don't crash; may safely ignore `const'
3871 if return type is void. Disable for PARALLEL return values, because
3872 we have no way to move such values into a pseudo register. */
3873 if (flags & ECF_LIBCALL_BLOCK)
3879 insns = get_insns ();
3889 if (GET_CODE (valreg) == PARALLEL)
3891 temp = gen_reg_rtx (outmode);
3892 emit_group_store (temp, valreg, NULL_TREE,
3893 GET_MODE_SIZE (outmode));
3897 temp = gen_reg_rtx (GET_MODE (valreg));
3899 /* Construct an "equal form" for the value which mentions all the
3900 arguments in order as well as the function name. */
3901 for (i = 0; i < nargs; i++)
3902 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3903 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3905 insns = get_insns ();
3908 if (flags & ECF_PURE)
3909 note = gen_rtx_EXPR_LIST (VOIDmode,
3910 gen_rtx_USE (VOIDmode,
3911 gen_rtx_MEM (BLKmode,
3912 gen_rtx_SCRATCH (VOIDmode))),
3915 emit_libcall_block (insns, temp, valreg, note);
3922 /* Copy the value to the right place. */
3923 if (outmode != VOIDmode && retval)
3929 if (value != mem_value)
3930 emit_move_insn (value, mem_value);
3932 else if (GET_CODE (valreg) == PARALLEL)
3935 value = gen_reg_rtx (outmode);
3936 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3940 /* Convert to the proper mode if PROMOTE_MODE has been active. */
3941 if (GET_MODE (valreg) != outmode)
3943 int unsignedp = TYPE_UNSIGNED (tfom);
3945 gcc_assert (targetm.calls.promote_function_return (tfom));
3946 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
3947 == GET_MODE (valreg));
3949 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3953 emit_move_insn (value, valreg);
3959 if (ACCUMULATE_OUTGOING_ARGS)
3961 #ifdef REG_PARM_STACK_SPACE
3963 restore_fixed_argument_area (save_area, argblock,
3964 high_to_save, low_to_save);
3967 /* If we saved any argument areas, restore them. */
3968 for (count = 0; count < nargs; count++)
3969 if (argvec[count].save_area)
3971 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3972 rtx adr = plus_constant (argblock,
3973 argvec[count].locate.offset.constant);
3974 rtx stack_area = gen_rtx_MEM (save_mode,
3975 memory_address (save_mode, adr));
3977 if (save_mode == BLKmode)
3978 emit_block_move (stack_area,
3979 validize_mem (argvec[count].save_area),
3980 GEN_INT (argvec[count].locate.size.constant),
3981 BLOCK_OP_CALL_PARM);
3983 emit_move_insn (stack_area, argvec[count].save_area);
3986 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3987 stack_usage_map = initial_stack_usage_map;
3990 if (stack_usage_map_buf)
3991 free (stack_usage_map_buf);
3997 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3998 (emitting the queue unless NO_QUEUE is nonzero),
3999 for a value of mode OUTMODE,
4000 with NARGS different arguments, passed as alternating rtx values
4001 and machine_modes to convert them to.
4003 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4004 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4005 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4006 LCT_PURE_MAKE_BLOCK for `purep' calls which should be enclosed in
4007 REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch)),
4008 or other LCT_ value for other types of library calls. */
4011 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4012 enum machine_mode outmode, int nargs, ...)
4016 va_start (p, nargs);
4017 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4021 /* Like emit_library_call except that an extra argument, VALUE,
4022 comes second and says where to store the result.
4023 (If VALUE is zero, this function chooses a convenient way
4024 to return the value.
4026 This function returns an rtx for where the value is to be found.
4027 If VALUE is nonzero, VALUE is returned. */
4030 emit_library_call_value (rtx orgfun, rtx value,
4031 enum libcall_type fn_type,
4032 enum machine_mode outmode, int nargs, ...)
4037 va_start (p, nargs);
4038 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4045 /* Store a single argument for a function call
4046 into the register or memory area where it must be passed.
4047 *ARG describes the argument value and where to pass it.
4049 ARGBLOCK is the address of the stack-block for all the arguments,
4050 or 0 on a machine where arguments are pushed individually.
4052 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4053 so must be careful about how the stack is used.
4055 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4056 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4057 that we need not worry about saving and restoring the stack.
4059 FNDECL is the declaration of the function we are calling.
4061 Return nonzero if this arg should cause sibcall failure,
4065 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4066 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4068 tree pval = arg->tree_value;
4072 int i, lower_bound = 0, upper_bound = 0;
4073 int sibcall_failure = 0;
4075 if (TREE_CODE (pval) == ERROR_MARK)
4078 /* Push a new temporary level for any temporaries we make for
4082 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4084 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4085 save any previous data at that location. */
4086 if (argblock && ! variable_size && arg->stack)
4088 #ifdef ARGS_GROW_DOWNWARD
4089 /* stack_slot is negative, but we want to index stack_usage_map
4090 with positive values. */
4091 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4092 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4096 lower_bound = upper_bound - arg->locate.size.constant;
4098 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4099 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4103 upper_bound = lower_bound + arg->locate.size.constant;
4107 /* Don't worry about things in the fixed argument area;
4108 it has already been saved. */
4109 if (i < reg_parm_stack_space)
4110 i = reg_parm_stack_space;
4111 while (i < upper_bound && stack_usage_map[i] == 0)
4114 if (i < upper_bound)
4116 /* We need to make a save area. */
4117 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4118 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4119 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4120 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4122 if (save_mode == BLKmode)
4124 tree ot = TREE_TYPE (arg->tree_value);
4125 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4126 | TYPE_QUAL_CONST));
4128 arg->save_area = assign_temp (nt, 0, 1, 1);
4129 preserve_temp_slots (arg->save_area);
4130 emit_block_move (validize_mem (arg->save_area), stack_area,
4131 GEN_INT (arg->locate.size.constant),
4132 BLOCK_OP_CALL_PARM);
4136 arg->save_area = gen_reg_rtx (save_mode);
4137 emit_move_insn (arg->save_area, stack_area);
4143 /* If this isn't going to be placed on both the stack and in registers,
4144 set up the register and number of words. */
4145 if (! arg->pass_on_stack)
4147 if (flags & ECF_SIBCALL)
4148 reg = arg->tail_call_reg;
4151 partial = arg->partial;
4154 /* Being passed entirely in a register. We shouldn't be called in
4156 gcc_assert (reg == 0 || partial != 0);
4158 /* If this arg needs special alignment, don't load the registers
4160 if (arg->n_aligned_regs != 0)
4163 /* If this is being passed partially in a register, we can't evaluate
4164 it directly into its stack slot. Otherwise, we can. */
4165 if (arg->value == 0)
4167 /* stack_arg_under_construction is nonzero if a function argument is
4168 being evaluated directly into the outgoing argument list and
4169 expand_call must take special action to preserve the argument list
4170 if it is called recursively.
4172 For scalar function arguments stack_usage_map is sufficient to
4173 determine which stack slots must be saved and restored. Scalar
4174 arguments in general have pass_on_stack == 0.
4176 If this argument is initialized by a function which takes the
4177 address of the argument (a C++ constructor or a C function
4178 returning a BLKmode structure), then stack_usage_map is
4179 insufficient and expand_call must push the stack around the
4180 function call. Such arguments have pass_on_stack == 1.
4182 Note that it is always safe to set stack_arg_under_construction,
4183 but this generates suboptimal code if set when not needed. */
4185 if (arg->pass_on_stack)
4186 stack_arg_under_construction++;
4188 arg->value = expand_expr (pval,
4190 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4191 ? NULL_RTX : arg->stack,
4192 VOIDmode, EXPAND_STACK_PARM);
4194 /* If we are promoting object (or for any other reason) the mode
4195 doesn't agree, convert the mode. */
4197 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4198 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4199 arg->value, arg->unsignedp);
4201 if (arg->pass_on_stack)
4202 stack_arg_under_construction--;
4205 /* Check for overlap with already clobbered argument area. */
4206 if ((flags & ECF_SIBCALL)
4207 && MEM_P (arg->value)
4208 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4209 arg->locate.size.constant))
4210 sibcall_failure = 1;
4212 /* Don't allow anything left on stack from computation
4213 of argument to alloca. */
4214 if (flags & ECF_MAY_BE_ALLOCA)
4215 do_pending_stack_adjust ();
4217 if (arg->value == arg->stack)
4218 /* If the value is already in the stack slot, we are done. */
4220 else if (arg->mode != BLKmode)
4223 unsigned int parm_align;
4225 /* Argument is a scalar, not entirely passed in registers.
4226 (If part is passed in registers, arg->partial says how much
4227 and emit_push_insn will take care of putting it there.)
4229 Push it, and if its size is less than the
4230 amount of space allocated to it,
4231 also bump stack pointer by the additional space.
4232 Note that in C the default argument promotions
4233 will prevent such mismatches. */
4235 size = GET_MODE_SIZE (arg->mode);
4236 /* Compute how much space the push instruction will push.
4237 On many machines, pushing a byte will advance the stack
4238 pointer by a halfword. */
4239 #ifdef PUSH_ROUNDING
4240 size = PUSH_ROUNDING (size);
4244 /* Compute how much space the argument should get:
4245 round up to a multiple of the alignment for arguments. */
4246 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4247 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4248 / (PARM_BOUNDARY / BITS_PER_UNIT))
4249 * (PARM_BOUNDARY / BITS_PER_UNIT));
4251 /* Compute the alignment of the pushed argument. */
4252 parm_align = arg->locate.boundary;
4253 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4255 int pad = used - size;
4258 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4259 parm_align = MIN (parm_align, pad_align);
4263 /* This isn't already where we want it on the stack, so put it there.
4264 This can either be done with push or copy insns. */
4265 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4266 parm_align, partial, reg, used - size, argblock,
4267 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4268 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4270 /* Unless this is a partially-in-register argument, the argument is now
4273 arg->value = arg->stack;
4277 /* BLKmode, at least partly to be pushed. */
4279 unsigned int parm_align;
4283 /* Pushing a nonscalar.
4284 If part is passed in registers, PARTIAL says how much
4285 and emit_push_insn will take care of putting it there. */
4287 /* Round its size up to a multiple
4288 of the allocation unit for arguments. */
4290 if (arg->locate.size.var != 0)
4293 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4297 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4298 for BLKmode is careful to avoid it. */
4299 excess = (arg->locate.size.constant
4300 - int_size_in_bytes (TREE_TYPE (pval))
4302 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4303 NULL_RTX, TYPE_MODE (sizetype), 0);
4306 parm_align = arg->locate.boundary;
4308 /* When an argument is padded down, the block is aligned to
4309 PARM_BOUNDARY, but the actual argument isn't. */
4310 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4312 if (arg->locate.size.var)
4313 parm_align = BITS_PER_UNIT;
4316 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4317 parm_align = MIN (parm_align, excess_align);
4321 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4323 /* emit_push_insn might not work properly if arg->value and
4324 argblock + arg->locate.offset areas overlap. */
4328 if (XEXP (x, 0) == current_function_internal_arg_pointer
4329 || (GET_CODE (XEXP (x, 0)) == PLUS
4330 && XEXP (XEXP (x, 0), 0) ==
4331 current_function_internal_arg_pointer
4332 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4334 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4335 i = INTVAL (XEXP (XEXP (x, 0), 1));
4337 /* expand_call should ensure this. */
4338 gcc_assert (!arg->locate.offset.var
4339 && arg->locate.size.var == 0
4340 && GET_CODE (size_rtx) == CONST_INT);
4342 if (arg->locate.offset.constant > i)
4344 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4345 sibcall_failure = 1;
4347 else if (arg->locate.offset.constant < i)
4349 /* Use arg->locate.size.constant instead of size_rtx
4350 because we only care about the part of the argument
4352 if (i < (arg->locate.offset.constant
4353 + arg->locate.size.constant))
4354 sibcall_failure = 1;
4358 /* Even though they appear to be at the same location,
4359 if part of the outgoing argument is in registers,
4360 they aren't really at the same location. Check for
4361 this by making sure that the incoming size is the
4362 same as the outgoing size. */
4363 if (arg->locate.size.constant != INTVAL (size_rtx))
4364 sibcall_failure = 1;
4369 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4370 parm_align, partial, reg, excess, argblock,
4371 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4372 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4374 /* Unless this is a partially-in-register argument, the argument is now
4377 ??? Unlike the case above, in which we want the actual
4378 address of the data, so that we can load it directly into a
4379 register, here we want the address of the stack slot, so that
4380 it's properly aligned for word-by-word copying or something
4381 like that. It's not clear that this is always correct. */
4383 arg->value = arg->stack_slot;
4386 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4388 tree type = TREE_TYPE (arg->tree_value);
4390 = emit_group_load_into_temps (arg->reg, arg->value, type,
4391 int_size_in_bytes (type));
4394 /* Mark all slots this store used. */
4395 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4396 && argblock && ! variable_size && arg->stack)
4397 for (i = lower_bound; i < upper_bound; i++)
4398 stack_usage_map[i] = 1;
4400 /* Once we have pushed something, pops can't safely
4401 be deferred during the rest of the arguments. */
4404 /* Free any temporary slots made in processing this argument. Show
4405 that we might have taken the address of something and pushed that
4407 preserve_temp_slots (NULL_RTX);
4411 return sibcall_failure;
4414 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4417 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4423 /* If the type has variable size... */
4424 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4427 /* If the type is marked as addressable (it is required
4428 to be constructed into the stack)... */
4429 if (TREE_ADDRESSABLE (type))
4435 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4436 takes trailing padding of a structure into account. */
4437 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4440 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4445 /* If the type has variable size... */
4446 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4449 /* If the type is marked as addressable (it is required
4450 to be constructed into the stack)... */
4451 if (TREE_ADDRESSABLE (type))
4454 /* If the padding and mode of the type is such that a copy into
4455 a register would put it into the wrong part of the register. */
4457 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4458 && (FUNCTION_ARG_PADDING (mode, type)
4459 == (BYTES_BIG_ENDIAN ? upward : downward)))