1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.  */
25 #include "coretypes.h"
40 #include "langhooks.h"
45 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
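/* For example, on a target where PREFERRED_STACK_BOUNDARY is 128 bits and
   BITS_PER_UNIT is 8 (hypothetical values, purely for illustration),
   STACK_BYTES works out to 128 / 8 = 16 bytes.  */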
48 /* Data structure and subroutines used within expand_call. */
52 /* Tree node for this argument. */
54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
/* Initially-computed RTL value for argument; only for const functions.  */
/* Register to pass this argument in, 0 if passed on stack, or a
   PARALLEL if the arg is to be copied into multiple non-contiguous
   registers.  */
64 /* Register to pass this argument in when generating tail call sequence.
This is not the same register as for normal calls on machines with
   register windows.  */
68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of bytes to put in registers. 0 means put the whole arg
75 in registers. Also 0 if not passed in registers. */
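/* A hypothetical illustration: on a 32-bit target that passes the first
   two words of a 12-byte aggregate in registers, this field would be 8,
   and the remaining 4 bytes would go on the stack.  */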
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
91 /* Place that this stack area has been saved, if needed. */
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
/* A vector of one char per byte of stack space.  A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
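/* An illustrative (hypothetical) scenario: if a 4-byte argument has already
   been stored at byte offset 8 of the outgoing argument block, then
   stack_usage_map[8] through stack_usage_map[11] are nonzero and
   highest_outgoing_arg_in_use is at least 12, so a call expanded while
   computing another argument knows it must not reuse those bytes.  */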
/* A bitmap of virtual-incoming stack space.  A bit is set if the corresponding
   stack location's tail call argument has already been stored into the stack.
   This bitmap is used to prevent sibling call optimization if the function
   tries to use its parent's incoming argument slots when they have already
   been overwritten with tail call arguments.  */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 static int stack_arg_under_construction;
124 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
127 static void precompute_register_parameters (int, struct arg_data *, int *);
128 static int store_one_arg (struct arg_data *, rtx, int, int, int);
129 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130 static int finalize_must_preallocate (int, int, struct arg_data *,
132 static void precompute_arguments (int, int, struct arg_data *);
133 static int compute_argument_block_size (int, struct args_size *, int);
134 static void initialize_argument_information (int, struct arg_data *,
135 struct args_size *, int, tree,
136 tree, CUMULATIVE_ARGS *, int,
137 rtx *, int *, int *, int *,
139 static void compute_argument_addresses (struct arg_data *, rtx, int);
140 static rtx rtx_for_function_call (tree, tree);
141 static void load_register_parameters (struct arg_data *, int, rtx *, int,
143 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
144 enum machine_mode, int, va_list);
145 static int special_function_p (tree, int);
146 static int check_sibcall_argument_overlap_1 (rtx);
147 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
149 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
151 static tree split_complex_values (tree);
152 static tree split_complex_types (tree);
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
159 /* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
167 prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
170 /* Make a valid memory address and copy constants through pseudo-regs,
171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
173 /* If we are using registers for parameters, force the
174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
180 #ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
182 funexp = force_reg (Pmode, funexp);
186 if (static_chain_value != 0)
188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
189 emit_move_insn (static_chain_rtx, static_chain_value);
191 if (REG_P (static_chain_rtx))
192 use_reg (call_fusage, static_chain_rtx);
198 /* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
202 FNDECL is the declaration node of the function. This is given to the
203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
211 STACK_SIZE is the number of bytes of arguments on the stack,
212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
both to put into the call insn and to generate explicit popping code if necessary.
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
236 denote registers used by the called function. */
239 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
250 int already_popped = 0;
251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
252 #if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
257 #ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
267 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
270 && (n_popped > 0 || stack_size == 0))
272 rtx n_pop = GEN_INT (n_popped);
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
279 pat = GEN_SIBCALL_VALUE_POP (valreg,
280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
285 rounded_stack_size_rtx, next_arg_reg, n_pop);
287 emit_call_insn (pat);
293 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
298 #if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
300 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
302 if (HAVE_call_pop && HAVE_call_value_pop)
305 rtx n_pop = GEN_INT (n_popped);
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
312 pat = GEN_CALL_VALUE_POP (valreg,
313 gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
317 rounded_stack_size_rtx, next_arg_reg, n_pop);
319 emit_call_insn (pat);
325 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
342 #if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
346 emit_call_insn (GEN_CALL_VALUE (valreg,
347 gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
352 rounded_stack_size_rtx, next_arg_reg,
353 struct_value_size_rtx));
359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
374 /* If this is a const call, then set the insn's unchanging bit. */
375 if (ecf_flags & (ECF_CONST | ECF_PURE))
376 CONST_OR_PURE_CALL_P (call_insn) = 1;
/* If this call can't throw, attach a REG_EH_REGION reg note to that effect.  */
380 if (ecf_flags & ECF_NOTHROW)
381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
382 REG_NOTES (call_insn));
385 int rn = lookup_stmt_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
399 if (ecf_flags & ECF_RETURNS_TWICE)
401 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
402 REG_NOTES (call_insn));
403 current_function_calls_setjmp = 1;
406 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
408 /* Restore this now, so that we do defer pops for this call's args
409 if the context of the call as a whole permits. */
410 inhibit_defer_pop = old_inhibit_defer_pop;
415 CALL_INSN_FUNCTION_USAGE (call_insn)
416 = gen_rtx_EXPR_LIST (VOIDmode,
417 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
418 CALL_INSN_FUNCTION_USAGE (call_insn));
419 rounded_stack_size -= n_popped;
420 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
421 stack_pointer_delta -= n_popped;
424 if (!ACCUMULATE_OUTGOING_ARGS)
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
433 if (rounded_stack_size != 0)
435 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
442 adjust_stack (rounded_stack_size_rtx);
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
451 ??? We may optimize similar to defer_pop above, but it is
452 probably not worthwhile.
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
457 anti_adjust_stack (GEN_INT (n_popped));
460 /* Determine if the function identified by NAME and FNDECL is one with
461 special properties we wish to know about.
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
466 Similarly set NORETURN if the function is in the longjmp family.
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
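/* For example, given a file-scope extern declaration of vfork, the code below
   adds ECF_RETURNS_TWICE to FLAGS; for longjmp or siglongjmp it adds
   ECF_NORETURN; and for alloca or __builtin_alloca it adds
   ECF_MAY_BE_ALLOCA.  */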
472 special_function_p (tree fndecl, int flags)
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
501 /* Disregard prefix _, __ or __x. */
504 if (name[1] == '_' && name[2] == 'x')
506 else if (name[1] == '_')
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
518 && ! strcmp (tname, "sigsetjmp"))
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
524 && ! strcmp (tname, "siglongjmp"))
525 flags |= ECF_NORETURN;
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
530 && ! strcmp (tname, "vfork"))
531 || (tname[0] == 'g' && tname[1] == 'e'
532 && !strcmp (tname, "getcontext")))
533 flags |= ECF_RETURNS_TWICE;
535 else if (tname[0] == 'l' && tname[1] == 'o'
536 && ! strcmp (tname, "longjmp"))
537 flags |= ECF_NORETURN;
543 /* Return nonzero when FNDECL represents a call to setjmp. */
546 setjmp_call_p (tree fndecl)
548 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
/* Return true when EXP contains an alloca call.  */
553 alloca_call_p (tree exp)
555 if (TREE_CODE (exp) == CALL_EXPR
556 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
557 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
558 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
559 & ECF_MAY_BE_ALLOCA))
564 /* Detect flags (function attributes) from the function decl or type node. */
567 flags_from_decl_or_type (tree exp)
574 type = TREE_TYPE (exp);
576 /* The function exp may have the `malloc' attribute. */
577 if (DECL_IS_MALLOC (exp))
580 /* The function exp may have the `returns_twice' attribute. */
581 if (DECL_IS_RETURNS_TWICE (exp))
582 flags |= ECF_RETURNS_TWICE;
584 /* The function exp may have the `pure' attribute. */
585 if (DECL_IS_PURE (exp))
588 if (DECL_IS_NOVOPS (exp))
591 if (TREE_NOTHROW (exp))
592 flags |= ECF_NOTHROW;
594 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
597 flags = special_function_p (exp, flags);
599 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
602 if (TREE_THIS_VOLATILE (exp))
603 flags |= ECF_NORETURN;
605 /* Mark if the function returns with the stack pointer depressed. We
606 cannot consider it pure or constant in that case. */
607 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
609 flags |= ECF_SP_DEPRESSED;
610 flags &= ~(ECF_PURE | ECF_CONST);
616 /* Detect flags from a CALL_EXPR. */
619 call_expr_flags (tree t)
622 tree decl = get_callee_fndecl (t);
625 flags = flags_from_decl_or_type (decl);
628 t = TREE_TYPE (CALL_EXPR_FN (t));
629 if (t && TREE_CODE (t) == POINTER_TYPE)
630 flags = flags_from_decl_or_type (TREE_TYPE (t));
638 /* Precompute all register parameters as described by ARGS, storing values
639 into fields within the ARGS array.
NUM_ACTUALS indicates the total number of elements in the ARGS array.
643 Set REG_PARM_SEEN if we encounter a register parameter. */
646 precompute_register_parameters (int num_actuals, struct arg_data *args,
653 for (i = 0; i < num_actuals; i++)
654 if (args[i].reg != 0 && ! args[i].pass_on_stack)
658 if (args[i].value == 0)
661 args[i].value = expand_normal (args[i].tree_value);
662 preserve_temp_slots (args[i].value);
666 /* If the value is a non-legitimate constant, force it into a
667 pseudo now. TLS symbols sometimes need a call to resolve. */
668 if (CONSTANT_P (args[i].value)
669 && !LEGITIMATE_CONSTANT_P (args[i].value))
670 args[i].value = force_reg (args[i].mode, args[i].value);
/* If we are to promote the function arg to a wider mode, do it now.  */
675 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
677 = convert_modes (args[i].mode,
678 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
679 args[i].value, args[i].unsignedp);
681 /* If we're going to have to load the value by parts, pull the
682 parts into pseudos. The part extraction process can involve
683 non-trivial computation. */
684 if (GET_CODE (args[i].reg) == PARALLEL)
686 tree type = TREE_TYPE (args[i].tree_value);
687 args[i].parallel_value
688 = emit_group_load_into_temps (args[i].reg, args[i].value,
689 type, int_size_in_bytes (type));
692 /* If the value is expensive, and we are inside an appropriately
693 short loop, put the value into a pseudo and then put the pseudo
696 For small register classes, also do this if this call uses
697 register parameters. This is to avoid reload conflicts while
698 loading the parameters registers. */
700 else if ((! (REG_P (args[i].value)
701 || (GET_CODE (args[i].value) == SUBREG
702 && REG_P (SUBREG_REG (args[i].value)))))
703 && args[i].mode != BLKmode
704 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
705 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
707 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
711 #ifdef REG_PARM_STACK_SPACE
713 /* The argument list is the property of the called routine and it
714 may clobber it. If the fixed area has been used for previous
715 parameters, we must save and restore it. */
718 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
723 /* Compute the boundary of the area that needs to be saved, if any. */
724 high = reg_parm_stack_space;
725 #ifdef ARGS_GROW_DOWNWARD
728 if (high > highest_outgoing_arg_in_use)
729 high = highest_outgoing_arg_in_use;
731 for (low = 0; low < high; low++)
732 if (stack_usage_map[low] != 0)
735 enum machine_mode save_mode;
740 while (stack_usage_map[--high] == 0)
744 *high_to_save = high;
746 num_to_save = high - low + 1;
747 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
/* If we don't have the required alignment, we must do this in BLKmode.  */
751 if ((low & (MIN (GET_MODE_SIZE (save_mode),
752 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
755 #ifdef ARGS_GROW_DOWNWARD
760 stack_area = gen_rtx_MEM (save_mode,
761 memory_address (save_mode,
762 plus_constant (argblock,
765 set_mem_align (stack_area, PARM_BOUNDARY);
766 if (save_mode == BLKmode)
768 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
769 emit_block_move (validize_mem (save_area), stack_area,
770 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
774 save_area = gen_reg_rtx (save_mode);
775 emit_move_insn (save_area, stack_area);
785 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
787 enum machine_mode save_mode = GET_MODE (save_area);
791 #ifdef ARGS_GROW_DOWNWARD
792 delta = -high_to_save;
796 stack_area = gen_rtx_MEM (save_mode,
797 memory_address (save_mode,
798 plus_constant (argblock, delta)));
799 set_mem_align (stack_area, PARM_BOUNDARY);
801 if (save_mode != BLKmode)
802 emit_move_insn (stack_area, save_area);
804 emit_block_move (stack_area, validize_mem (save_area),
805 GEN_INT (high_to_save - low_to_save + 1),
808 #endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.
815 Pseudos for each unaligned argument will be stored into the array
816 args[argnum].aligned_regs. The caller is responsible for deallocating
817 the aligned_regs array if it is nonzero. */
820 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
824 for (i = 0; i < num_actuals; i++)
825 if (args[i].reg != 0 && ! args[i].pass_on_stack
826 && args[i].mode == BLKmode
827 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
828 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
830 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
831 int endian_correction = 0;
835 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
836 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
840 args[i].n_aligned_regs
841 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
844 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
846 /* Structures smaller than a word are normally aligned to the
847 least significant byte. On a BYTES_BIG_ENDIAN machine,
848 this means we must skip the empty high order bytes when
849 calculating the bit offset. */
850 if (bytes < UNITS_PER_WORD
851 #ifdef BLOCK_REG_PADDING
852 && (BLOCK_REG_PADDING (args[i].mode,
853 TREE_TYPE (args[i].tree_value), 1)
859 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
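/* A worked example, assuming a 32-bit big-endian word: for a 3-byte
   structure, endian_correction is 32 - 3 * 8 = 8, so the 24 significant
   bits are stored starting at bit offset 8, skipping the one empty
   high-order byte of the word-sized pseudo.  */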
861 for (j = 0; j < args[i].n_aligned_regs; j++)
863 rtx reg = gen_reg_rtx (word_mode);
864 rtx word = operand_subword_force (args[i].value, j, BLKmode);
865 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
867 args[i].aligned_regs[j] = reg;
868 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
869 word_mode, word_mode);
871 /* There is no need to restrict this code to loading items
872 in TYPE_ALIGN sized hunks. The bitfield instructions can
873 load up entire word sized registers efficiently.
875 ??? This may not be needed anymore.
We used to emit a clobber here but that doesn't let later
   passes optimize the instructions we emit.  By storing 0 into
   the register, later passes know that the first AND to zero out the
   bitfield being set in the register is unnecessary.  The store
   of 0 will be deleted, as will at least the first AND.  */
882 emit_move_insn (reg, const0_rtx);
884 bytes -= bitsize / BITS_PER_UNIT;
885 store_bit_field (reg, bitsize, endian_correction, word_mode,
891 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
894 NUM_ACTUALS is the total number of parameters.
896 N_NAMED_ARGS is the total number of named arguments.
898 FNDECL is the tree code for the target of this call (if known)
900 ARGS_SO_FAR holds state needed by the target to know where to place
903 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
904 for arguments which are passed in registers.
OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
907 and may be modified by this routine.
909 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
flags which may be modified by this routine.
912 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
913 that requires allocation of stack space.
915 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
916 the thunked-to function. */
919 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
920 struct arg_data *args,
921 struct args_size *args_size,
922 int n_named_args ATTRIBUTE_UNUSED,
923 tree actparms, tree fndecl,
924 CUMULATIVE_ARGS *args_so_far,
925 int reg_parm_stack_space,
926 rtx *old_stack_level, int *old_pending_adj,
927 int *must_preallocate, int *ecf_flags,
928 bool *may_tailcall, bool call_from_thunk_p)
930 /* 1 if scanning parms front to back, -1 if scanning back to front. */
933 /* Count arg position in order args appear. */
939 args_size->constant = 0;
942 /* In this loop, we consider args in the order they are written.
943 We fill up ARGS from the front or from the back if necessary
944 so that in any case the first arg to be pushed ends up at the front. */
946 if (PUSH_ARGS_REVERSED)
948 i = num_actuals - 1, inc = -1;
949 /* In this case, must reverse order of args
950 so that we compute and push the last arg first. */
957 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
958 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
960 tree type = TREE_TYPE (TREE_VALUE (p));
962 enum machine_mode mode;
964 args[i].tree_value = TREE_VALUE (p);
966 /* Replace erroneous argument with constant zero. */
967 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
968 args[i].tree_value = integer_zero_node, type = integer_type_node;
970 /* If TYPE is a transparent union, pass things the way we would
971 pass the first field of the union. We have already verified that
972 the modes are the same. */
973 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
974 type = TREE_TYPE (TYPE_FIELDS (type));
976 /* Decide where to pass this arg.
978 args[i].reg is nonzero if all or part is passed in registers.
980 args[i].partial is nonzero if part but not all is passed in registers,
981 and the exact value says how many bytes are passed in registers.
983 args[i].pass_on_stack is nonzero if the argument must at least be
984 computed on the stack. It may then be loaded back into registers
985 if args[i].reg is nonzero.
987 These decisions are driven by the FUNCTION_... macros and must agree
988 with those made by function.c. */
990 /* See if this argument should be passed by invisible reference. */
991 if (pass_by_reference (args_so_far, TYPE_MODE (type),
992 type, argpos < n_named_args))
998 = reference_callee_copied (args_so_far, TYPE_MODE (type),
999 type, argpos < n_named_args);
1001 /* If we're compiling a thunk, pass through invisible references
1002 instead of making a copy. */
1003 if (call_from_thunk_p
1005 && !TREE_ADDRESSABLE (type)
1006 && (base = get_base_address (args[i].tree_value))
1007 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1009 /* We can't use sibcalls if a callee-copied argument is
1010 stored in the current function's frame. */
1011 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1012 *may_tailcall = false;
1014 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1015 type = TREE_TYPE (args[i].tree_value);
1017 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1021 /* We make a copy of the object and pass the address to the
1022 function being called. */
1025 if (!COMPLETE_TYPE_P (type)
1026 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1027 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1028 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1029 STACK_CHECK_MAX_VAR_SIZE))))
1031 /* This is a variable-sized object. Make space on the stack
1033 rtx size_rtx = expr_size (TREE_VALUE (p));
1035 if (*old_stack_level == 0)
1037 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1038 *old_pending_adj = pending_stack_adjust;
1039 pending_stack_adjust = 0;
1042 copy = gen_rtx_MEM (BLKmode,
1043 allocate_dynamic_stack_space
1044 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1045 set_mem_attributes (copy, type, 1);
1048 copy = assign_temp (type, 0, 1, 0);
1050 store_expr (args[i].tree_value, copy, 0);
1053 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1055 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1058 = build_fold_addr_expr (make_tree (type, copy));
1059 type = TREE_TYPE (args[i].tree_value);
1060 *may_tailcall = false;
1064 mode = TYPE_MODE (type);
1065 unsignedp = TYPE_UNSIGNED (type);
1067 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1068 mode = promote_mode (type, mode, &unsignedp, 1);
1070 args[i].unsignedp = unsignedp;
1071 args[i].mode = mode;
1073 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1074 argpos < n_named_args);
1075 #ifdef FUNCTION_INCOMING_ARG
1076 /* If this is a sibling call and the machine has register windows, the
register window has to be unwound before calling the routine, so
1078 arguments have to go into the incoming registers. */
1079 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1080 argpos < n_named_args);
1082 args[i].tail_call_reg = args[i].reg;
1087 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1088 argpos < n_named_args);
1090 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1092 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1093 it means that we are to pass this arg in the register(s) designated
1094 by the PARALLEL, but also to pass it in the stack. */
1095 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1096 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1097 args[i].pass_on_stack = 1;
1099 /* If this is an addressable type, we must preallocate the stack
1100 since we must evaluate the object into its final location.
If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
1104 if (TREE_ADDRESSABLE (type)
1105 || (args[i].pass_on_stack && args[i].reg != 0))
1106 *must_preallocate = 1;
1108 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1109 we cannot consider this function call constant. */
1110 if (TREE_ADDRESSABLE (type))
1111 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1113 /* Compute the stack-size of this argument. */
1114 if (args[i].reg == 0 || args[i].partial != 0
1115 || reg_parm_stack_space > 0
1116 || args[i].pass_on_stack)
1117 locate_and_pad_parm (mode, type,
1118 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1123 args[i].pass_on_stack ? 0 : args[i].partial,
1124 fndecl, args_size, &args[i].locate);
1125 #ifdef BLOCK_REG_PADDING
1127 /* The argument is passed entirely in registers. See at which
1128 end it should be padded. */
1129 args[i].locate.where_pad =
1130 BLOCK_REG_PADDING (mode, type,
1131 int_size_in_bytes (type) <= UNITS_PER_WORD);
1134 /* Update ARGS_SIZE, the total stack space for args so far. */
1136 args_size->constant += args[i].locate.size.constant;
1137 if (args[i].locate.size.var)
1138 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1140 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1141 have been used, etc. */
1143 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1144 argpos < n_named_args);
1148 /* Update ARGS_SIZE to contain the total size for the argument block.
1149 Return the original constant component of the argument block's size.
1151 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1152 for arguments passed in registers. */
1155 compute_argument_block_size (int reg_parm_stack_space,
1156 struct args_size *args_size,
1157 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1159 int unadjusted_args_size = args_size->constant;
1161 /* For accumulate outgoing args mode we don't need to align, since the frame
1162 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1163 backends from generating misaligned frame sizes. */
1164 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1165 preferred_stack_boundary = STACK_BOUNDARY;
1167 /* Compute the actual size of the argument block required. The variable
1168 and constant sizes must be combined, the size may have to be rounded,
1169 and there may be a minimum required size. */
1173 args_size->var = ARGS_SIZE_TREE (*args_size);
1174 args_size->constant = 0;
1176 preferred_stack_boundary /= BITS_PER_UNIT;
1177 if (preferred_stack_boundary > 1)
1179 /* We don't handle this case yet. To handle it correctly we have
1180 to add the delta, round and subtract the delta.
1181 Currently no machine description requires this support. */
1182 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1183 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1186 if (reg_parm_stack_space > 0)
1189 = size_binop (MAX_EXPR, args_size->var,
1190 ssize_int (reg_parm_stack_space));
1192 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1193 /* The area corresponding to register parameters is not to count in
1194 the size of the block we need. So make the adjustment. */
1196 = size_binop (MINUS_EXPR, args_size->var,
1197 ssize_int (reg_parm_stack_space));
1203 preferred_stack_boundary /= BITS_PER_UNIT;
1204 if (preferred_stack_boundary < 1)
1205 preferred_stack_boundary = 1;
1206 args_size->constant = (((args_size->constant
1207 + stack_pointer_delta
1208 + preferred_stack_boundary - 1)
1209 / preferred_stack_boundary
1210 * preferred_stack_boundary)
1211 - stack_pointer_delta);
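/* A worked example with hypothetical numbers: if args_size->constant is 20,
   stack_pointer_delta is 4 and preferred_stack_boundary is 16 bytes, the
   expression above yields ((20 + 4 + 15) / 16) * 16 - 4 = 32 - 4 = 28,
   so pushing 28 bytes of arguments leaves the stack pointer 16-byte
   aligned (4 + 28 = 32).  */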
1213 args_size->constant = MAX (args_size->constant,
1214 reg_parm_stack_space);
1216 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1217 args_size->constant -= reg_parm_stack_space;
1220 return unadjusted_args_size;
1223 /* Precompute parameters as needed for a function call.
1225 FLAGS is mask of ECF_* constants.
1227 NUM_ACTUALS is the number of arguments.
1229 ARGS is an array containing information for each argument; this
1230 routine fills in the INITIAL_VALUE and VALUE fields for each
1231 precomputed argument. */
1234 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1238 /* If this is a libcall, then precompute all arguments so that we do not
1239 get extraneous instructions emitted as part of the libcall sequence. */
1240 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1243 for (i = 0; i < num_actuals; i++)
1245 enum machine_mode mode;
1247 /* If this is an addressable type, we cannot pre-evaluate it. */
1248 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1250 args[i].initial_value = args[i].value
1251 = expand_normal (args[i].tree_value);
1253 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1254 if (mode != args[i].mode)
1257 = convert_modes (args[i].mode, mode,
1258 args[i].value, args[i].unsignedp);
1259 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1260 /* CSE will replace this only if it contains args[i].value
1261 pseudo, so convert it down to the declared mode using
1263 if (REG_P (args[i].value)
1264 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1266 args[i].initial_value
1267 = gen_lowpart_SUBREG (mode, args[i].value);
1268 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1269 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1277 /* Given the current state of MUST_PREALLOCATE and information about
1278 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1279 compute and return the final value for MUST_PREALLOCATE. */
1282 finalize_must_preallocate (int must_preallocate, int num_actuals,
1283 struct arg_data *args, struct args_size *args_size)
1285 /* See if we have or want to preallocate stack space.
1287 If we would have to push a partially-in-regs parm
1288 before other stack parms, preallocate stack space instead.
1290 If the size of some parm is not a multiple of the required stack
1291 alignment, we must preallocate.
1293 If the total size of arguments that would otherwise create a copy in
1294 a temporary (such as a CALL) is more than half the total argument list
1295 size, preallocation is faster.
1297 Another reason to preallocate is if we have a machine (like the m88k)
1298 where stack alignment is required to be maintained between every
1299 pair of insns, not just when the call is made. However, we assume here
1300 that such machines either do not have push insns (and hence preallocation
1301 would occur anyway) or the problem is taken care of with
1304 if (! must_preallocate)
1306 int partial_seen = 0;
1307 int copy_to_evaluate_size = 0;
1310 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1312 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1314 else if (partial_seen && args[i].reg == 0)
1315 must_preallocate = 1;
1317 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1318 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1319 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1320 || TREE_CODE (args[i].tree_value) == COND_EXPR
1321 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1322 copy_to_evaluate_size
1323 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1326 if (copy_to_evaluate_size * 2 >= args_size->constant
1327 && args_size->constant > 0)
1328 must_preallocate = 1;
1330 return must_preallocate;
1333 /* If we preallocated stack space, compute the address of each argument
1334 and store it into the ARGS array.
1336 We need not ensure it is a valid memory address here; it will be
1337 validized when it is used.
1339 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1342 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1346 rtx arg_reg = argblock;
1347 int i, arg_offset = 0;
1349 if (GET_CODE (argblock) == PLUS)
1350 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1352 for (i = 0; i < num_actuals; i++)
1354 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1355 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1357 unsigned int align, boundary;
1358 unsigned int units_on_stack = 0;
1359 enum machine_mode partial_mode = VOIDmode;
1361 /* Skip this parm if it will not be passed on the stack. */
1362 if (! args[i].pass_on_stack
1364 && args[i].partial == 0)
1367 if (GET_CODE (offset) == CONST_INT)
1368 addr = plus_constant (arg_reg, INTVAL (offset));
1370 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1372 addr = plus_constant (addr, arg_offset);
1374 if (args[i].partial != 0)
1376 /* Only part of the parameter is being passed on the stack.
1377 Generate a simple memory reference of the correct size. */
1378 units_on_stack = args[i].locate.size.constant;
1379 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1381 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1382 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1386 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1387 set_mem_attributes (args[i].stack,
1388 TREE_TYPE (args[i].tree_value), 1);
1390 align = BITS_PER_UNIT;
1391 boundary = args[i].locate.boundary;
1392 if (args[i].locate.where_pad != downward)
1394 else if (GET_CODE (offset) == CONST_INT)
1396 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1397 align = align & -align;
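/* The two statements above isolate the lowest set bit of the offset (in
   bits) ORed with the slot boundary, i.e. the alignment the constant
   offset is guaranteed to preserve.  For example (hypothetical numbers),
   a 12-byte offset from a 64-bit boundary gives 96 | 64 = 96 and
   96 & -96 = 32, so the slot is known to be 32-bit aligned.  */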
1399 set_mem_align (args[i].stack, align);
1401 if (GET_CODE (slot_offset) == CONST_INT)
1402 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1404 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1406 addr = plus_constant (addr, arg_offset);
1408 if (args[i].partial != 0)
1410 /* Only part of the parameter is being passed on the stack.
Generate a simple memory reference of the correct size.  */
1413 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1414 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1418 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1419 set_mem_attributes (args[i].stack_slot,
1420 TREE_TYPE (args[i].tree_value), 1);
1422 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1424 /* Function incoming arguments may overlap with sibling call
1425 outgoing arguments and we cannot allow reordering of reads
1426 from function arguments with stores to outgoing arguments
1427 of sibling calls. */
1428 set_mem_alias_set (args[i].stack, 0);
1429 set_mem_alias_set (args[i].stack_slot, 0);
1434 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1435 in a call instruction.
1437 FNDECL is the tree node for the target function. For an indirect call
1438 FNDECL will be NULL_TREE.
1440 ADDR is the operand 0 of CALL_EXPR for this call. */
1443 rtx_for_function_call (tree fndecl, tree addr)
1447 /* Get the function to call, in the form of RTL. */
1450 /* If this is the first use of the function, see if we need to
1451 make an external definition for it. */
1452 if (! TREE_USED (fndecl))
1454 assemble_external (fndecl);
1455 TREE_USED (fndecl) = 1;
1458 /* Get a SYMBOL_REF rtx for the function address. */
1459 funexp = XEXP (DECL_RTL (fndecl), 0);
1462 /* Generate an rtx (probably a pseudo-register) for the address. */
1465 funexp = expand_normal (addr);
1466 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1471 /* Return true if and only if SIZE storage units (usually bytes)
1472 starting from address ADDR overlap with already clobbered argument
area.  This function is used to determine if we should give up a
   sibcall.  */
1477 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1481 if (addr == current_function_internal_arg_pointer)
1483 else if (GET_CODE (addr) == PLUS
1485 == current_function_internal_arg_pointer)
1486 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
1487 i = INTVAL (XEXP (addr, 1));
1491 #ifdef ARGS_GROW_DOWNWARD
1496 unsigned HOST_WIDE_INT k;
1498 for (k = 0; k < size; k++)
1499 if (i + k < stored_args_map->n_bits
1500 && TEST_BIT (stored_args_map, i + k))
1507 /* Do the register loads required for any wholly-register parms or any
1508 parms which are passed both on the stack and in a register. Their
1509 expressions were already evaluated.
1511 Mark all register-parms as living through the call, putting these USE
1512 insns in the CALL_INSN_FUNCTION_USAGE field.
1514 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1515 checking, setting *SIBCALL_FAILURE if appropriate. */
1518 load_register_parameters (struct arg_data *args, int num_actuals,
1519 rtx *call_fusage, int flags, int is_sibcall,
1520 int *sibcall_failure)
1524 for (i = 0; i < num_actuals; i++)
1526 rtx reg = ((flags & ECF_SIBCALL)
1527 ? args[i].tail_call_reg : args[i].reg);
1530 int partial = args[i].partial;
1533 rtx before_arg = get_last_insn ();
1534 /* Set non-negative if we must move a word at a time, even if
just one word (e.g., partial == 4 && mode == DFmode).  Set
1536 to -1 if we just use a normal move insn. This value can be
1537 zero if the argument is a zero size structure. */
1539 if (GET_CODE (reg) == PARALLEL)
1543 gcc_assert (partial % UNITS_PER_WORD == 0);
1544 nregs = partial / UNITS_PER_WORD;
1546 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1548 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1549 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1552 size = GET_MODE_SIZE (args[i].mode);
1554 /* Handle calls that pass values in multiple non-contiguous
1555 locations. The Irix 6 ABI has examples of this. */
1557 if (GET_CODE (reg) == PARALLEL)
1558 emit_group_move (reg, args[i].parallel_value);
1560 /* If simple case, just do move. If normal partial, store_one_arg
1561 has already loaded the register for us. In all other cases,
1562 load the register(s) from memory. */
1564 else if (nregs == -1)
1566 emit_move_insn (reg, args[i].value);
1567 #ifdef BLOCK_REG_PADDING
1568 /* Handle case where we have a value that needs shifting
up to the msb, e.g. a QImode value when we're padding
1570 upward on a BYTES_BIG_ENDIAN machine. */
1571 if (size < UNITS_PER_WORD
1572 && (args[i].locate.where_pad
1573 == (BYTES_BIG_ENDIAN ? upward : downward)))
1576 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1578 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1579 report the whole reg as used. Strictly speaking, the
1580 call only uses SIZE bytes at the msb end, but it doesn't
1581 seem worth generating rtl to say that. */
1582 reg = gen_rtx_REG (word_mode, REGNO (reg));
1583 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1584 build_int_cst (NULL_TREE, shift),
1587 emit_move_insn (reg, x);
1592 /* If we have pre-computed the values to put in the registers in
1593 the case of non-aligned structures, copy them in now. */
1595 else if (args[i].n_aligned_regs != 0)
1596 for (j = 0; j < args[i].n_aligned_regs; j++)
1597 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1598 args[i].aligned_regs[j]);
1600 else if (partial == 0 || args[i].pass_on_stack)
1602 rtx mem = validize_mem (args[i].value);
1604 /* Check for overlap with already clobbered argument area. */
1606 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1608 *sibcall_failure = 1;
1610 /* Handle a BLKmode that needs shifting. */
1611 if (nregs == 1 && size < UNITS_PER_WORD
1612 #ifdef BLOCK_REG_PADDING
1613 && args[i].locate.where_pad == downward
1619 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1620 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1621 rtx x = gen_reg_rtx (word_mode);
1622 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1623 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1626 emit_move_insn (x, tem);
1627 x = expand_shift (dir, word_mode, x,
1628 build_int_cst (NULL_TREE, shift),
1631 emit_move_insn (ri, x);
1634 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1637 /* When a parameter is a block, and perhaps in other cases, it is
1638 possible that it did a load from an argument slot that was
1639 already clobbered. */
1641 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1642 *sibcall_failure = 1;
1644 /* Handle calls that pass values in multiple non-contiguous
1645 locations. The Irix 6 ABI has examples of this. */
1646 if (GET_CODE (reg) == PARALLEL)
1647 use_group_regs (call_fusage, reg);
1648 else if (nregs == -1)
1649 use_reg (call_fusage, reg);
1651 use_regs (call_fusage, REGNO (reg), nregs);
1656 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1657 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1658 bytes, then we would need to push some additional bytes to pad the
arguments.  So, we compute an adjustment to the stack pointer for an
1660 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1661 bytes. Then, when the arguments are pushed the stack will be perfectly
1662 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1663 be popped after the call. Returns the adjustment. */
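/* A worked example with hypothetical numbers: with a 16-byte preferred unit
   stack boundary, stack_pointer_delta == 0, UNADJUSTED_ARGS_SIZE == 4 and
   pending_stack_adjust == 28, the misalignment after the pushes would be
   (0 + 4) % 16 = 4; subtracting 28 % 16 leaves -8, so the adjustment
   becomes 28 + (-8) = 20.  Popping 20 bytes now and then pushing the
   4 bytes of arguments leaves the stack 16-byte aligned, and
   ARGS_SIZE->CONSTANT becomes 28 - 20 + 4 = 12 bytes to pop after the
   call.  */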
1666 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1667 struct args_size *args_size,
1668 unsigned int preferred_unit_stack_boundary)
1670 /* The number of bytes to pop so that the stack will be
1671 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1672 HOST_WIDE_INT adjustment;
1673 /* The alignment of the stack after the arguments are pushed, if we
just pushed the arguments without adjusting the stack here.  */
1675 unsigned HOST_WIDE_INT unadjusted_alignment;
1677 unadjusted_alignment
1678 = ((stack_pointer_delta + unadjusted_args_size)
1679 % preferred_unit_stack_boundary);
1681 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1682 as possible -- leaving just enough left to cancel out the
1683 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1684 PENDING_STACK_ADJUST is non-negative, and congruent to
1685 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1687 /* Begin by trying to pop all the bytes. */
1688 unadjusted_alignment
1689 = (unadjusted_alignment
1690 - (pending_stack_adjust % preferred_unit_stack_boundary));
1691 adjustment = pending_stack_adjust;
1692 /* Push enough additional bytes that the stack will be aligned
1693 after the arguments are pushed. */
1694 if (preferred_unit_stack_boundary > 1)
1696 if (unadjusted_alignment > 0)
1697 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1699 adjustment += unadjusted_alignment;
/* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1703 bytes after the call. The right number is the entire
1704 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1705 by the arguments in the first place. */
1707 = pending_stack_adjust - adjustment + unadjusted_args_size;
/* Scan expression X to see whether it dereferences any argument slots
   we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  Return nonzero if X dereferences such an
   argument slot, zero otherwise.  */
1719 check_sibcall_argument_overlap_1 (rtx x)
1728 code = GET_CODE (x);
1731 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1732 GET_MODE_SIZE (GET_MODE (x)));
1734 /* Scan all subexpressions. */
1735 fmt = GET_RTX_FORMAT (code);
1736 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1740 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1743 else if (*fmt == 'E')
1745 for (j = 0; j < XVECLEN (x, i); j++)
1746 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
/* Scan the sequence after INSN to see whether it dereferences any argument
   slots we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add the stack slots for
   ARG to the stored_args_map bitmap afterwards (when ARG is a register,
   MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
   INSN dereferences such argument slots, zero otherwise.  */
1761 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1765 if (insn == NULL_RTX)
1766 insn = get_insns ();
1768 insn = NEXT_INSN (insn);
1770 for (; insn; insn = NEXT_INSN (insn))
1772 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1775 if (mark_stored_args_map)
1777 #ifdef ARGS_GROW_DOWNWARD
1778 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1780 low = arg->locate.slot_offset.constant;
1783 for (high = low + arg->locate.size.constant; low < high; low++)
1784 SET_BIT (stored_args_map, low);
1786 return insn != NULL_RTX;
1789 /* Given that a function returns a value of mode MODE at the most
1790 significant end of hard register VALUE, shift VALUE left or right
1791 as specified by LEFT_P. Return true if some action was needed. */
1794 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1796 HOST_WIDE_INT shift;
1798 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1799 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
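/* For example (hypothetical modes): if VALUE is a DImode hard register and
   MODE is SImode, shift is 64 - 32 = 32, so the value is moved by 32 bits;
   right shifts use ashr rather than lshr for the reason noted below.  */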
1803 /* Use ashr rather than lshr for right shifts. This is for the benefit
1804 of the MIPS port, which requires SImode values to be sign-extended
1805 when stored in 64-bit registers. */
1806 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1807 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1812 /* Generate all the code for a CALL_EXPR exp
1813 and return an rtx for its value.
1814 Store the value in TARGET (specified as an rtx) if convenient.
1815 If the value is stored in TARGET then TARGET is returned.
1816 If IGNORE is nonzero, then we ignore the value of the function call. */
1819 expand_call (tree exp, rtx target, int ignore)
1821 /* Nonzero if we are currently expanding a call. */
1822 static int currently_expanding_call = 0;
1824 /* List of actual parameters. */
1825 /* FIXME: rewrite this so that it doesn't cons up a TREE_LIST. */
1826 tree actparms = CALL_EXPR_ARGS (exp);
1827 /* RTX for the function to be called. */
1829 /* Sequence of insns to perform a normal "call". */
1830 rtx normal_call_insns = NULL_RTX;
1831 /* Sequence of insns to perform a tail "call". */
1832 rtx tail_call_insns = NULL_RTX;
1833 /* Data type of the function. */
1835 tree type_arg_types;
1836 /* Declaration of the function being called,
1837 or 0 if the function is computed (not known by name). */
1839 /* The type of the function being called. */
1841 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1844 /* Register in which non-BLKmode value will be returned,
1845 or 0 if no value or if value is BLKmode. */
1847 /* Address where we should return a BLKmode value;
1848 0 if value not BLKmode. */
1849 rtx structure_value_addr = 0;
1850 /* Nonzero if that address is being passed by treating it as
1851 an extra, implicit first parameter. Otherwise,
1852 it is passed by being copied directly into struct_value_rtx. */
1853 int structure_value_addr_parm = 0;
1854 /* Size of aggregate value wanted, or zero if none wanted
1855 or if we are using the non-reentrant PCC calling convention
1856 or expecting the value in registers. */
1857 HOST_WIDE_INT struct_value_size = 0;
1858 /* Nonzero if called function returns an aggregate in memory PCC style,
1859 by returning the address of where to find it. */
1860 int pcc_struct_value = 0;
1861 rtx struct_value = 0;
1863 /* Number of actual parameters in this call, including struct value addr. */
1865 /* Number of named args. Args after this are anonymous ones
1866 and they must all go on the stack. */
1869 /* Vector of information about each argument.
1870 Arguments are numbered in the order they will be pushed,
1871 not the order they are written. */
1872 struct arg_data *args;
1874 /* Total size in bytes of all the stack-parms scanned so far. */
1875 struct args_size args_size;
1876 struct args_size adjusted_args_size;
1877 /* Size of arguments before any adjustments (such as rounding). */
1878 int unadjusted_args_size;
1879 /* Data on reg parms scanned so far. */
1880 CUMULATIVE_ARGS args_so_far;
1881 /* Nonzero if a reg parm has been scanned. */
1883 /* Nonzero if this is an indirect function call. */
1885 /* Nonzero if we must avoid push-insns in the args for this call.
1886 If stack space is allocated for register parameters, but not by the
1887 caller, then it is preallocated in the fixed part of the stack frame.
1888 So the entire argument block must then be preallocated (i.e., we
1889 ignore PUSH_ROUNDING in that case). */
1891 int must_preallocate = !PUSH_ARGS;
1893 /* Size of the stack reserved for parameter registers. */
1894 int reg_parm_stack_space = 0;
1896 /* Address of space preallocated for stack parms
1897 (on machines that lack push insns), or 0 if space not preallocated. */
1900 /* Mask of ECF_ flags. */
1902 #ifdef REG_PARM_STACK_SPACE
1903 /* Define the boundary of the register parm stack space that needs to be
1905 int low_to_save, high_to_save;
1906 rtx save_area = 0; /* Place that it is saved */
1909 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1910 char *initial_stack_usage_map = stack_usage_map;
1911 char *stack_usage_map_buf = NULL;
1913 int old_stack_allocated;
1915 /* State variables to track stack modifications. */
1916 rtx old_stack_level = 0;
1917 int old_stack_arg_under_construction = 0;
1918 int old_pending_adj = 0;
1919 int old_inhibit_defer_pop = inhibit_defer_pop;
1921 /* Some stack pointer alterations we make are performed via
1922 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1923 which we then also need to save/restore along the way. */
1924 int old_stack_pointer_delta = 0;
1927 tree p = CALL_EXPR_FN (exp);
1928 tree addr = CALL_EXPR_FN (exp);
1930 /* The alignment of the stack, in bits. */
1931 unsigned HOST_WIDE_INT preferred_stack_boundary;
1932 /* The alignment of the stack, in bytes. */
1933 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1934 /* The static chain value to use for this call. */
1935 rtx static_chain_value;
1936 /* See if this is a "nothrow" function call. */
1937 if (TREE_NOTHROW (exp))
1938 flags |= ECF_NOTHROW;
1940 /* See if we can find a DECL-node for the actual function, and get the
1941 function attributes (flags) from the function decl or type node. */
1942 fndecl = get_callee_fndecl (exp);
1945 fntype = TREE_TYPE (fndecl);
1946 flags |= flags_from_decl_or_type (fndecl);
1950 fntype = TREE_TYPE (TREE_TYPE (p));
1951 flags |= flags_from_decl_or_type (fntype);
1954 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1956 /* Warn if this value is an aggregate type,
1957 regardless of which calling convention we are using for it. */
1958 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1959 warning (OPT_Waggregate_return, "function call has aggregate value");
1961 /* If the result of a pure or const function call is ignored (or void),
1962 and none of its arguments are volatile, we can avoid expanding the
1963 call and just evaluate the arguments for side-effects. */
1964 if ((flags & (ECF_CONST | ECF_PURE))
1965 && (ignore || target == const0_rtx
1966 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
1968 bool volatilep = false;
1971 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1972 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
1980 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1981 expand_expr (TREE_VALUE (arg), const0_rtx,
1982 VOIDmode, EXPAND_NORMAL);
1987 #ifdef REG_PARM_STACK_SPACE
1988 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1991 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1992 if (reg_parm_stack_space > 0 && PUSH_ARGS)
1993 must_preallocate = 1;
1996 /* Set up a place to return a structure. */
1998 /* Cater to broken compilers. */
1999 if (aggregate_value_p (exp, fndecl))
2001 /* This call returns a big structure. */
2002 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2004 #ifdef PCC_STATIC_STRUCT_RETURN
2006 pcc_struct_value = 1;
2008 #else /* not PCC_STATIC_STRUCT_RETURN */
2010 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2012 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2013 structure_value_addr = XEXP (target, 0);
2016 /* For variable-sized objects, we must be called with a target
2017 specified. If we were to allocate space on the stack here,
2018 we would have no way of knowing when to free it. */
2019 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2021 mark_temp_addr_taken (d);
2022 structure_value_addr = XEXP (d, 0);
2026 #endif /* not PCC_STATIC_STRUCT_RETURN */
2029 /* Figure out the amount to which the stack should be aligned. */
2030 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2033 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2034 if (i && i->preferred_incoming_stack_boundary)
2035 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2038 /* Operand 0 is a pointer-to-function; get the type of the function. */
2039 funtype = TREE_TYPE (addr);
2040 gcc_assert (POINTER_TYPE_P (funtype));
2041 funtype = TREE_TYPE (funtype);
2043 /* Munge the tree to split complex arguments into their imaginary
2045 if (targetm.calls.split_complex_arg)
2047 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2048 actparms = split_complex_values (actparms);
2051 type_arg_types = TYPE_ARG_TYPES (funtype);
2053 if (flags & ECF_MAY_BE_ALLOCA)
2054 current_function_calls_alloca = 1;
2056 /* If struct_value_rtx is 0, it means pass the address
2057 as if it were an extra parameter. */
2058 if (structure_value_addr && struct_value == 0)
2060 /* If structure_value_addr is a REG other than
2061 virtual_outgoing_args_rtx, we can always use it. If it
2062 is not a REG, we must always copy it into a register.
2063 If it is virtual_outgoing_args_rtx, we must copy it to another
2064 register in some cases. */
2065 rtx temp = (!REG_P (structure_value_addr)
2066 || (ACCUMULATE_OUTGOING_ARGS
2067 && stack_arg_under_construction
2068 && structure_value_addr == virtual_outgoing_args_rtx)
2069 ? copy_addr_to_reg (convert_memory_address
2070 (Pmode, structure_value_addr))
2071 : structure_value_addr);
2074 = tree_cons (error_mark_node,
2075 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2078 structure_value_addr_parm = 1;
2081 /* Count the arguments and set NUM_ACTUALS. */
2082 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2085 /* Compute number of named args.
2086 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2088 if (type_arg_types != 0)
2090 = (list_length (type_arg_types)
2091 /* Count the struct value address, if it is passed as a parm. */
2092 + structure_value_addr_parm);
2094 /* If we know nothing, treat all args as named. */
2095 n_named_args = num_actuals;
2097 /* Start updating where the next arg would go.
2099 On some machines (such as the PA) indirect calls have a different
2100 calling convention than normal calls. The fourth argument in
2101 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2103 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2105 /* Now possibly adjust the number of named args.
2106 Normally, don't include the last named arg if anonymous args follow.
2107 We do include the last named arg if
2108 targetm.calls.strict_argument_naming() returns nonzero.
2109 (If no anonymous args follow, the result of list_length is actually
2110 one too large. This is harmless.)
2112 If targetm.calls.pretend_outgoing_varargs_named() returns
2113 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2114 this machine will be able to place unnamed args that were passed
2115 in registers into the stack. So treat all args as named. This
2116 allows the insns emitted for a specific argument list to be
2117 independent of the function declaration.
2119 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2120 we do not have any reliable way to pass unnamed args in
2121 registers, so we must force them into memory. */
2123 if (type_arg_types != 0
2124 && targetm.calls.strict_argument_naming (&args_so_far))
2126 else if (type_arg_types != 0
2127 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2128 /* Don't include the last named arg. */
2131 /* Treat all args as named. */
2132 n_named_args = num_actuals;
2134 /* Make a vector to hold all the information about each arg. */
2135 args = alloca (num_actuals * sizeof (struct arg_data));
2136 memset (args, 0, num_actuals * sizeof (struct arg_data));
2138 /* Build up entries in the ARGS array, compute the size of the
2139 arguments into ARGS_SIZE, etc. */
2140 initialize_argument_information (num_actuals, args, &args_size,
2141 n_named_args, actparms, fndecl,
2142 &args_so_far, reg_parm_stack_space,
2143 &old_stack_level, &old_pending_adj,
2144 &must_preallocate, &flags,
2145 &try_tail_call, CALL_FROM_THUNK_P (exp));
2149 /* If this function requires a variable-sized argument list, don't
2150 try to make a cse'able block for this call. We may be able to
2151 do this eventually, but it is too complicated to keep track of
2152 what insns go in the cse'able block and which don't. */
2154 flags &= ~ECF_LIBCALL_BLOCK;
2155 must_preallocate = 1;
2158 /* Now make final decision about preallocating stack space. */
2159 must_preallocate = finalize_must_preallocate (must_preallocate,
2163 /* If the structure value address will reference the stack pointer, we
2164 must stabilize it. We don't need to do this if we know that we are
2165 not going to adjust the stack pointer in processing this call. */
2167 if (structure_value_addr
2168 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2169 || reg_mentioned_p (virtual_outgoing_args_rtx,
2170 structure_value_addr))
2172 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2173 structure_value_addr = copy_to_reg (structure_value_addr);
2175 /* Tail calls can make things harder to debug, and we've traditionally
2176 pushed these optimizations into -O2. Don't try if we're already
2177 expanding a call, as that means we're an argument. Don't try if
2178 there are cleanups, as we know there's code to follow the call. */
2180 if (currently_expanding_call++ != 0
2181 || !flag_optimize_sibling_calls
2183 || lookup_stmt_eh_region (exp) >= 0)
2186 /* Other reasons why tail call optimization can fail. */
2188 #ifdef HAVE_sibcall_epilogue
2189 !HAVE_sibcall_epilogue
2194 /* Doing sibling call optimization needs some work, since
2195 structure_value_addr can be allocated on the stack.
2196 It does not seem worth the effort since few optimizable
2197 sibling calls will return a structure. */
2198 || structure_value_addr != NULL_RTX
2199 /* Check whether the target is able to optimize the call
2201 || !targetm.function_ok_for_sibcall (fndecl, exp)
2202 /* Functions that do not return exactly once may not be sibcall
2204 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2205 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2206 /* If the called function is nested in the current one, it might access
2207 some of the caller's arguments, but could clobber them beforehand if
2208 the argument areas are shared. */
2209 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2210 /* If this function requires more stack slots than the current
2211 function, we cannot change it into a sibling call.
2212 current_function_pretend_args_size is not part of the
2213 stack allocated by our caller. */
2214 || args_size.constant > (current_function_args_size
2215 - current_function_pretend_args_size)
2216 /* If the callee pops its own arguments, then it must pop exactly
2217 the same number of arguments as the current function. */
2218 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2219 != RETURN_POPS_ARGS (current_function_decl,
2220 TREE_TYPE (current_function_decl),
2221 current_function_args_size))
2222 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2225 /* Ensure current function's preferred stack boundary is at least
2226 what we need. We don't have to increase alignment for recursive
2228 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2229 && fndecl != current_function_decl)
2230 cfun->preferred_stack_boundary = preferred_stack_boundary;
2231 if (fndecl == current_function_decl)
2232 cfun->recursive_call_emit = true;
2234 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2236 /* We want to make two insn chains; one for a sibling call, the other
2237 for a normal call. We will select one of the two chains after
2238 initial RTL generation is complete. */
2239 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
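/* Pass 0 builds the sibling-call (tail call) sequence; pass 1 builds the
   normal call sequence. If the sibcall attempt fails, only the pass-1
   sequence is emitted. */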
2241 int sibcall_failure = 0;
2242 /* We want to emit any pending stack adjustments before the tail
2243 recursion "call". That way we know any adjustment after the tail
2244 recursion call can be ignored if we indeed use the tail
2246 int save_pending_stack_adjust = 0;
2247 int save_stack_pointer_delta = 0;
2249 rtx before_call, next_arg_reg;
2253 /* State variables we need to save and restore between
2255 save_pending_stack_adjust = pending_stack_adjust;
2256 save_stack_pointer_delta = stack_pointer_delta;
2259 flags &= ~ECF_SIBCALL;
2261 flags |= ECF_SIBCALL;
2263 /* Other state variables that we must reinitialize each time
2264 through the loop (that are not initialized by the loop itself). */
2268 /* Start a new sequence for the normal call case.
2270 From this point on, if the sibling call fails, we want to set
2271 sibcall_failure instead of continuing the loop. */
2274 /* Don't let pending stack adjusts add up to too much.
2275 Also, do all pending adjustments now if there is any chance
2276 this might be a call to alloca or if we are expanding a sibling
2277 call sequence or if we are calling a function that is to return
2278 with stack pointer depressed.
2279 Also do the adjustments before a throwing call, otherwise
2280 exception handling can fail; PR 19225. */
2281 if (pending_stack_adjust >= 32
2282 || (pending_stack_adjust > 0
2283 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2284 || (pending_stack_adjust > 0
2285 && flag_exceptions && !(flags & ECF_NOTHROW))
2287 do_pending_stack_adjust ();
2289 /* When calling a const function, we must pop the stack args right away,
2290 so that the pop is deleted or moved with the call. */
2291 if (pass && (flags & ECF_LIBCALL_BLOCK))
2294 /* Precompute any arguments as needed. */
2296 precompute_arguments (flags, num_actuals, args);
2298 /* Now we are about to start emitting insns that can be deleted
2299 if a libcall is deleted. */
2300 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2303 if (pass == 0 && cfun->stack_protect_guard)
2304 stack_protect_epilogue ();
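/* The stack-protector check is emitted here in the sibcall pass because a
   tail call never returns through this function's normal epilogue, where
   the check would otherwise be placed. */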
2306 adjusted_args_size = args_size;
2307 /* Compute the actual size of the argument block required. The variable
2308 and constant sizes must be combined, the size may have to be rounded,
2309 and there may be a minimum required size. When generating a sibcall
2310 pattern, do not round up, since we'll be re-using whatever space our
2312 unadjusted_args_size
2313 = compute_argument_block_size (reg_parm_stack_space,
2314 &adjusted_args_size,
2316 : preferred_stack_boundary));
2318 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2320 /* The argument block when performing a sibling call is the
2321 incoming argument block. */
2324 argblock = virtual_incoming_args_rtx;
2326 #ifdef STACK_GROWS_DOWNWARD
2327 = plus_constant (argblock, current_function_pretend_args_size);
2329 = plus_constant (argblock, -current_function_pretend_args_size);
2331 stored_args_map = sbitmap_alloc (args_size.constant);
2332 sbitmap_zero (stored_args_map);
2335 /* If we have no actual push instructions, or shouldn't use them,
2336 make space for all args right now. */
2337 else if (adjusted_args_size.var != 0)
2339 if (old_stack_level == 0)
2341 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2342 old_stack_pointer_delta = stack_pointer_delta;
2343 old_pending_adj = pending_stack_adjust;
2344 pending_stack_adjust = 0;
2345 /* stack_arg_under_construction says whether a stack arg is
2346 being constructed at the old stack level. Pushing the stack
2347 gets a clean outgoing argument block. */
2348 old_stack_arg_under_construction = stack_arg_under_construction;
2349 stack_arg_under_construction = 0;
2351 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2355 /* Note that we must go through the motions of allocating an argument
2356 block even if the size is zero because we may be storing args
2357 in the area reserved for register arguments, which may be part of
2360 int needed = adjusted_args_size.constant;
2362 /* Store the maximum argument space used. It will be pushed by
2363 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2366 if (needed > current_function_outgoing_args_size)
2367 current_function_outgoing_args_size = needed;
2369 if (must_preallocate)
2371 if (ACCUMULATE_OUTGOING_ARGS)
2373 /* Since the stack pointer will never be pushed, it is
2374 possible for the evaluation of a parm to clobber
2375 something we have already written to the stack.
2376 Since most function calls on RISC machines do not use
2377 the stack, this is uncommon, but must work correctly.
2379 Therefore, we save any area of the stack that was already
2380 written and that we are using. Here we set up to do this
2381 by making a new stack usage map from the old one. The
2382 actual save will be done by store_one_arg.
2384 Another approach might be to try to reorder the argument
2385 evaluations to avoid this conflicting stack usage. */
2387 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2388 /* Since we will be writing into the entire argument area,
2389 the map must be allocated for its entire size, not just
2390 the part that is the responsibility of the caller. */
2391 needed += reg_parm_stack_space;
2394 #ifdef ARGS_GROW_DOWNWARD
2395 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2398 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2401 if (stack_usage_map_buf)
2402 free (stack_usage_map_buf);
2403 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2404 stack_usage_map = stack_usage_map_buf;
2406 if (initial_highest_arg_in_use)
2407 memcpy (stack_usage_map, initial_stack_usage_map,
2408 initial_highest_arg_in_use);
2410 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2411 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2412 (highest_outgoing_arg_in_use
2413 - initial_highest_arg_in_use));
2416 /* The address of the outgoing argument list must not be
2417 copied to a register here, because argblock would be left
2418 pointing to the wrong place after the call to
2419 allocate_dynamic_stack_space below. */
2421 argblock = virtual_outgoing_args_rtx;
2425 if (inhibit_defer_pop == 0)
2427 /* Try to reuse some or all of the pending_stack_adjust
2428 to get this space. */
2430 = (combine_pending_stack_adjustment_and_call
2431 (unadjusted_args_size,
2432 &adjusted_args_size,
2433 preferred_unit_stack_boundary));
2435 /* combine_pending_stack_adjustment_and_call computes
2436 an adjustment before the arguments are allocated.
2437 Account for them and see whether or not the stack
2438 needs to go up or down. */
2439 needed = unadjusted_args_size - needed;
2443 /* We're releasing stack space. */
2444 /* ??? We can avoid any adjustment at all if we're
2445 already aligned. FIXME. */
2446 pending_stack_adjust = -needed;
2447 do_pending_stack_adjust ();
2451 /* We need to allocate space. We'll do that in
2452 push_block below. */
2453 pending_stack_adjust = 0;
2456 /* Special case this because overhead of `push_block' in
2457 this case is non-trivial. */
2459 argblock = virtual_outgoing_args_rtx;
2462 argblock = push_block (GEN_INT (needed), 0, 0);
2463 #ifdef ARGS_GROW_DOWNWARD
2464 argblock = plus_constant (argblock, needed);
2468 /* We only really need to call `copy_to_reg' in the case
2469 where push insns are going to be used to pass ARGBLOCK
2470 to a function call in ARGS. In that case, the stack
2471 pointer changes value from the allocation point to the
2472 call point, and hence the value of
2473 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2474 as well always do it. */
2475 argblock = copy_to_reg (argblock);
2480 if (ACCUMULATE_OUTGOING_ARGS)
2482 /* The save/restore code in store_one_arg handles all
2483 cases except one: a constructor call (including a C
2484 function returning a BLKmode struct) to initialize
2486 if (stack_arg_under_construction)
2488 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2489 rtx push_size = GEN_INT (reg_parm_stack_space
2490 + adjusted_args_size.constant);
2492 rtx push_size = GEN_INT (adjusted_args_size.constant);
2494 if (old_stack_level == 0)
2496 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2498 old_stack_pointer_delta = stack_pointer_delta;
2499 old_pending_adj = pending_stack_adjust;
2500 pending_stack_adjust = 0;
2501 /* stack_arg_under_construction says whether a stack
2502 arg is being constructed at the old stack level.
2503 Pushing the stack gets a clean outgoing argument
2505 old_stack_arg_under_construction
2506 = stack_arg_under_construction;
2507 stack_arg_under_construction = 0;
2508 /* Make a new map for the new argument list. */
2509 if (stack_usage_map_buf)
2510 free (stack_usage_map_buf);
2511 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2512 stack_usage_map = stack_usage_map_buf;
2513 highest_outgoing_arg_in_use = 0;
2515 allocate_dynamic_stack_space (push_size, NULL_RTX,
2519 /* If argument evaluation might modify the stack pointer,
2520 copy the address of the argument list to a register. */
2521 for (i = 0; i < num_actuals; i++)
2522 if (args[i].pass_on_stack)
2524 argblock = copy_addr_to_reg (argblock);
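/* ARGBLOCK is now settled; record each argument's stack slot address
   relative to it. */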
2529 compute_argument_addresses (args, argblock, num_actuals);
2531 /* If we push args individually in reverse order, perform stack alignment
2532 before the first push (the last arg). */
2533 if (PUSH_ARGS_REVERSED && argblock == 0
2534 && adjusted_args_size.constant != unadjusted_args_size)
2536 /* When the stack adjustment is pending, we get better code
2537 by combining the adjustments. */
2538 if (pending_stack_adjust
2539 && ! (flags & ECF_LIBCALL_BLOCK)
2540 && ! inhibit_defer_pop)
2542 pending_stack_adjust
2543 = (combine_pending_stack_adjustment_and_call
2544 (unadjusted_args_size,
2545 &adjusted_args_size,
2546 preferred_unit_stack_boundary));
2547 do_pending_stack_adjust ();
2549 else if (argblock == 0)
2550 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2551 - unadjusted_args_size));
2553 /* Now that the stack is properly aligned, pops can't safely
2554 be deferred during the evaluation of the arguments. */
2557 funexp = rtx_for_function_call (fndecl, addr);
2559 /* Figure out the register where the value, if any, will come back. */
2561 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2562 && ! structure_value_addr)
2564 if (pcc_struct_value)
2565 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2566 fndecl, NULL, (pass == 0));
2568 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2571 /* If VALREG is a PARALLEL whose first member has a zero
2572 offset, use that. This is for targets such as m68k that
2573 return the same value in multiple places. */
2574 if (GET_CODE (valreg) == PARALLEL)
2576 rtx elem = XVECEXP (valreg, 0, 0);
2577 rtx where = XEXP (elem, 0);
2578 rtx offset = XEXP (elem, 1);
2579 if (offset == const0_rtx
2580 && GET_MODE (where) == GET_MODE (valreg))
2585 /* Precompute all register parameters. It isn't safe to compute anything
2586 once we have started filling any specific hard regs. */
2587 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
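/* Evaluate the static chain operand, if any, so prepare_call_address can
   load it into the static chain register; nested functions use it to reach
   their enclosing frame. */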
2589 if (CALL_EXPR_STATIC_CHAIN (exp))
2590 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2592 static_chain_value = 0;
2594 #ifdef REG_PARM_STACK_SPACE
2595 /* Save the fixed argument area if it's part of the caller's frame and
2596 is clobbered by argument setup for this call. */
2597 if (ACCUMULATE_OUTGOING_ARGS && pass)
2598 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2599 &low_to_save, &high_to_save);
2602 /* Now store (and compute if necessary) all non-register parms.
2603 These come before register parms, since they can require block-moves,
2604 which could clobber the registers used for register parms.
2605 Parms which have partial registers are not stored here,
2606 but we do preallocate space here if they want that. */
2608 for (i = 0; i < num_actuals; i++)
2609 if (args[i].reg == 0 || args[i].pass_on_stack)
2611 rtx before_arg = get_last_insn ();
2613 if (store_one_arg (&args[i], argblock, flags,
2614 adjusted_args_size.var != 0,
2615 reg_parm_stack_space)
2617 && check_sibcall_argument_overlap (before_arg,
2619 sibcall_failure = 1;
2621 if (flags & ECF_CONST
2623 && args[i].value == args[i].stack)
2624 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2625 gen_rtx_USE (VOIDmode,
2630 /* If we have a parm that is passed in registers but not in memory
2631 and whose alignment does not permit a direct copy into registers,
2632 make a group of pseudos that correspond to each register that we
2634 if (STRICT_ALIGNMENT)
2635 store_unaligned_arguments_into_pseudos (args, num_actuals);
2637 /* Now store any partially-in-registers parm.
2638 This is the last place a block-move can happen. */
2640 for (i = 0; i < num_actuals; i++)
2641 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2643 rtx before_arg = get_last_insn ();
2645 if (store_one_arg (&args[i], argblock, flags,
2646 adjusted_args_size.var != 0,
2647 reg_parm_stack_space)
2649 && check_sibcall_argument_overlap (before_arg,
2651 sibcall_failure = 1;
2654 /* If we pushed args in forward order, perform stack alignment
2655 after pushing the last arg. */
2656 if (!PUSH_ARGS_REVERSED && argblock == 0)
2657 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2658 - unadjusted_args_size));
2660 /* If register arguments require space on the stack and stack space
2661 was not preallocated, allocate stack space here for arguments
2662 passed in registers. */
2663 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2664 if (!ACCUMULATE_OUTGOING_ARGS
2665 && must_preallocate == 0 && reg_parm_stack_space > 0)
2666 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2669 /* Pass the function the address in which to return a
2671 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2673 structure_value_addr
2674 = convert_memory_address (Pmode, structure_value_addr);
2675 emit_move_insn (struct_value,
2677 force_operand (structure_value_addr,
2680 if (REG_P (struct_value))
2681 use_reg (&call_fusage, struct_value);
2684 funexp = prepare_call_address (funexp, static_chain_value,
2685 &call_fusage, reg_parm_seen, pass == 0);
2687 load_register_parameters (args, num_actuals, &call_fusage, flags,
2688 pass == 0, &sibcall_failure);
2690 /* Save a pointer to the last insn before the call, so that we can
2691 later safely search backwards to find the CALL_INSN. */
2692 before_call = get_last_insn ();
2694 /* Set up next argument register. For sibling calls on machines
2695 with register windows this should be the incoming register. */
2696 #ifdef FUNCTION_INCOMING_ARG
2698 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2702 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2705 /* All arguments and registers used for the call must be set up by
2708 /* Stack must be properly aligned now. */
2710 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2712 /* Generate the actual call instruction. */
2713 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2714 adjusted_args_size.constant, struct_value_size,
2715 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2716 flags, & args_so_far);
2718 /* If a non-BLKmode value is returned at the most significant end
2719 of a register, shift the register right by the appropriate amount
2720 and update VALREG accordingly. BLKmode values are handled by the
2721 group load/store machinery below. */
2722 if (!structure_value_addr
2723 && !pcc_struct_value
2724 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2725 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2727 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2728 sibcall_failure = 1;
2729 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2732 /* If call is cse'able, make appropriate pair of reg-notes around it.
2733 Test valreg so we don't crash; may safely ignore `const'
2734 if return type is void. Disable for PARALLEL return values, because
2735 we have no way to move such values into a pseudo register. */
2736 if (pass && (flags & ECF_LIBCALL_BLOCK))
2740 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2742 insns = get_insns ();
2744 /* Expansion of block moves may have introduced a loop, which must
2745 not appear inside a libcall block. */
2746 for (insn = insns; insn; insn = NEXT_INSN (insn))
2758 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2760 /* Mark the return value as a pointer if needed. */
2761 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2762 mark_reg_pointer (temp,
2763 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2766 if (flag_unsafe_math_optimizations
2768 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2769 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2770 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2771 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2772 note = gen_rtx_fmt_e (SQRT,
2774 args[0].initial_value);
2777 /* Construct an "equal form" for the value which
2778 mentions all the arguments in order as well as
2779 the function name. */
2780 for (i = 0; i < num_actuals; i++)
2781 note = gen_rtx_EXPR_LIST (VOIDmode,
2782 args[i].initial_value, note);
2783 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2785 if (flags & ECF_PURE)
2786 note = gen_rtx_EXPR_LIST (VOIDmode,
2787 gen_rtx_USE (VOIDmode,
2788 gen_rtx_MEM (BLKmode,
2789 gen_rtx_SCRATCH (VOIDmode))),
2792 emit_libcall_block (insns, temp, valreg, note);
2797 else if (pass && (flags & ECF_MALLOC))
2799 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2802 /* The return value from a malloc-like function is a pointer. */
2803 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2804 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2806 emit_move_insn (temp, valreg);
2808 /* The return value from a malloc-like function cannot alias anything else. */
2810 last = get_last_insn ();
2812 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2814 /* Write out the sequence. */
2815 insns = get_insns ();
2821 /* For calls to `setjmp', etc., inform flow.c it should complain
2822 if nonvolatile values are live. For functions that cannot return,
2823 inform flow that control does not fall through. */
2825 if ((flags & ECF_NORETURN) || pass == 0)
2827 /* The barrier must be emitted
2828 immediately after the CALL_INSN. Some ports emit more
2829 than just a CALL_INSN above, so we must search for it here. */
2831 rtx last = get_last_insn ();
2832 while (!CALL_P (last))
2834 last = PREV_INSN (last);
2835 /* There was no CALL_INSN? */
2836 gcc_assert (last != before_call);
2839 emit_barrier_after (last);
2841 /* Stack adjustments after a noreturn call are dead code.
2842 However when NO_DEFER_POP is in effect, we must preserve
2843 stack_pointer_delta. */
2844 if (inhibit_defer_pop == 0)
2846 stack_pointer_delta = old_stack_allocated;
2847 pending_stack_adjust = 0;
2851 /* If value type not void, return an rtx for the value. */
2853 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2855 target = const0_rtx;
2856 else if (structure_value_addr)
2858 if (target == 0 || !MEM_P (target))
2861 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2862 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2863 structure_value_addr));
2864 set_mem_attributes (target, exp, 1);
2867 else if (pcc_struct_value)
2869 /* This is the special C++ case where we need to
2870 know what the true target was. We take care to
2871 never use this value more than once in one expression. */
2872 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2873 copy_to_reg (valreg));
2874 set_mem_attributes (target, exp, 1);
2876 /* Handle calls that return values in multiple non-contiguous locations.
2877 The Irix 6 ABI has examples of this. */
2878 else if (GET_CODE (valreg) == PARALLEL)
2882 /* This will only be assigned once, so it can be readonly. */
2883 tree nt = build_qualified_type (TREE_TYPE (exp),
2884 (TYPE_QUALS (TREE_TYPE (exp))
2885 | TYPE_QUAL_CONST));
2887 target = assign_temp (nt, 0, 1, 1);
2890 if (! rtx_equal_p (target, valreg))
2891 emit_group_store (target, valreg, TREE_TYPE (exp),
2892 int_size_in_bytes (TREE_TYPE (exp)));
2894 /* We can not support sibling calls for this case. */
2895 sibcall_failure = 1;
2898 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2899 && GET_MODE (target) == GET_MODE (valreg))
2901 bool may_overlap = false;
2903 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2904 reg to a plain register. */
2906 && HARD_REGISTER_P (valreg)
2907 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
2908 && !(REG_P (target) && !HARD_REGISTER_P (target)))
2909 valreg = copy_to_reg (valreg);
2911 /* If TARGET is a MEM in the argument area, and we have
2912 saved part of the argument area, then we can't store
2913 directly into TARGET as it may get overwritten when we
2914 restore the argument save area below. Don't work too
2915 hard though and simply force TARGET to a register if it
2916 is a MEM; the optimizer is quite likely to sort it out. */
2917 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2918 for (i = 0; i < num_actuals; i++)
2919 if (args[i].save_area)
2926 target = copy_to_reg (valreg);
2929 /* TARGET and VALREG cannot be equal at this point
2930 because the latter would not have
2931 REG_FUNCTION_VALUE_P true, while the former would if
2932 it were referring to the same register.
2934 If they refer to the same register, this move will be
2935 a no-op, except when function inlining is being
2937 emit_move_insn (target, valreg);
2939 /* If we are setting a MEM, this code must be executed.
2940 Since it is emitted after the call insn, sibcall
2941 optimization cannot be performed in that case. */
2943 sibcall_failure = 1;
2946 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2948 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2950 /* We can not support sibling calls for this case. */
2951 sibcall_failure = 1;
2954 target = copy_to_reg (valreg);
2956 if (targetm.calls.promote_function_return(funtype))
2958 /* If we promoted this return value, make the proper SUBREG.
2959 TARGET might be const0_rtx here, so be careful. */
2961 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2962 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2964 tree type = TREE_TYPE (exp);
2965 int unsignedp = TYPE_UNSIGNED (type);
2967 enum machine_mode pmode;
2969 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
2970 /* If we don't promote as expected, something is wrong. */
2971 gcc_assert (GET_MODE (target) == pmode);
2973 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
2974 && (GET_MODE_SIZE (GET_MODE (target))
2975 > GET_MODE_SIZE (TYPE_MODE (type))))
2977 offset = GET_MODE_SIZE (GET_MODE (target))
2978 - GET_MODE_SIZE (TYPE_MODE (type));
2979 if (! BYTES_BIG_ENDIAN)
2980 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
2981 else if (! WORDS_BIG_ENDIAN)
2982 offset %= UNITS_PER_WORD;
2984 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
2985 SUBREG_PROMOTED_VAR_P (target) = 1;
2986 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
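/* For example (illustrative, target-dependent): on a machine that promotes
   SImode return values to DImode, TARGET becomes a promoted
   (subreg:SI (reg:DI ...)), telling later code the upper bits are already
   sign- or zero-extended. */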
2990 /* If size of args is variable or this was a constructor call for a stack
2991 argument, restore saved stack-pointer value. */
2993 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
2995 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2996 stack_pointer_delta = old_stack_pointer_delta;
2997 pending_stack_adjust = old_pending_adj;
2998 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2999 stack_arg_under_construction = old_stack_arg_under_construction;
3000 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3001 stack_usage_map = initial_stack_usage_map;
3002 sibcall_failure = 1;
3004 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3006 #ifdef REG_PARM_STACK_SPACE
3008 restore_fixed_argument_area (save_area, argblock,
3009 high_to_save, low_to_save);
3012 /* If we saved any argument areas, restore them. */
3013 for (i = 0; i < num_actuals; i++)
3014 if (args[i].save_area)
3016 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3018 = gen_rtx_MEM (save_mode,
3019 memory_address (save_mode,
3020 XEXP (args[i].stack_slot, 0)));
3022 if (save_mode != BLKmode)
3023 emit_move_insn (stack_area, args[i].save_area);
3025 emit_block_move (stack_area, args[i].save_area,
3026 GEN_INT (args[i].locate.size.constant),
3027 BLOCK_OP_CALL_PARM);
3030 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3031 stack_usage_map = initial_stack_usage_map;
3034 /* If this was alloca, record the new stack level for nonlocal gotos.
3035 Check for the handler slots since we might not have a save area
3036 for non-local gotos. */
3038 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3039 update_nonlocal_goto_save_area ();
3041 /* Free up storage we no longer need. */
3042 for (i = 0; i < num_actuals; ++i)
3043 if (args[i].aligned_regs)
3044 free (args[i].aligned_regs);
3046 insns = get_insns ();
3051 tail_call_insns = insns;
3053 /* Restore the pending stack adjustment now that we have
3054 finished generating the sibling call sequence. */
3056 pending_stack_adjust = save_pending_stack_adjust;
3057 stack_pointer_delta = save_stack_pointer_delta;
3059 /* Prepare arg structure for next iteration. */
3060 for (i = 0; i < num_actuals; i++)
3063 args[i].aligned_regs = 0;
3067 sbitmap_free (stored_args_map);
3071 normal_call_insns = insns;
3073 /* Verify that we've deallocated all the stack we used. */
3074 gcc_assert ((flags & ECF_NORETURN)
3075 || (old_stack_allocated
3076 == stack_pointer_delta - pending_stack_adjust));
3079 /* If something prevents making this a sibling call,
3080 zero out the sequence. */
3081 if (sibcall_failure)
3082 tail_call_insns = NULL_RTX;
3087 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3088 arguments too, as the argument area is now clobbered by the call. */
3089 if (tail_call_insns)
3091 emit_insn (tail_call_insns);
3092 cfun->tail_call_emit = true;
3095 emit_insn (normal_call_insns);
3097 currently_expanding_call--;
3099 /* If this function returns with the stack pointer depressed, ensure
3100 this block saves and restores the stack pointer, show it was
3101 changed, and adjust for any outgoing arg space. */
3102 if (flags & ECF_SP_DEPRESSED)
3104 clear_pending_stack_adjust ();
3105 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3106 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3109 if (stack_usage_map_buf)
3110 free (stack_usage_map_buf);
3115 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3116 this function's incoming arguments.
3118 At the start of RTL generation we know the only REG_EQUIV notes
3119 in the rtl chain are those for incoming arguments, so we can look
3120 for REG_EQUIV notes between the start of the function and the
3121 NOTE_INSN_FUNCTION_BEG.
3123 This is (slight) overkill. We could keep track of the highest
3124 argument we clobber and be more selective in removing notes, but it
3125 does not seem to be worth the effort. */
3128 fixup_tail_calls (void)
3132 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3136 /* There are never REG_EQUIV notes for the incoming arguments
3137 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3139 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
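/* Strip every REG_EQUIV note attached to this insn; there may be more
   than one. */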
3142 note = find_reg_note (insn, REG_EQUIV, 0);
3144 remove_note (insn, note);
3145 note = find_reg_note (insn, REG_EQUIV, 0);
3150 /* Traverse an argument list in VALUES and expand all complex
3151 arguments into their components. */
3153 split_complex_values (tree values)
3157 /* Before allocating memory, check for the common case of no complex. */
3158 for (p = values; p; p = TREE_CHAIN (p))
3160 tree type = TREE_TYPE (TREE_VALUE (p));
3161 if (type && TREE_CODE (type) == COMPLEX_TYPE
3162 && targetm.calls.split_complex_arg (type))
3168 values = copy_list (values);
3170 for (p = values; p; p = TREE_CHAIN (p))
3172 tree complex_value = TREE_VALUE (p);
3175 complex_type = TREE_TYPE (complex_value);
3179 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3180 && targetm.calls.split_complex_arg (complex_type))
3183 tree real, imag, next;
3185 subtype = TREE_TYPE (complex_type);
3186 complex_value = save_expr (complex_value);
3187 real = build1 (REALPART_EXPR, subtype, complex_value);
3188 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3190 TREE_VALUE (p) = real;
3191 next = TREE_CHAIN (p);
3192 imag = build_tree_list (NULL_TREE, imag);
3193 TREE_CHAIN (p) = imag;
3194 TREE_CHAIN (imag) = next;
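/* For instance, a single _Complex double argument is rewritten as two
   consecutive double arguments: the real part reuses the existing list
   node and the imaginary part goes in the freshly inserted one. */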
3196 /* Skip the newly created node. */
3204 /* Traverse a list of TYPES and expand all complex types into their
3207 split_complex_types (tree types)
3211 /* Before allocating memory, check for the common case of no complex. */
3212 for (p = types; p; p = TREE_CHAIN (p))
3214 tree type = TREE_VALUE (p);
3215 if (TREE_CODE (type) == COMPLEX_TYPE
3216 && targetm.calls.split_complex_arg (type))
3222 types = copy_list (types);
3224 for (p = types; p; p = TREE_CHAIN (p))
3226 tree complex_type = TREE_VALUE (p);
3228 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3229 && targetm.calls.split_complex_arg (complex_type))
3233 /* Rewrite complex type with component type. */
3234 TREE_VALUE (p) = TREE_TYPE (complex_type);
3235 next = TREE_CHAIN (p);
3237 /* Add another component type for the imaginary part. */
3238 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3239 TREE_CHAIN (p) = imag;
3240 TREE_CHAIN (imag) = next;
3242 /* Skip the newly created node. */
3250 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3251 The RETVAL parameter specifies whether the return value needs to be saved;
3252 the other parameters are documented in the emit_library_call function below. */
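/* Illustrative only (hypothetical operands, not code from this file):
   callers normally reach this routine through the emit_library_call
   wrappers, roughly
     emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
                        op0, mode, op1, mode);
   where each argument rtx is followed by its machine mode. */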
3255 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3256 enum libcall_type fn_type,
3257 enum machine_mode outmode, int nargs, va_list p)
3259 /* Total size in bytes of all the stack-parms scanned so far. */
3260 struct args_size args_size;
3261 /* Size of arguments before any adjustments (such as rounding). */
3262 struct args_size original_args_size;
3268 CUMULATIVE_ARGS args_so_far;
3272 enum machine_mode mode;
3275 struct locate_and_pad_arg_data locate;
3279 int old_inhibit_defer_pop = inhibit_defer_pop;
3280 rtx call_fusage = 0;
3283 int pcc_struct_value = 0;
3284 int struct_value_size = 0;
3286 int reg_parm_stack_space = 0;
3289 tree tfom; /* type_for_mode (outmode, 0) */
3291 #ifdef REG_PARM_STACK_SPACE
3292 /* Define the boundary of the register parm stack space that needs to be
3294 int low_to_save, high_to_save;
3295 rtx save_area = 0; /* Place that it is saved. */
3298 /* Size of the stack reserved for parameter registers. */
3299 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3300 char *initial_stack_usage_map = stack_usage_map;
3301 char *stack_usage_map_buf = NULL;
3303 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3305 #ifdef REG_PARM_STACK_SPACE
3306 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3309 /* By default, library functions can not throw. */
3310 flags = ECF_NOTHROW;
3322 case LCT_CONST_MAKE_BLOCK:
3323 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3325 case LCT_PURE_MAKE_BLOCK:
3326 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3329 flags |= ECF_NORETURN;
3332 flags = ECF_NORETURN;
3334 case LCT_RETURNS_TWICE:
3335 flags = ECF_RETURNS_TWICE;
3340 /* Ensure current function's preferred stack boundary is at least
3342 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3343 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3345 /* If this kind of value comes back in memory,
3346 decide where in memory it should come back. */
3347 if (outmode != VOIDmode)
3349 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3350 if (aggregate_value_p (tfom, 0))
3352 #ifdef PCC_STATIC_STRUCT_RETURN
3354 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3355 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3356 pcc_struct_value = 1;
3358 value = gen_reg_rtx (outmode);
3359 #else /* not PCC_STATIC_STRUCT_RETURN */
3360 struct_value_size = GET_MODE_SIZE (outmode);
3361 if (value != 0 && MEM_P (value))
3364 mem_value = assign_temp (tfom, 0, 1, 1);
3366 /* This call returns a big structure. */
3367 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3371 tfom = void_type_node;
3373 /* ??? Unfinished: must pass the memory address as an argument. */
3375 /* Copy all the libcall-arguments out of the varargs data
3376 and into a vector ARGVEC.
3378 Compute how to pass each argument. We only support a very small subset
3379 of the full argument passing conventions to limit complexity here since
3380 library functions shouldn't have many args. */
3382 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3383 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3385 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3386 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3388 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3391 args_size.constant = 0;
3396 /* Now we are about to start emitting insns that can be deleted
3397 if a libcall is deleted. */
3398 if (flags & ECF_LIBCALL_BLOCK)
3403 /* If there's a structure value address to be passed,
3404 either pass it in the special place, or pass it as an extra argument. */
3405 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3407 rtx addr = XEXP (mem_value, 0);
3411 /* Make sure it is a reasonable operand for a move or push insn. */
3412 if (!REG_P (addr) && !MEM_P (addr)
3413 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3414 addr = force_operand (addr, NULL_RTX);
3416 argvec[count].value = addr;
3417 argvec[count].mode = Pmode;
3418 argvec[count].partial = 0;
3420 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3421 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3422 NULL_TREE, 1) == 0);
3424 locate_and_pad_parm (Pmode, NULL_TREE,
3425 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3428 argvec[count].reg != 0,
3430 0, NULL_TREE, &args_size, &argvec[count].locate);
3432 if (argvec[count].reg == 0 || argvec[count].partial != 0
3433 || reg_parm_stack_space > 0)
3434 args_size.constant += argvec[count].locate.size.constant;
3436 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3441 for (; count < nargs; count++)
3443 rtx val = va_arg (p, rtx);
3444 enum machine_mode mode = va_arg (p, enum machine_mode);
3446 /* We cannot convert the arg value to the mode the library wants here;
3447 must do it earlier where we know the signedness of the arg. */
3448 gcc_assert (mode != BLKmode
3449 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3451 /* Make sure it is a reasonable operand for a move or push insn. */
3452 if (!REG_P (val) && !MEM_P (val)
3453 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3454 val = force_operand (val, NULL_RTX);
3456 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3460 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3462 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3463 functions, so we have to pretend this isn't such a function. */
3464 if (flags & ECF_LIBCALL_BLOCK)
3466 rtx insns = get_insns ();
3470 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3472 /* If this was a CONST function, it is now PURE since
3473 it now reads memory. */
3474 if (flags & ECF_CONST)
3476 flags &= ~ECF_CONST;
3480 if (MEM_P (val) && !must_copy)
3484 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3486 emit_move_insn (slot, val);
3489 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3490 gen_rtx_USE (VOIDmode, slot),
3493 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3494 gen_rtx_CLOBBER (VOIDmode,
3499 val = force_operand (XEXP (slot, 0), NULL_RTX);
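/* From here on the argument passed is the address of the temporary slot,
   not the value itself. */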
3502 argvec[count].value = val;
3503 argvec[count].mode = mode;
3505 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3507 argvec[count].partial
3508 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3510 locate_and_pad_parm (mode, NULL_TREE,
3511 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3514 argvec[count].reg != 0,
3516 argvec[count].partial,
3517 NULL_TREE, &args_size, &argvec[count].locate);
3519 gcc_assert (!argvec[count].locate.size.var);
3521 if (argvec[count].reg == 0 || argvec[count].partial != 0
3522 || reg_parm_stack_space > 0)
3523 args_size.constant += argvec[count].locate.size.constant;
3525 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3528 /* If this machine requires an external definition for library
3529 functions, write one out. */
3530 assemble_external_libcall (fun);
3532 original_args_size = args_size;
3533 args_size.constant = (((args_size.constant
3534 + stack_pointer_delta
3538 - stack_pointer_delta);
3540 args_size.constant = MAX (args_size.constant,
3541 reg_parm_stack_space);
3543 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3544 args_size.constant -= reg_parm_stack_space;
3547 if (args_size.constant > current_function_outgoing_args_size)
3548 current_function_outgoing_args_size = args_size.constant;
3550 if (ACCUMULATE_OUTGOING_ARGS)
3552 /* Since the stack pointer will never be pushed, it is possible for
3553 the evaluation of a parm to clobber something we have already
3554 written to the stack. Since most function calls on RISC machines
3555 do not use the stack, this is uncommon, but must work correctly.
3557 Therefore, we save any area of the stack that was already written
3558 and that we are using. Here we set up to do this by making a new
3559 stack usage map from the old one.
3561 Another approach might be to try to reorder the argument
3562 evaluations to avoid this conflicting stack usage. */
3564 needed = args_size.constant;
3566 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3567 /* Since we will be writing into the entire argument area, the
3568 map must be allocated for its entire size, not just the part that
3569 is the responsibility of the caller. */
3570 needed += reg_parm_stack_space;
3573 #ifdef ARGS_GROW_DOWNWARD
3574 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3577 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3580 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3581 stack_usage_map = stack_usage_map_buf;
3583 if (initial_highest_arg_in_use)
3584 memcpy (stack_usage_map, initial_stack_usage_map,
3585 initial_highest_arg_in_use);
3587 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3588 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3589 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3592 /* We must be careful to use virtual regs before they're instantiated,
3593 and real regs afterwards. Loop optimization, for example, can create
3594 new libcalls after we've instantiated the virtual regs, and if we
3595 use virtuals anyway, they won't match the rtl patterns. */
3597 if (virtuals_instantiated)
3598 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3600 argblock = virtual_outgoing_args_rtx;
3605 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3608 /* If we push args individually in reverse order, perform stack alignment
3609 before the first push (the last arg). */
3610 if (argblock == 0 && PUSH_ARGS_REVERSED)
3611 anti_adjust_stack (GEN_INT (args_size.constant
3612 - original_args_size.constant));
3614 if (PUSH_ARGS_REVERSED)
3625 #ifdef REG_PARM_STACK_SPACE
3626 if (ACCUMULATE_OUTGOING_ARGS)
3628 /* The argument list is the property of the called routine and it
3629 may clobber it. If the fixed area has been used for previous
3630 parameters, we must save and restore it. */
3631 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3632 &low_to_save, &high_to_save);
3636 /* Push the args that need to be pushed. */
3638 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3639 are to be pushed. */
3640 for (count = 0; count < nargs; count++, argnum += inc)
3642 enum machine_mode mode = argvec[argnum].mode;
3643 rtx val = argvec[argnum].value;
3644 rtx reg = argvec[argnum].reg;
3645 int partial = argvec[argnum].partial;
3646 int lower_bound = 0, upper_bound = 0, i;
3648 if (! (reg != 0 && partial == 0))
3650 if (ACCUMULATE_OUTGOING_ARGS)
3652 /* If this is being stored into a pre-allocated, fixed-size,
3653 stack area, save any previous data at that location. */
3655 #ifdef ARGS_GROW_DOWNWARD
3656 /* stack_slot is negative, but we want to index stack_usage_map
3657 with positive values. */
3658 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3659 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3661 lower_bound = argvec[argnum].locate.offset.constant;
3662 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3666 /* Don't worry about things in the fixed argument area;
3667 it has already been saved. */
3668 if (i < reg_parm_stack_space)
3669 i = reg_parm_stack_space;
3670 while (i < upper_bound && stack_usage_map[i] == 0)
3673 if (i < upper_bound)
3675 /* We need to make a save area. */
3677 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3678 enum machine_mode save_mode
3679 = mode_for_size (size, MODE_INT, 1);
3681 = plus_constant (argblock,
3682 argvec[argnum].locate.offset.constant);
3684 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3686 if (save_mode == BLKmode)
3688 argvec[argnum].save_area
3689 = assign_stack_temp (BLKmode,
3690 argvec[argnum].locate.size.constant,
3693 emit_block_move (validize_mem (argvec[argnum].save_area),
3695 GEN_INT (argvec[argnum].locate.size.constant),
3696 BLOCK_OP_CALL_PARM);
3700 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3702 emit_move_insn (argvec[argnum].save_area, stack_area);
3707 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3708 partial, reg, 0, argblock,
3709 GEN_INT (argvec[argnum].locate.offset.constant),
3710 reg_parm_stack_space,
3711 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3713 /* Now mark the segment we just used. */
3714 if (ACCUMULATE_OUTGOING_ARGS)
3715 for (i = lower_bound; i < upper_bound; i++)
3716 stack_usage_map[i] = 1;
3720 if (flags & ECF_CONST)
3724 /* Indicate argument access so that alias.c knows that these
3727 use = plus_constant (argblock,
3728 argvec[argnum].locate.offset.constant);
3730 /* When arguments are pushed, trying to tell alias.c where
3731 exactly this argument is won't work, because the
3732 auto-increment causes confusion. So we merely indicate
3733 that we access something with a known mode somewhere on
3735 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3736 gen_rtx_SCRATCH (Pmode));
3737 use = gen_rtx_MEM (argvec[argnum].mode, use);
3738 use = gen_rtx_USE (VOIDmode, use);
3739 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3744 /* If we pushed args in forward order, perform stack alignment
3745 after pushing the last arg. */
3746 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3747 anti_adjust_stack (GEN_INT (args_size.constant
3748 - original_args_size.constant));
3750 if (PUSH_ARGS_REVERSED)
3755 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3757 /* Now load any reg parms into their regs. */
3759 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3760 are to be pushed. */
3761 for (count = 0; count < nargs; count++, argnum += inc)
3763 enum machine_mode mode = argvec[argnum].mode;
3764 rtx val = argvec[argnum].value;
3765 rtx reg = argvec[argnum].reg;
3766 int partial = argvec[argnum].partial;
3768 /* Handle calls that pass values in multiple non-contiguous
3769 locations. The PA64 has examples of this for library calls. */
3770 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3771 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3772 else if (reg != 0 && partial == 0)
3773 emit_move_insn (reg, val);
3778 /* Any regs containing parms remain in use through the call. */
3779 for (count = 0; count < nargs; count++)
3781 rtx reg = argvec[count].reg;
3782 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3783 use_group_regs (&call_fusage, reg);
3785 use_reg (&call_fusage, reg);
3788 /* Pass the function the address in which to return a structure value. */
3789 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3791 emit_move_insn (struct_value,
3793 force_operand (XEXP (mem_value, 0),
3795 if (REG_P (struct_value))
3796 use_reg (&call_fusage, struct_value);
3799 /* Don't allow popping to be deferred, since then
3800 cse'ing of library calls could delete a call and leave the pop. */
3802 valreg = (mem_value == 0 && outmode != VOIDmode
3803 ? hard_libcall_value (outmode) : NULL_RTX);
3805 /* Stack must be properly aligned now. */
3806 gcc_assert (!(stack_pointer_delta
3807 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3809 before_call = get_last_insn ();
3811 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3812 will set inhibit_defer_pop to that value. */
3813 /* The return type is needed to decide how many bytes the function pops.
3814 Signedness plays no role in that, so for simplicity, we pretend it's
3815 always signed. We also assume that the list of arguments passed has
3816 no impact, so we pretend it is unknown. */
3818 emit_call_1 (fun, NULL,
3819 get_identifier (XSTR (orgfun, 0)),
3820 build_function_type (tfom, NULL_TREE),
3821 original_args_size.constant, args_size.constant,
3823 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3825 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3827 /* For calls to `setjmp', etc., inform flow.c it should complain
3828 if nonvolatile values are live. For functions that cannot return,
3829 inform flow that control does not fall through. */
3831 if (flags & ECF_NORETURN)
3833 /* The barrier note must be emitted
3834 immediately after the CALL_INSN. Some ports emit more than
3835 just a CALL_INSN above, so we must search for it here. */
3837 rtx last = get_last_insn ();
3838 while (!CALL_P (last))
3840 last = PREV_INSN (last);
3841 /* There was no CALL_INSN? */
3842 gcc_assert (last != before_call);
3845 emit_barrier_after (last);
3848 /* Now restore inhibit_defer_pop to its actual original value. */
3851 /* If call is cse'able, make appropriate pair of reg-notes around it.
3852 Test valreg so we don't crash; may safely ignore `const'
3853 if return type is void. Disable for PARALLEL return values, because
3854 we have no way to move such values into a pseudo register. */
3855 if (flags & ECF_LIBCALL_BLOCK)
3861 insns = get_insns ();
3871 if (GET_CODE (valreg) == PARALLEL)
3873 temp = gen_reg_rtx (outmode);
3874 emit_group_store (temp, valreg, NULL_TREE,
3875 GET_MODE_SIZE (outmode));
3879 temp = gen_reg_rtx (GET_MODE (valreg));
3881 /* Construct an "equal form" for the value which mentions all the
3882 arguments in order as well as the function name. */
3883 for (i = 0; i < nargs; i++)
3884 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3885 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3887 insns = get_insns ();
3890 if (flags & ECF_PURE)
3891 note = gen_rtx_EXPR_LIST (VOIDmode,
3892 gen_rtx_USE (VOIDmode,
3893 gen_rtx_MEM (BLKmode,
3894 gen_rtx_SCRATCH (VOIDmode))),
3897 emit_libcall_block (insns, temp, valreg, note);
3904 /* Copy the value to the right place. */
3905 if (outmode != VOIDmode && retval)
3911 if (value != mem_value)
3912 emit_move_insn (value, mem_value);
3914 else if (GET_CODE (valreg) == PARALLEL)
3917 value = gen_reg_rtx (outmode);
3918 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3922 /* Convert to the proper mode if PROMOTE_MODE has been active. */
3923 if (GET_MODE (valreg) != outmode)
3925 int unsignedp = TYPE_UNSIGNED (tfom);
3927 gcc_assert (targetm.calls.promote_function_return (tfom));
3928 gcc_assert (promote_mode (tfom, outmode, &unsignedp, 0)
3929 == GET_MODE (valreg));
3931 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
3935 emit_move_insn (value, valreg);
3941 if (ACCUMULATE_OUTGOING_ARGS)
3943 #ifdef REG_PARM_STACK_SPACE
3945 restore_fixed_argument_area (save_area, argblock,
3946 high_to_save, low_to_save);
3949 /* If we saved any argument areas, restore them. */
3950 for (count = 0; count < nargs; count++)
3951 if (argvec[count].save_area)
3953 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3954 rtx adr = plus_constant (argblock,
3955 argvec[count].locate.offset.constant);
3956 rtx stack_area = gen_rtx_MEM (save_mode,
3957 memory_address (save_mode, adr));
3959 if (save_mode == BLKmode)
3960 emit_block_move (stack_area,
3961 validize_mem (argvec[count].save_area),
3962 GEN_INT (argvec[count].locate.size.constant),
3963 BLOCK_OP_CALL_PARM);
3965 emit_move_insn (stack_area, argvec[count].save_area);
3968 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3969 stack_usage_map = initial_stack_usage_map;
3972 if (stack_usage_map_buf)
3973 free (stack_usage_map_buf);
3979 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
3981 for a value of mode OUTMODE,
3982 with NARGS different arguments, passed as alternating rtx values
3983 and machine_modes to convert them to.
3985 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3986 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3987 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3988 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3989 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3990 or another LCT_ value for other types of library calls. */
3993 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3994 enum machine_mode outmode, int nargs, ...)
3998 va_start (p, nargs);
3999 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4003 /* Like emit_library_call except that an extra argument, VALUE,
4004 comes second and says where to store the result.
4005 (If VALUE is zero, this function chooses a convenient way
4006 to return the value.)
4008 This function returns an rtx for where the value is to be found.
4009 If VALUE is nonzero, VALUE is returned. */
4012 emit_library_call_value (rtx orgfun, rtx value,
4013 enum libcall_type fn_type,
4014 enum machine_mode outmode, int nargs, ...)
4019 va_start (p, nargs);
4020 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
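/* Illustrative sketch (not part of the original source): both wrappers
   above use the standard C idiom of collecting their trailing arguments
   with va_start and handing the va_list to a single worker, just as
   emit_library_call and emit_library_call_value forward to
   emit_library_call_value_1.  A minimal standalone version of the pattern,
   with hypothetical names (sum, sum_1):  */
#if 0
#include <stdarg.h>

/* Worker that consumes the caller's va_list.  */
static int
sum_1 (int nargs, va_list ap)
{
  int total = 0;
  while (nargs-- > 0)
    total += va_arg (ap, int);
  return total;
}

/* Variadic wrapper: it only builds the va_list and forwards it.  */
static int
sum (int nargs, ...)
{
  va_list ap;
  int total;

  va_start (ap, nargs);
  total = sum_1 (nargs, ap);
  va_end (ap);
  return total;
}
#endif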
4027 /* Store a single argument for a function call
4028 into the register or memory area where it must be passed.
4029 *ARG describes the argument value and where to pass it.
4031 ARGBLOCK is the address of the stack-block for all the arguments,
4032 or 0 on a machine where arguments are pushed individually.
4034 ECF_MAY_BE_ALLOCA in FLAGS says this could be a call to `alloca',
4035 so we must be careful about how the stack is used.
4037 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4038 argument area. This is used with ACCUMULATE_OUTGOING_ARGS to indicate
4039 that we need not worry about saving and restoring the stack.
4041 FNDECL is the declaration of the function we are calling.
4043 Return nonzero if this arg should cause sibcall failure, zero otherwise.  */
4047 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4048 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4050 tree pval = arg->tree_value;
4054 int i, lower_bound = 0, upper_bound = 0;
4055 int sibcall_failure = 0;
4057 if (TREE_CODE (pval) == ERROR_MARK)
4060 /* Push a new temporary level for any temporaries we make for this argument.  */
4064 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4066 /* If this is being stored into a pre-allocated, fixed-size stack area,
4067 save any previous data at that location. */
4068 if (argblock && ! variable_size && arg->stack)
4070 #ifdef ARGS_GROW_DOWNWARD
4071 /* stack_slot is negative, but we want to index stack_usage_map
4072 with positive values. */
4073 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4074 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4078 lower_bound = upper_bound - arg->locate.size.constant;
4080 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4081 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4085 upper_bound = lower_bound + arg->locate.size.constant;
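/* Illustrative sketch (not part of the original source): the two
   preprocessor branches above turn the slot's constant offset from the
   argument pointer into a non-negative [lower_bound, upper_bound) range of
   stack_usage_map indices, for either direction of argument growth.  A
   hypothetical standalone form of the PLUS-offset case, mirroring the
   arithmetic above:  */
#if 0
static void
slot_bounds (long offset, long size, int args_grow_downward,
	     long *lower, long *upper)
{
  if (args_grow_downward)
    {
      /* Offsets are negative; index the map with positive values.  */
      *upper = -offset + 1;
      *lower = *upper - size;
    }
  else
    {
      *lower = offset;
      *upper = *lower + size;
    }
}
#endif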
4089 /* Don't worry about things in the fixed argument area;
4090 it has already been saved. */
4091 if (i < reg_parm_stack_space)
4092 i = reg_parm_stack_space;
4093 while (i < upper_bound && stack_usage_map[i] == 0)
4096 if (i < upper_bound)
4098 /* We need to make a save area. */
4099 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4100 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4101 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4102 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4104 if (save_mode == BLKmode)
4106 tree ot = TREE_TYPE (arg->tree_value);
4107 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4108 | TYPE_QUAL_CONST));
4110 arg->save_area = assign_temp (nt, 0, 1, 1);
4111 preserve_temp_slots (arg->save_area);
4112 emit_block_move (validize_mem (arg->save_area), stack_area,
4113 GEN_INT (arg->locate.size.constant),
4114 BLOCK_OP_CALL_PARM);
4118 arg->save_area = gen_reg_rtx (save_mode);
4119 emit_move_insn (arg->save_area, stack_area);
4125 /* If this isn't going to be placed on both the stack and in registers,
4126 set up the register and number of words. */
4127 if (! arg->pass_on_stack)
4129 if (flags & ECF_SIBCALL)
4130 reg = arg->tail_call_reg;
4133 partial = arg->partial;
4136 /* Being passed entirely in a register. We shouldn't be called in this case.  */
4138 gcc_assert (reg == 0 || partial != 0);
4140 /* If this arg needs special alignment, don't load the registers here.  */
4142 if (arg->n_aligned_regs != 0)
4145 /* If this is being passed partially in a register, we can't evaluate
4146 it directly into its stack slot. Otherwise, we can. */
4147 if (arg->value == 0)
4149 /* stack_arg_under_construction is nonzero if a function argument is
4150 being evaluated directly into the outgoing argument list and
4151 expand_call must take special action to preserve the argument list
4152 if it is called recursively.
4154 For scalar function arguments stack_usage_map is sufficient to
4155 determine which stack slots must be saved and restored. Scalar
4156 arguments in general have pass_on_stack == 0.
4158 If this argument is initialized by a function which takes the
4159 address of the argument (a C++ constructor or a C function
4160 returning a BLKmode structure), then stack_usage_map is
4161 insufficient and expand_call must push the stack around the
4162 function call. Such arguments have pass_on_stack == 1.
4164 Note that it is always safe to set stack_arg_under_construction,
4165 but this generates suboptimal code if set when not needed. */
4167 if (arg->pass_on_stack)
4168 stack_arg_under_construction++;
4170 arg->value = expand_expr (pval,
4172 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4173 ? NULL_RTX : arg->stack,
4174 VOIDmode, EXPAND_STACK_PARM);
4176 /* If the mode doesn't agree (because we promoted the object, or for
4177 any other reason), convert it to the proper mode. */
4179 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4180 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4181 arg->value, arg->unsignedp);
4183 if (arg->pass_on_stack)
4184 stack_arg_under_construction--;
4187 /* Check for overlap with already clobbered argument area. */
4188 if ((flags & ECF_SIBCALL)
4189 && MEM_P (arg->value)
4190 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4191 arg->locate.size.constant))
4192 sibcall_failure = 1;
4194 /* Don't allow anything left on the stack from the computation
4195 of an argument to alloca. */
4196 if (flags & ECF_MAY_BE_ALLOCA)
4197 do_pending_stack_adjust ();
4199 if (arg->value == arg->stack)
4200 /* If the value is already in the stack slot, we are done. */
4202 else if (arg->mode != BLKmode)
4205 unsigned int parm_align;
4207 /* Argument is a scalar, not entirely passed in registers.
4208 (If part is passed in registers, arg->partial says how much
4209 and emit_push_insn will take care of putting it there.)
4211 Push it, and if its size is less than the
4212 amount of space allocated to it,
4213 also bump stack pointer by the additional space.
4214 Note that in C the default argument promotions
4215 will prevent such mismatches. */
4217 size = GET_MODE_SIZE (arg->mode);
4218 /* Compute how much space the push instruction will push.
4219 On many machines, pushing a byte will advance the stack
4220 pointer by a halfword. */
4221 #ifdef PUSH_ROUNDING
4222 size = PUSH_ROUNDING (size);
4226 /* Compute how much space the argument should get:
4227 round up to a multiple of the alignment for arguments. */
4228 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4229 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4230 / (PARM_BOUNDARY / BITS_PER_UNIT))
4231 * (PARM_BOUNDARY / BITS_PER_UNIT));
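/* Illustrative sketch (not part of the original source): the expression
   above is the usual integer round-up idiom, rounding SIZE up to the next
   multiple of PARM_BOUNDARY / BITS_PER_UNIT.  Standalone, with a
   hypothetical helper name:  */
#if 0
static unsigned int
round_up_to_unit (unsigned int size, unsigned int unit)
{
  return ((size + unit - 1) / unit) * unit;
}
/* round_up_to_unit (1, 4) == 4, round_up_to_unit (4, 4) == 4,
   round_up_to_unit (5, 4) == 8.  */
#endif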
4233 /* Compute the alignment of the pushed argument. */
4234 parm_align = arg->locate.boundary;
4235 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4237 int pad = used - size;
4240 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4241 parm_align = MIN (parm_align, pad_align);
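/* Illustrative sketch (not part of the original source): "pad & -pad"
   isolates the lowest set bit of PAD, i.e. the largest power of two that
   divides PAD, so a downward-padded argument is only known to start on
   that byte alignment (converted to bits above by BITS_PER_UNIT).  A
   standalone form, with a hypothetical helper name:  */
#if 0
static int
lowest_set_bit (int pad)
{
  return pad & -pad;
}
/* lowest_set_bit (6) == 2, lowest_set_bit (8) == 8,
   lowest_set_bit (12) == 4.  */
#endif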
4245 /* This isn't already where we want it on the stack, so put it there.
4246 This can either be done with push or copy insns. */
4247 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4248 parm_align, partial, reg, used - size, argblock,
4249 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4250 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4252 /* Unless this is a partially-in-register argument, the argument is now in the stack.  */
4255 arg->value = arg->stack;
4259 /* BLKmode, at least partly to be pushed. */
4261 unsigned int parm_align;
4265 /* Pushing a nonscalar.
4266 If part is passed in registers, PARTIAL says how much
4267 and emit_push_insn will take care of putting it there. */
4269 /* Round its size up to a multiple
4270 of the allocation unit for arguments. */
4272 if (arg->locate.size.var != 0)
4275 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4279 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4280 for BLKmode is careful to avoid it. */
4281 excess = (arg->locate.size.constant
4282 - int_size_in_bytes (TREE_TYPE (pval))
4284 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4285 NULL_RTX, TYPE_MODE (sizetype), 0);
4288 parm_align = arg->locate.boundary;
4290 /* When an argument is padded down, the block is aligned to
4291 PARM_BOUNDARY, but the actual argument isn't. */
4292 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4294 if (arg->locate.size.var)
4295 parm_align = BITS_PER_UNIT;
4298 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4299 parm_align = MIN (parm_align, excess_align);
4303 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4305 /* emit_push_insn might not work properly if arg->value and
4306 argblock + arg->locate.offset areas overlap. */
4310 if (XEXP (x, 0) == current_function_internal_arg_pointer
4311 || (GET_CODE (XEXP (x, 0)) == PLUS
4312 && XEXP (XEXP (x, 0), 0) ==
4313 current_function_internal_arg_pointer
4314 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4316 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4317 i = INTVAL (XEXP (XEXP (x, 0), 1));
4319 /* expand_call should ensure this. */
4320 gcc_assert (!arg->locate.offset.var
4321 && GET_CODE (size_rtx) == CONST_INT);
4323 if (arg->locate.offset.constant > i)
4325 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4326 sibcall_failure = 1;
4328 else if (arg->locate.offset.constant < i)
4330 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4331 sibcall_failure = 1;
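/* Illustrative sketch (not part of the original source): the tests above
   are a half-open interval overlap check between the incoming argument
   area starting at I and the outgoing slot starting at
   arg->locate.offset.constant, both INTVAL (size_rtx) bytes long; an exact
   coincidence of the two start addresses is deliberately not treated as a
   failure.  A standalone form, with a hypothetical helper name:  */
#if 0
static int
areas_partially_overlap (long dest_offset, long src_offset, long size)
{
  if (dest_offset > src_offset)
    return dest_offset < src_offset + size;
  else if (dest_offset < src_offset)
    return src_offset < dest_offset + size;
  return 0;  /* Same start address: same slot, not a partial overlap.  */
}
#endif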
4336 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4337 parm_align, partial, reg, excess, argblock,
4338 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4339 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4341 /* Unless this is a partially-in-register argument, the argument is now in the stack.
4344 ??? Unlike the case above, in which we want the actual
4345 address of the data, so that we can load it directly into a
4346 register, here we want the address of the stack slot, so that
4347 it's properly aligned for word-by-word copying or something
4348 like that. It's not clear that this is always correct. */
4350 arg->value = arg->stack_slot;
4353 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4355 tree type = TREE_TYPE (arg->tree_value);
4357 = emit_group_load_into_temps (arg->reg, arg->value, type,
4358 int_size_in_bytes (type));
4361 /* Mark all slots this store used. */
4362 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4363 && argblock && ! variable_size && arg->stack)
4364 for (i = lower_bound; i < upper_bound; i++)
4365 stack_usage_map[i] = 1;
4367 /* Once we have pushed something, pops can't safely
4368 be deferred during the rest of the arguments. */
4371 /* Free any temporary slots made in processing this argument. Show
4372 that we might have taken the address of something and pushed that as an operand.  */
4374 preserve_temp_slots (NULL_RTX);
4378 return sibcall_failure;
4381 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4384 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4390 /* If the type has variable size... */
4391 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4394 /* If the type is marked as addressable (it is required
4395 to be constructed into the stack)... */
4396 if (TREE_ADDRESSABLE (type))
4402 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4403 takes trailing padding of a structure into account. */
4404 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4407 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4412 /* If the type has variable size... */
4413 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4416 /* If the type is marked as addressable (it is required
4417 to be constructed into the stack)... */
4418 if (TREE_ADDRESSABLE (type))
4421 /* If the padding and mode of the type are such that a copy into
4422 a register would put it into the wrong part of the register. */
4424 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4425 && (FUNCTION_ARG_PADDING (mode, type)
4426 == (BYTES_BIG_ENDIAN ? upward : downward)))