1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
39 #include "langhooks.h"
44 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
45 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
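/* Worked example (illustrative, not from the original source): on a target
   with a 64-bit preferred stack boundary and 8-bit units, STACK_BYTES is
   64 / 8 == 8, i.e. outgoing argument blocks are kept 8-byte aligned.  */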
47 /* Data structure and subroutines used within expand_call. */
51 /* Tree node for this argument. */
53 /* Mode for value; TYPE_MODE unless promoted. */
54 enum machine_mode mode;
55 /* Current RTL value for argument, or 0 if it isn't precomputed. */
57 /* Initially-computed RTL value for argument; only for const functions. */
59 /* Register to pass this argument in, 0 if passed on stack, or a
60 PARALLEL if the arg is to be copied into multiple non-contiguous
63 /* Register to pass this argument in when generating tail call sequence.
64 This is not the same register as for normal calls on machines with
67 /* If REG was promoted from the actual mode of the argument expression,
68 indicates whether the promotion is sign- or zero-extended. */
70 /* Number of registers to use. 0 means put the whole arg in registers.
71 Also 0 if not passed in registers. */
73 /* Nonzero if argument must be passed on stack.
74 Note that some arguments may be passed on the stack
75 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
76 pass_on_stack identifies arguments that *cannot* go in registers. */
78 /* Some fields packaged up for locate_and_pad_parm. */
79 struct locate_and_pad_arg_data locate;
80 /* Location on the stack at which parameter should be stored. The store
81 has already been done if STACK == VALUE. */
83 /* Location on the stack of the start of this argument slot. This can
84 differ from STACK if this arg pads downward. This location is known
85 to be aligned to FUNCTION_ARG_BOUNDARY. */
89 /* Place where this stack area has been saved, if needed. */
89 /* If an argument's alignment does not permit direct copying into registers,
90 copy in smaller-sized pieces into pseudos. These are stored in a
91 block pointed to by this field. The next field says how many
92 word-sized pseudos we made. */
97 /* A vector of one char per byte of stack space. A byte is nonzero if
98 the corresponding stack location has been used.
99 This vector is used to prevent a function call within an argument from
100 clobbering any stack already set up. */
101 static char *stack_usage_map;
103 /* Size of STACK_USAGE_MAP. */
104 static int highest_outgoing_arg_in_use;
106 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
107 stack location's tail call argument has already been stored into the stack.
108 This bitmap is used to prevent sibling call optimization if the function tries
109 to use its parent's incoming argument slots after they have already been
110 overwritten with tail call arguments. */
111 static sbitmap stored_args_map;
113 /* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118 int stack_arg_under_construction;
120 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
121 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
123 static void precompute_register_parameters (int, struct arg_data *, int *);
124 static int store_one_arg (struct arg_data *, rtx, int, int, int);
125 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
126 static int finalize_must_preallocate (int, int, struct arg_data *,
128 static void precompute_arguments (int, int, struct arg_data *);
129 static int compute_argument_block_size (int, struct args_size *, int);
130 static void initialize_argument_information (int, struct arg_data *,
131 struct args_size *, int, tree,
132 tree, CUMULATIVE_ARGS *, int,
133 rtx *, int *, int *, int *,
135 static void compute_argument_addresses (struct arg_data *, rtx, int);
136 static rtx rtx_for_function_call (tree, tree);
137 static void load_register_parameters (struct arg_data *, int, rtx *, int,
139 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
140 enum machine_mode, int, va_list);
141 static int special_function_p (tree, int);
142 static int check_sibcall_argument_overlap_1 (rtx);
143 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
145 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
147 static tree fix_unsafe_tree (tree);
148 static bool shift_returned_value (tree, rtx *);
150 #ifdef REG_PARM_STACK_SPACE
151 static rtx save_fixed_argument_area (int, rtx, int *, int *);
152 static void restore_fixed_argument_area (rtx, rtx, int, int);
155 /* Force FUNEXP into a form suitable for the address of a CALL,
156 and return that as an rtx. Also load the static chain register
157 if FNDECL is a nested function.
159 CALL_FUSAGE points to a variable holding the prospective
160 CALL_INSN_FUNCTION_USAGE information. */
163 prepare_call_address (rtx funexp, rtx static_chain_value,
164 rtx *call_fusage, int reg_parm_seen, int sibcallp)
166 /* Make a valid memory address and copy constants through pseudo-regs,
167 but not for a constant address if -fno-function-cse. */
168 if (GET_CODE (funexp) != SYMBOL_REF)
169 /* If we are using registers for parameters, force the
170 function address into a register now. */
171 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
172 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
173 : memory_address (FUNCTION_MODE, funexp));
176 #ifndef NO_FUNCTION_CSE
177 if (optimize && ! flag_no_function_cse)
178 funexp = force_reg (Pmode, funexp);
182 if (static_chain_value != 0)
184 static_chain_value = convert_memory_address (Pmode, static_chain_value);
185 emit_move_insn (static_chain_rtx, static_chain_value);
187 if (REG_P (static_chain_rtx))
188 use_reg (call_fusage, static_chain_rtx);
194 /* Generate instructions to call function FUNEXP,
195 and optionally pop the results.
196 The CALL_INSN is the first insn generated.
198 FNDECL is the declaration node of the function. This is given to the
199 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
201 FUNTYPE is the data type of the function. This is given to the macro
202 RETURN_POPS_ARGS to determine whether this function pops its own args.
203 We used to allow an identifier for library functions, but that doesn't
204 work when the return type is an aggregate type and the calling convention
205 says that the pointer to this aggregate is to be popped by the callee.
207 STACK_SIZE is the number of bytes of arguments on the stack,
208 ROUNDED_STACK_SIZE is that number rounded up to
209 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
210 both to put into the call insn and to generate explicit popping
213 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
214 It is zero if this call doesn't want a structure value.
216 NEXT_ARG_REG is the rtx that results from executing
217 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
218 just after all the args have had their registers assigned.
219 This could be whatever you like, but normally it is the first
220 arg-register beyond those used for args in this call,
221 or 0 if all the arg-registers are used in this call.
222 It is passed on to `gen_call' so you can put this info in the call insn.
224 VALREG is a hard register in which a value is returned,
225 or 0 if the call does not return a value.
227 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
228 the args to this call were processed.
229 We restore `inhibit_defer_pop' to that value.
231 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
232 denote registers used by the called function. */
235 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
236 tree funtype ATTRIBUTE_UNUSED,
237 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
238 HOST_WIDE_INT rounded_stack_size,
239 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
240 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
241 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
242 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
244 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
246 int already_popped = 0;
247 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
248 #if defined (HAVE_call) && defined (HAVE_call_value)
249 rtx struct_value_size_rtx;
250 struct_value_size_rtx = GEN_INT (struct_value_size);
253 #ifdef CALL_POPS_ARGS
254 n_popped += CALL_POPS_ARGS (* args_so_far);
257 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
258 and we don't want to load it into a register as an optimization,
259 because prepare_call_address already did it if it should be done. */
260 if (GET_CODE (funexp) != SYMBOL_REF)
261 funexp = memory_address (FUNCTION_MODE, funexp);
263 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
264 if ((ecf_flags & ECF_SIBCALL)
265 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
266 && (n_popped > 0 || stack_size == 0))
268 rtx n_pop = GEN_INT (n_popped);
271 /* If this subroutine pops its own args, record that in the call insn
272 if possible, for the sake of frame pointer elimination. */
275 pat = GEN_SIBCALL_VALUE_POP (valreg,
276 gen_rtx_MEM (FUNCTION_MODE, funexp),
277 rounded_stack_size_rtx, next_arg_reg,
280 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg, n_pop);
283 emit_call_insn (pat);
289 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
290 /* If the target has "call" or "call_value" insns, then prefer them
291 if no arguments are actually popped. If the target does not have
292 "call" or "call_value" insns, then we must use the popping versions
293 even if the call has no arguments to pop. */
294 #if defined (HAVE_call) && defined (HAVE_call_value)
295 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
296 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
298 if (HAVE_call_pop && HAVE_call_value_pop)
301 rtx n_pop = GEN_INT (n_popped);
304 /* If this subroutine pops its own args, record that in the call insn
305 if possible, for the sake of frame pointer elimination. */
308 pat = GEN_CALL_VALUE_POP (valreg,
309 gen_rtx_MEM (FUNCTION_MODE, funexp),
310 rounded_stack_size_rtx, next_arg_reg, n_pop);
312 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
313 rounded_stack_size_rtx, next_arg_reg, n_pop);
315 emit_call_insn (pat);
321 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
322 if ((ecf_flags & ECF_SIBCALL)
323 && HAVE_sibcall && HAVE_sibcall_value)
326 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
327 gen_rtx_MEM (FUNCTION_MODE, funexp),
328 rounded_stack_size_rtx,
329 next_arg_reg, NULL_RTX));
331 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx, next_arg_reg,
333 struct_value_size_rtx));
338 #if defined (HAVE_call) && defined (HAVE_call_value)
339 if (HAVE_call && HAVE_call_value)
342 emit_call_insn (GEN_CALL_VALUE (valreg,
343 gen_rtx_MEM (FUNCTION_MODE, funexp),
344 rounded_stack_size_rtx, next_arg_reg,
347 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
349 struct_value_size_rtx));
355 /* Find the call we just emitted. */
356 call_insn = last_call_insn ();
358 /* Mark memory as used for "pure" function call. */
359 if (ecf_flags & ECF_PURE)
363 gen_rtx_USE (VOIDmode,
364 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
367 /* Put the register usage information there. */
368 add_function_usage_to (call_insn, call_fusage);
370 /* If this is a const call, then set the insn's unchanging bit. */
371 if (ecf_flags & (ECF_CONST | ECF_PURE))
372 CONST_OR_PURE_CALL_P (call_insn) = 1;
374 /* If this call can't throw, attach a REG_EH_REGION reg note to that
376 if (ecf_flags & ECF_NOTHROW)
377 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
378 REG_NOTES (call_insn));
381 int rn = lookup_stmt_eh_region (fntree);
383 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
384 throw, which we already took care of. */
386 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
387 REG_NOTES (call_insn));
388 note_current_region_may_contain_throw ();
391 if (ecf_flags & ECF_NORETURN)
392 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
393 REG_NOTES (call_insn));
394 if (ecf_flags & ECF_ALWAYS_RETURN)
395 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
396 REG_NOTES (call_insn));
398 if (ecf_flags & ECF_RETURNS_TWICE)
400 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
401 REG_NOTES (call_insn));
402 current_function_calls_setjmp = 1;
405 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
407 /* Restore this now, so that we do defer pops for this call's args
408 if the context of the call as a whole permits. */
409 inhibit_defer_pop = old_inhibit_defer_pop;
414 CALL_INSN_FUNCTION_USAGE (call_insn)
415 = gen_rtx_EXPR_LIST (VOIDmode,
416 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
417 CALL_INSN_FUNCTION_USAGE (call_insn));
418 rounded_stack_size -= n_popped;
419 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
420 stack_pointer_delta -= n_popped;
423 if (!ACCUMULATE_OUTGOING_ARGS)
425 /* If returning from the subroutine does not automatically pop the args,
426 we need an instruction to pop them sooner or later.
427 Perhaps do it now; perhaps just record how much space to pop later.
429 If returning from the subroutine does pop the args, indicate that the
430 stack pointer will be changed. */
432 if (rounded_stack_size != 0)
434 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
435 /* Just pretend we did the pop. */
436 stack_pointer_delta -= rounded_stack_size;
437 else if (flag_defer_pop && inhibit_defer_pop == 0
438 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
439 pending_stack_adjust += rounded_stack_size;
441 adjust_stack (rounded_stack_size_rtx);
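/* Worked example (illustrative, not from the original source): for a call
   that pushed 24 bytes of arguments where the callee itself pops 8 of them
   (n_popped == 8), rounded_stack_size was reduced to 16 above; those 16
   bytes are then either merely recorded (ECF_SP_DEPRESSED and friends),
   folded into pending_stack_adjust when pops may be deferred, or popped
   immediately by adjust_stack.  */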
444 /* When we accumulate outgoing args, we must avoid any stack manipulations.
445 Restore the stack pointer to its original value now. Usually
446 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
447 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
448 popping variants of functions exist as well.
450 ??? We may optimize similarly to defer_pop above, but it is
451 probably not worthwhile.
453 ??? It will be worthwhile to enable combine_stack_adjustments even for
456 anti_adjust_stack (GEN_INT (n_popped));
459 /* Determine if the function identified by NAME and FNDECL is one with
460 special properties we wish to know about.
462 For example, if the function might return more than one time (setjmp), then
463 set RETURNS_TWICE to a nonzero value.
465 Similarly set LONGJMP if the function is in the longjmp family.
467 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
468 space from the stack such as alloca. */
471 special_function_p (tree fndecl, int flags)
473 if (fndecl && DECL_NAME (fndecl)
474 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
475 /* Exclude functions not at the file scope, or not `extern',
476 since they are not the magic functions we would otherwise
478 FIXME: this should be handled with attributes, not with this
479 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
480 because you can declare fork() inside a function if you
482 && (DECL_CONTEXT (fndecl) == NULL_TREE
483 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
484 && TREE_PUBLIC (fndecl))
486 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
487 const char *tname = name;
489 /* We assume that alloca will always be called by name. It
490 makes no sense to pass it as a pointer-to-function to
491 anything that does not understand its behavior. */
492 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && ! strcmp (name, "alloca"))
495 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && ! strcmp (name, "__builtin_alloca"))))
498 flags |= ECF_MAY_BE_ALLOCA;
500 /* Disregard prefix _, __ or __x. */
503 if (name[1] == '_' && name[2] == 'x')
505 else if (name[1] == '_')
514 && (! strcmp (tname, "setjmp")
515 || ! strcmp (tname, "setjmp_syscall")))
517 && ! strcmp (tname, "sigsetjmp"))
519 && ! strcmp (tname, "savectx")))
520 flags |= ECF_RETURNS_TWICE;
523 && ! strcmp (tname, "siglongjmp"))
524 flags |= ECF_LONGJMP;
526 else if ((tname[0] == 'q' && tname[1] == 's'
527 && ! strcmp (tname, "qsetjmp"))
528 || (tname[0] == 'v' && tname[1] == 'f'
529 && ! strcmp (tname, "vfork")))
530 flags |= ECF_RETURNS_TWICE;
532 else if (tname[0] == 'l' && tname[1] == 'o'
533 && ! strcmp (tname, "longjmp"))
534 flags |= ECF_LONGJMP;
540 /* Return nonzero when FNDECL represents a call to setjmp (or another function that may return more than once). */
543 setjmp_call_p (tree fndecl)
545 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
548 /* Return true when EXP contains an alloca call. */
550 alloca_call_p (tree exp)
552 if (TREE_CODE (exp) == CALL_EXPR
553 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
554 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
556 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
557 0) & ECF_MAY_BE_ALLOCA))
562 /* Detect flags (function attributes) from the function decl or type node. */
565 flags_from_decl_or_type (tree exp)
572 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
573 type = TREE_TYPE (exp);
577 if (i->pure_function)
578 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
579 if (i->const_function)
580 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
583 /* The function exp may have the `malloc' attribute. */
584 if (DECL_IS_MALLOC (exp))
587 /* The function exp may have the `pure' attribute. */
588 if (DECL_IS_PURE (exp))
589 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
591 if (TREE_NOTHROW (exp))
592 flags |= ECF_NOTHROW;
594 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
595 flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
597 flags = special_function_p (exp, flags);
599 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
602 if (TREE_THIS_VOLATILE (exp))
603 flags |= ECF_NORETURN;
605 /* Mark if the function returns with the stack pointer depressed. We
606 cannot consider it pure or constant in that case. */
607 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
609 flags |= ECF_SP_DEPRESSED;
610 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
616 /* Detect flags from a CALL_EXPR. */
619 call_expr_flags (tree t)
622 tree decl = get_callee_fndecl (t);
625 flags = flags_from_decl_or_type (decl);
628 t = TREE_TYPE (TREE_OPERAND (t, 0));
629 if (t && TREE_CODE (t) == POINTER_TYPE)
630 flags = flags_from_decl_or_type (TREE_TYPE (t));
638 /* Precompute all register parameters as described by ARGS, storing values
639 into fields within the ARGS array.
641 NUM_ACTUALS indicates the total number of elements in the ARGS array.
643 Set REG_PARM_SEEN if we encounter a register parameter. */
646 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
652 for (i = 0; i < num_actuals; i++)
653 if (args[i].reg != 0 && ! args[i].pass_on_stack)
657 if (args[i].value == 0)
660 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
662 preserve_temp_slots (args[i].value);
666 /* If the value is a non-legitimate constant, force it into a
667 pseudo now. TLS symbols sometimes need a call to resolve. */
668 if (CONSTANT_P (args[i].value)
669 && !LEGITIMATE_CONSTANT_P (args[i].value))
670 args[i].value = force_reg (args[i].mode, args[i].value);
672 /* If we are to promote the function arg to a wider mode,
675 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
677 = convert_modes (args[i].mode,
678 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
679 args[i].value, args[i].unsignedp);
681 /* If the value is expensive, and we are inside an appropriately
682 short loop, put the value into a pseudo and then put the pseudo
685 For small register classes, also do this if this call uses
686 register parameters. This is to avoid reload conflicts while
687 loading the parameters registers. */
689 if ((! (REG_P (args[i].value)
690 || (GET_CODE (args[i].value) == SUBREG
691 && REG_P (SUBREG_REG (args[i].value)))))
692 && args[i].mode != BLKmode
693 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
694 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
695 || preserve_subexpressions_p ()))
696 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
700 #ifdef REG_PARM_STACK_SPACE
702 /* The argument list is the property of the called routine and it
703 may clobber it. If the fixed area has been used for previous
704 parameters, we must save and restore it. */
707 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
712 /* Compute the boundary of the area that needs to be saved, if any. */
713 high = reg_parm_stack_space;
714 #ifdef ARGS_GROW_DOWNWARD
717 if (high > highest_outgoing_arg_in_use)
718 high = highest_outgoing_arg_in_use;
720 for (low = 0; low < high; low++)
721 if (stack_usage_map[low] != 0)
724 enum machine_mode save_mode;
729 while (stack_usage_map[--high] == 0)
733 *high_to_save = high;
735 num_to_save = high - low + 1;
736 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
738 /* If we don't have the required alignment, must do this
740 if ((low & (MIN (GET_MODE_SIZE (save_mode),
741 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
744 #ifdef ARGS_GROW_DOWNWARD
749 stack_area = gen_rtx_MEM (save_mode,
750 memory_address (save_mode,
751 plus_constant (argblock,
754 set_mem_align (stack_area, PARM_BOUNDARY);
755 if (save_mode == BLKmode)
757 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
758 emit_block_move (validize_mem (save_area), stack_area,
759 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
763 save_area = gen_reg_rtx (save_mode);
764 emit_move_insn (save_area, stack_area);
774 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
776 enum machine_mode save_mode = GET_MODE (save_area);
780 #ifdef ARGS_GROW_DOWNWARD
781 delta = -high_to_save;
785 stack_area = gen_rtx_MEM (save_mode,
786 memory_address (save_mode,
787 plus_constant (argblock, delta)));
788 set_mem_align (stack_area, PARM_BOUNDARY);
790 if (save_mode != BLKmode)
791 emit_move_insn (stack_area, save_area);
793 emit_block_move (stack_area, validize_mem (save_area),
794 GEN_INT (high_to_save - low_to_save + 1),
797 #endif /* REG_PARM_STACK_SPACE */
799 /* If any elements in ARGS refer to parameters that are to be passed in
800 registers, but not in memory, and whose alignment does not permit a
801 direct copy into registers, copy the values into a group of pseudos
802 which we will later copy into the appropriate hard registers.
804 Pseudos for each unaligned argument will be stored into the array
805 args[argnum].aligned_regs. The caller is responsible for deallocating
806 the aligned_regs array if it is nonzero. */
809 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
813 for (i = 0; i < num_actuals; i++)
814 if (args[i].reg != 0 && ! args[i].pass_on_stack
815 && args[i].mode == BLKmode
816 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
817 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
819 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
820 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
821 int endian_correction = 0;
823 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
824 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
826 /* Structures smaller than a word are normally aligned to the
827 least significant byte. On a BYTES_BIG_ENDIAN machine,
828 this means we must skip the empty high order bytes when
829 calculating the bit offset. */
830 if (bytes < UNITS_PER_WORD
831 #ifdef BLOCK_REG_PADDING
832 && (BLOCK_REG_PADDING (args[i].mode,
833 TREE_TYPE (args[i].tree_value), 1)
839 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
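/* Worked example (illustrative, not from the original source): for a
   3-byte structure with BITS_PER_WORD == 32 and BITS_PER_UNIT == 8,
   endian_correction is 32 - 3 * 8 == 8, so on a big-endian target the
   store skips the 8 empty high-order bits of the word.  */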
841 for (j = 0; j < args[i].n_aligned_regs; j++)
843 rtx reg = gen_reg_rtx (word_mode);
844 rtx word = operand_subword_force (args[i].value, j, BLKmode);
845 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
847 args[i].aligned_regs[j] = reg;
848 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
849 word_mode, word_mode);
851 /* There is no need to restrict this code to loading items
852 in TYPE_ALIGN sized hunks. The bitfield instructions can
853 load up entire word sized registers efficiently.
855 ??? This may not be needed anymore.
856 We used to emit a clobber here, but that doesn't let later
857 passes optimize the instructions we emit. By storing 0 into
858 the register, later passes know that the first AND to zero out the
859 bitfield being set in the register is unnecessary. The store
860 of 0 will be deleted as will at least the first AND. */
862 emit_move_insn (reg, const0_rtx);
864 bytes -= bitsize / BITS_PER_UNIT;
865 store_bit_field (reg, bitsize, endian_correction, word_mode,
871 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
874 NUM_ACTUALS is the total number of parameters.
876 N_NAMED_ARGS is the total number of named arguments.
878 FNDECL is the tree code for the target of this call (if known)
880 ARGS_SO_FAR holds state needed by the target to know where to place
883 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
884 for arguments which are passed in registers.
886 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
887 and may be modified by this routine.
889 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
890 flags which may be modified by this routine.
892 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference argument
893 that requires allocation of stack space.
895 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
896 the thunked-to function. */
899 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
900 struct arg_data *args,
901 struct args_size *args_size,
902 int n_named_args ATTRIBUTE_UNUSED,
903 tree actparms, tree fndecl,
904 CUMULATIVE_ARGS *args_so_far,
905 int reg_parm_stack_space,
906 rtx *old_stack_level, int *old_pending_adj,
907 int *must_preallocate, int *ecf_flags,
908 bool *may_tailcall, bool call_from_thunk_p)
910 /* 1 if scanning parms front to back, -1 if scanning back to front. */
913 /* Count arg position in order args appear. */
919 args_size->constant = 0;
922 /* In this loop, we consider args in the order they are written.
923 We fill up ARGS from the front or from the back if necessary
924 so that in any case the first arg to be pushed ends up at the front. */
926 if (PUSH_ARGS_REVERSED)
928 i = num_actuals - 1, inc = -1;
929 /* In this case, must reverse order of args
930 so that we compute and push the last arg first. */
937 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
938 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
940 tree type = TREE_TYPE (TREE_VALUE (p));
942 enum machine_mode mode;
944 args[i].tree_value = TREE_VALUE (p);
946 /* Replace erroneous argument with constant zero. */
947 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
948 args[i].tree_value = integer_zero_node, type = integer_type_node;
950 /* If TYPE is a transparent union, pass things the way we would
951 pass the first field of the union. We have already verified that
952 the modes are the same. */
953 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
954 type = TREE_TYPE (TYPE_FIELDS (type));
956 /* Decide where to pass this arg.
958 args[i].reg is nonzero if all or part is passed in registers.
960 args[i].partial is nonzero if part but not all is passed in registers,
961 and the exact value says how many words are passed in registers.
963 args[i].pass_on_stack is nonzero if the argument must at least be
964 computed on the stack. It may then be loaded back into registers
965 if args[i].reg is nonzero.
967 These decisions are driven by the FUNCTION_... macros and must agree
968 with those made by function.c. */
970 /* See if this argument should be passed by invisible reference. */
971 if (pass_by_reference (args_so_far, TYPE_MODE (type),
972 type, argpos < n_named_args))
974 /* If we're compiling a thunk, pass through invisible
975 references instead of making a copy. */
976 if (call_from_thunk_p
977 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
978 type, argpos < n_named_args)
979 /* If it's in a register, we must make a copy of it too. */
980 /* ??? Is this a sufficient test? Is there a better one? */
981 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
982 && REG_P (DECL_RTL (args[i].tree_value)))
983 && ! TREE_ADDRESSABLE (type))
986 /* C++ uses a TARGET_EXPR to indicate that we want to make a
987 new object from the argument. If we are passing by
988 invisible reference, the callee will do that for us, so we
989 can strip off the TARGET_EXPR. This is not always safe,
990 but it is safe in the only case where this is a useful
991 optimization; namely, when the argument is a plain object.
992 In that case, the frontend is just asking the backend to
993 make a bitwise copy of the argument. */
995 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
996 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
997 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
998 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1000 /* We can't use sibcalls if a callee-copied argument is stored
1001 in the current function's frame. */
1002 if (!call_from_thunk_p
1003 && (!DECL_P (args[i].tree_value)
1004 || !TREE_STATIC (args[i].tree_value)))
1005 *may_tailcall = false;
1007 args[i].tree_value = build1 (ADDR_EXPR,
1008 build_pointer_type (type),
1009 args[i].tree_value);
1010 type = build_pointer_type (type);
1012 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1014 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1015 We implement this by passing the address of the temporary
1016 rather than expanding it into another allocated slot. */
1017 args[i].tree_value = build1 (ADDR_EXPR,
1018 build_pointer_type (type),
1019 args[i].tree_value);
1020 type = build_pointer_type (type);
1021 *may_tailcall = false;
1025 /* We make a copy of the object and pass the address to the
1026 function being called. */
1029 if (!COMPLETE_TYPE_P (type)
1030 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1031 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1032 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1033 STACK_CHECK_MAX_VAR_SIZE))))
1035 /* This is a variable-sized object. Make space on the stack
1037 rtx size_rtx = expr_size (TREE_VALUE (p));
1039 if (*old_stack_level == 0)
1041 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1042 *old_pending_adj = pending_stack_adjust;
1043 pending_stack_adjust = 0;
1046 copy = gen_rtx_MEM (BLKmode,
1047 allocate_dynamic_stack_space
1048 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1049 set_mem_attributes (copy, type, 1);
1052 copy = assign_temp (type, 0, 1, 0);
1054 store_expr (args[i].tree_value, copy, 0);
1055 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1057 args[i].tree_value = build1 (ADDR_EXPR,
1058 build_pointer_type (type),
1059 make_tree (type, copy));
1060 type = build_pointer_type (type);
1061 *may_tailcall = false;
1065 mode = TYPE_MODE (type);
1066 unsignedp = TYPE_UNSIGNED (type);
1068 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1069 mode = promote_mode (type, mode, &unsignedp, 1);
1071 args[i].unsignedp = unsignedp;
1072 args[i].mode = mode;
1074 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1075 argpos < n_named_args);
1076 #ifdef FUNCTION_INCOMING_ARG
1077 /* If this is a sibling call and the machine has register windows, the
1078 register window has to be unwound before calling the routine, so
1079 arguments have to go into the incoming registers. */
1080 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1081 argpos < n_named_args);
1083 args[i].tail_call_reg = args[i].reg;
1088 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1089 argpos < n_named_args);
1091 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1093 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1094 it means that we are to pass this arg in the register(s) designated
1095 by the PARALLEL, but also to pass it in the stack. */
1096 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1097 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1098 args[i].pass_on_stack = 1;
1100 /* If this is an addressable type, we must preallocate the stack
1101 since we must evaluate the object into its final location.
1103 If this is to be passed in both registers and the stack, it is simpler
1105 if (TREE_ADDRESSABLE (type)
1106 || (args[i].pass_on_stack && args[i].reg != 0))
1107 *must_preallocate = 1;
1109 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1110 we cannot consider this function call constant. */
1111 if (TREE_ADDRESSABLE (type))
1112 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1114 /* Compute the stack-size of this argument. */
1115 if (args[i].reg == 0 || args[i].partial != 0
1116 || reg_parm_stack_space > 0
1117 || args[i].pass_on_stack)
1118 locate_and_pad_parm (mode, type,
1119 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1124 args[i].pass_on_stack ? 0 : args[i].partial,
1125 fndecl, args_size, &args[i].locate);
1126 #ifdef BLOCK_REG_PADDING
1128 /* The argument is passed entirely in registers. See at which
1129 end it should be padded. */
1130 args[i].locate.where_pad =
1131 BLOCK_REG_PADDING (mode, type,
1132 int_size_in_bytes (type) <= UNITS_PER_WORD);
1135 /* Update ARGS_SIZE, the total stack space for args so far. */
1137 args_size->constant += args[i].locate.size.constant;
1138 if (args[i].locate.size.var)
1139 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1141 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1142 have been used, etc. */
1144 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1145 argpos < n_named_args);
1149 /* Update ARGS_SIZE to contain the total size for the argument block.
1150 Return the original constant component of the argument block's size.
1152 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1153 for arguments passed in registers. */
1156 compute_argument_block_size (int reg_parm_stack_space,
1157 struct args_size *args_size,
1158 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1160 int unadjusted_args_size = args_size->constant;
1162 /* For accumulate outgoing args mode we don't need to align, since the frame
1163 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1164 backends from generating misaligned frame sizes. */
1165 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1166 preferred_stack_boundary = STACK_BOUNDARY;
1168 /* Compute the actual size of the argument block required. The variable
1169 and constant sizes must be combined, the size may have to be rounded,
1170 and there may be a minimum required size. */
1174 args_size->var = ARGS_SIZE_TREE (*args_size);
1175 args_size->constant = 0;
1177 preferred_stack_boundary /= BITS_PER_UNIT;
1178 if (preferred_stack_boundary > 1)
1180 /* We don't handle this case yet. To handle it correctly we have
1181 to add the delta, round and subtract the delta.
1182 Currently no machine description requires this support. */
1183 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1185 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1188 if (reg_parm_stack_space > 0)
1191 = size_binop (MAX_EXPR, args_size->var,
1192 ssize_int (reg_parm_stack_space));
1194 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1195 /* The area corresponding to register parameters is not to count in
1196 the size of the block we need. So make the adjustment. */
1198 = size_binop (MINUS_EXPR, args_size->var,
1199 ssize_int (reg_parm_stack_space));
1205 preferred_stack_boundary /= BITS_PER_UNIT;
1206 if (preferred_stack_boundary < 1)
1207 preferred_stack_boundary = 1;
1208 args_size->constant = (((args_size->constant
1209 + stack_pointer_delta
1210 + preferred_stack_boundary - 1)
1211 / preferred_stack_boundary
1212 * preferred_stack_boundary)
1213 - stack_pointer_delta);
1215 args_size->constant = MAX (args_size->constant,
1216 reg_parm_stack_space);
1218 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1219 args_size->constant -= reg_parm_stack_space;
1222 return unadjusted_args_size;
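/* Minimal sketch (illustrative, not part of the compiler) of the rounding
   applied to args_size->constant above, on plain integers.  The names are
   invented for the example; BOUNDARY_BYTES stands for the preferred stack
   boundary already converted to bytes.  */
static int
example_round_arg_block_size (int constant, int sp_delta, int boundary_bytes)
{
  /* Round CONSTANT + SP_DELTA up to a multiple of BOUNDARY_BYTES, then
     take SP_DELTA back out, mirroring the expression above.  E.g.
     constant == 20, sp_delta == 4, boundary_bytes == 16 gives
     (24 rounded up to 32) - 4 == 28.  */
  return ((constant + sp_delta + boundary_bytes - 1)
	  / boundary_bytes * boundary_bytes) - sp_delta;
}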
1225 /* Precompute parameters as needed for a function call.
1227 FLAGS is mask of ECF_* constants.
1229 NUM_ACTUALS is the number of arguments.
1231 ARGS is an array containing information for each argument; this
1232 routine fills in the INITIAL_VALUE and VALUE fields for each
1233 precomputed argument. */
1236 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1240 /* If this is a libcall, then precompute all arguments so that we do not
1241 get extraneous instructions emitted as part of the libcall sequence. */
1242 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1245 for (i = 0; i < num_actuals; i++)
1247 enum machine_mode mode;
1249 /* If this is an addressable type, we cannot pre-evaluate it. */
1250 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1253 args[i].initial_value = args[i].value
1254 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1256 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1257 if (mode != args[i].mode)
1260 = convert_modes (args[i].mode, mode,
1261 args[i].value, args[i].unsignedp);
1262 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1263 /* CSE will replace this only if it contains args[i].value
1264 pseudo, so convert it down to the declared mode using
1266 if (REG_P (args[i].value)
1267 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1269 args[i].initial_value
1270 = gen_lowpart_SUBREG (mode, args[i].value);
1271 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1272 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1280 /* Given the current state of MUST_PREALLOCATE and information about
1281 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1282 compute and return the final value for MUST_PREALLOCATE. */
1285 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1287 /* See if we have or want to preallocate stack space.
1289 If we would have to push a partially-in-regs parm
1290 before other stack parms, preallocate stack space instead.
1292 If the size of some parm is not a multiple of the required stack
1293 alignment, we must preallocate.
1295 If the total size of arguments that would otherwise create a copy in
1296 a temporary (such as a CALL) is more than half the total argument list
1297 size, preallocation is faster.
1299 Another reason to preallocate is if we have a machine (like the m88k)
1300 where stack alignment is required to be maintained between every
1301 pair of insns, not just when the call is made. However, we assume here
1302 that such machines either do not have push insns (and hence preallocation
1303 would occur anyway) or the problem is taken care of with
1306 if (! must_preallocate)
1308 int partial_seen = 0;
1309 int copy_to_evaluate_size = 0;
1312 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1314 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1316 else if (partial_seen && args[i].reg == 0)
1317 must_preallocate = 1;
1319 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1320 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1321 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1322 || TREE_CODE (args[i].tree_value) == COND_EXPR
1323 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1324 copy_to_evaluate_size
1325 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1328 if (copy_to_evaluate_size * 2 >= args_size->constant
1329 && args_size->constant > 0)
1330 must_preallocate = 1;
1332 return must_preallocate;
1335 /* If we preallocated stack space, compute the address of each argument
1336 and store it into the ARGS array.
1338 We need not ensure it is a valid memory address here; it will be
1339 validized when it is used.
1341 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1344 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1348 rtx arg_reg = argblock;
1349 int i, arg_offset = 0;
1351 if (GET_CODE (argblock) == PLUS)
1352 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1354 for (i = 0; i < num_actuals; i++)
1356 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1357 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1360 /* Skip this parm if it will not be passed on the stack. */
1361 if (! args[i].pass_on_stack && args[i].reg != 0)
1364 if (GET_CODE (offset) == CONST_INT)
1365 addr = plus_constant (arg_reg, INTVAL (offset));
1367 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1369 addr = plus_constant (addr, arg_offset);
1370 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1371 set_mem_align (args[i].stack, PARM_BOUNDARY);
1372 set_mem_attributes (args[i].stack,
1373 TREE_TYPE (args[i].tree_value), 1);
1375 if (GET_CODE (slot_offset) == CONST_INT)
1376 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1378 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1380 addr = plus_constant (addr, arg_offset);
1381 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1382 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1383 set_mem_attributes (args[i].stack_slot,
1384 TREE_TYPE (args[i].tree_value), 1);
1386 /* Function incoming arguments may overlap with sibling call
1387 outgoing arguments and we cannot allow reordering of reads
1388 from function arguments with stores to outgoing arguments
1389 of sibling calls. */
1390 set_mem_alias_set (args[i].stack, 0);
1391 set_mem_alias_set (args[i].stack_slot, 0);
1396 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1397 in a call instruction.
1399 FNDECL is the tree node for the target function. For an indirect call
1400 FNDECL will be NULL_TREE.
1402 ADDR is the operand 0 of CALL_EXPR for this call. */
1405 rtx_for_function_call (tree fndecl, tree addr)
1409 /* Get the function to call, in the form of RTL. */
1412 /* If this is the first use of the function, see if we need to
1413 make an external definition for it. */
1414 if (! TREE_USED (fndecl))
1416 assemble_external (fndecl);
1417 TREE_USED (fndecl) = 1;
1420 /* Get a SYMBOL_REF rtx for the function address. */
1421 funexp = XEXP (DECL_RTL (fndecl), 0);
1424 /* Generate an rtx (probably a pseudo-register) for the address. */
1427 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1428 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1433 /* Do the register loads required for any wholly-register parms or any
1434 parms which are passed both on the stack and in a register. Their
1435 expressions were already evaluated.
1437 Mark all register-parms as living through the call, putting these USE
1438 insns in the CALL_INSN_FUNCTION_USAGE field.
1440 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1441 checking, setting *SIBCALL_FAILURE if appropriate. */
1444 load_register_parameters (struct arg_data *args, int num_actuals,
1445 rtx *call_fusage, int flags, int is_sibcall,
1446 int *sibcall_failure)
1450 for (i = 0; i < num_actuals; i++)
1452 rtx reg = ((flags & ECF_SIBCALL)
1453 ? args[i].tail_call_reg : args[i].reg);
1456 int partial = args[i].partial;
1459 rtx before_arg = get_last_insn ();
1460 /* Set to non-negative if we must move a word at a time, even if just
1461 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1462 we just use a normal move insn. This value can be zero if the
1463 argument is a zero size structure with no fields. */
1467 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1469 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1470 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1473 size = GET_MODE_SIZE (args[i].mode);
1475 /* Handle calls that pass values in multiple non-contiguous
1476 locations. The Irix 6 ABI has examples of this. */
1478 if (GET_CODE (reg) == PARALLEL)
1480 tree type = TREE_TYPE (args[i].tree_value);
1481 emit_group_load (reg, args[i].value, type,
1482 int_size_in_bytes (type));
1485 /* If simple case, just do move. If normal partial, store_one_arg
1486 has already loaded the register for us. In all other cases,
1487 load the register(s) from memory. */
1489 else if (nregs == -1)
1491 emit_move_insn (reg, args[i].value);
1492 #ifdef BLOCK_REG_PADDING
1493 /* Handle case where we have a value that needs shifting
1494 up to the msb, e.g. a QImode value when we're padding
1495 upward on a BYTES_BIG_ENDIAN machine. */
1496 if (size < UNITS_PER_WORD
1497 && (args[i].locate.where_pad
1498 == (BYTES_BIG_ENDIAN ? upward : downward)))
1501 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1503 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1504 report the whole reg as used. Strictly speaking, the
1505 call only uses SIZE bytes at the msb end, but it doesn't
1506 seem worth generating rtl to say that. */
1507 reg = gen_rtx_REG (word_mode, REGNO (reg));
1508 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1509 build_int_2 (shift, 0), reg, 1);
1511 emit_move_insn (reg, x);
1516 /* If we have pre-computed the values to put in the registers in
1517 the case of non-aligned structures, copy them in now. */
1519 else if (args[i].n_aligned_regs != 0)
1520 for (j = 0; j < args[i].n_aligned_regs; j++)
1521 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1522 args[i].aligned_regs[j]);
1524 else if (partial == 0 || args[i].pass_on_stack)
1526 rtx mem = validize_mem (args[i].value);
1528 /* Handle a BLKmode that needs shifting. */
1529 if (nregs == 1 && size < UNITS_PER_WORD
1530 #ifdef BLOCK_REG_PADDING
1531 && args[i].locate.where_pad == downward
1537 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1538 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1539 rtx x = gen_reg_rtx (word_mode);
1540 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1541 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1544 emit_move_insn (x, tem);
1545 x = expand_shift (dir, word_mode, x,
1546 build_int_2 (shift, 0), ri, 1);
1548 emit_move_insn (ri, x);
1551 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1554 /* When a parameter is a block, and perhaps in other cases, it is
1555 possible that it did a load from an argument slot that was
1556 already clobbered. */
1558 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1559 *sibcall_failure = 1;
1561 /* Handle calls that pass values in multiple non-contiguous
1562 locations. The Irix 6 ABI has examples of this. */
1563 if (GET_CODE (reg) == PARALLEL)
1564 use_group_regs (call_fusage, reg);
1565 else if (nregs == -1)
1566 use_reg (call_fusage, reg);
1568 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1573 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1574 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1575 bytes, then we would need to push some additional bytes to pad the
1576 arguments. So, we compute an adjust to the stack pointer for an
1577 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1578 bytes. Then, when the arguments are pushed, the stack will be perfectly
1579 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1580 be popped after the call. Returns the adjustment. */
1583 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1584 struct args_size *args_size,
1585 unsigned int preferred_unit_stack_boundary)
1587 /* The number of bytes to pop so that the stack will be
1588 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1589 HOST_WIDE_INT adjustment;
1590 /* The alignment of the stack after the arguments are pushed, if we
1591 just pushed the arguments without adjusting the stack here. */
1592 unsigned HOST_WIDE_INT unadjusted_alignment;
1594 unadjusted_alignment
1595 = ((stack_pointer_delta + unadjusted_args_size)
1596 % preferred_unit_stack_boundary);
1598 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1599 as possible -- leaving just enough left to cancel out the
1600 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1601 PENDING_STACK_ADJUST is non-negative, and congruent to
1602 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1604 /* Begin by trying to pop all the bytes. */
1605 unadjusted_alignment
1606 = (unadjusted_alignment
1607 - (pending_stack_adjust % preferred_unit_stack_boundary));
1608 adjustment = pending_stack_adjust;
1609 /* Push enough additional bytes that the stack will be aligned
1610 after the arguments are pushed. */
1611 if (preferred_unit_stack_boundary > 1)
1613 if (unadjusted_alignment > 0)
1614 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1616 adjustment += unadjusted_alignment;
1619 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1620 bytes after the call. The right number is the entire
1621 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1622 by the arguments in the first place. */
1624 = pending_stack_adjust - adjustment + unadjusted_args_size;
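/* Worked example (illustrative, not from the original source): with
   pending_stack_adjust == 20, stack_pointer_delta == 0,
   unadjusted_args_size == 8 and a 16-byte boundary, the alignment after
   pushing the arguments would be 8 and the adjustment works out to
   20 - (16 - (8 - 4)) == 8: pop 8 bytes now, push the 8 argument bytes
   (a net stack change of zero, so still 16-byte aligned), and pop the
   remaining 20 - 8 + 8 == 20 bytes after the call.  */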
1629 /* Scan expression X to check whether it dereferences any argument slots
1630 we have already clobbered with tail call arguments (as noted in the stored_args_map
1632 Return nonzero if X expression dereferences such argument slots,
1636 check_sibcall_argument_overlap_1 (rtx x)
1646 code = GET_CODE (x);
1650 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1652 else if (GET_CODE (XEXP (x, 0)) == PLUS
1653 && XEXP (XEXP (x, 0), 0) ==
1654 current_function_internal_arg_pointer
1655 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1656 i = INTVAL (XEXP (XEXP (x, 0), 1));
1660 #ifdef ARGS_GROW_DOWNWARD
1661 i = -i - GET_MODE_SIZE (GET_MODE (x));
1664 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1665 if (i + k < stored_args_map->n_bits
1666 && TEST_BIT (stored_args_map, i + k))
1672 /* Scan all subexpressions. */
1673 fmt = GET_RTX_FORMAT (code);
1674 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1678 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1681 else if (*fmt == 'E')
1683 for (j = 0; j < XVECLEN (x, i); j++)
1684 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1691 /* Scan the sequence after INSN to check whether it dereferences any argument slots
1692 we have already clobbered with tail call arguments (as noted in the stored_args_map
1693 bitmap). If MARK_STORED_ARGS_MAP, add the stack slots for ARG to the
1694 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
1695 should be 0). Return nonzero if the sequence after INSN dereferences such argument
1696 slots, zero otherwise. */
1699 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1703 if (insn == NULL_RTX)
1704 insn = get_insns ();
1706 insn = NEXT_INSN (insn);
1708 for (; insn; insn = NEXT_INSN (insn))
1710 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1713 if (mark_stored_args_map)
1715 #ifdef ARGS_GROW_DOWNWARD
1716 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1718 low = arg->locate.slot_offset.constant;
1721 for (high = low + arg->locate.size.constant; low < high; low++)
1722 SET_BIT (stored_args_map, low);
1724 return insn != NULL_RTX;
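/* Worked example (illustrative, not from the original source): an argument
   stored at slot offset 8 with a 4-byte size marks bytes 8..11 of
   stored_args_map; if a later insn in the sibcall sequence reads any of
   those bytes relative to the incoming argument pointer,
   check_sibcall_argument_overlap_1 reports the overlap and the sibling
   call optimization is abandoned.  */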
1728 fix_unsafe_tree (tree t)
1730 switch (unsafe_for_reeval (t))
1735 case 1: /* Mildly unsafe. */
1736 t = unsave_expr (t);
1739 case 2: /* Wildly unsafe. */
1741 tree var = build_decl (VAR_DECL, NULL_TREE,
1744 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
1756 /* If function value *VALUE was returned at the most significant end of a
1757 register, shift it towards the least significant end and convert it to
1758 TYPE's mode. Return true and update *VALUE if some action was needed.
1760 TYPE is the type of the function's return value, which is known not
1761 to have mode BLKmode. */
1764 shift_returned_value (tree type, rtx *value)
1766 if (targetm.calls.return_in_msb (type))
1768 HOST_WIDE_INT shift;
1770 shift = (GET_MODE_BITSIZE (GET_MODE (*value))
1771 - BITS_PER_UNIT * int_size_in_bytes (type));
1774 /* Shift the value into the low part of the register. */
1775 *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
1776 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
1778 /* Truncate it to the type's mode, or its integer equivalent.
1779 This is subject to TRULY_NOOP_TRUNCATION. */
1780 *value = convert_to_mode (int_mode_for_mode (TYPE_MODE (type)),
1783 /* Now convert it to the final form. */
1784 *value = gen_lowpart (TYPE_MODE (type), *value);
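/* Worked example (illustrative, not from the original source): for a
   3-byte return type held in a 32-bit register on a return-in-msb target,
   shift is 32 - 8 * 3 == 8, so the value is shifted right by 8 bits before
   being truncated to the type's mode.  */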
1791 /* Remove all REG_EQUIV notes found in the insn chain. */
1794 purge_reg_equiv_notes (void)
1798 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1802 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1805 /* Remove the note and keep looking at the notes for
1807 remove_note (insn, note);
1815 /* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
1818 purge_mem_unchanging_flag (rtx x)
1827 code = GET_CODE (x);
1831 if (RTX_UNCHANGING_P (x)
1832 && (XEXP (x, 0) == current_function_internal_arg_pointer
1833 || (GET_CODE (XEXP (x, 0)) == PLUS
1834 && XEXP (XEXP (x, 0), 0) ==
1835 current_function_internal_arg_pointer
1836 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
1837 RTX_UNCHANGING_P (x) = 0;
1841 /* Scan all subexpressions. */
1842 fmt = GET_RTX_FORMAT (code);
1843 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1846 purge_mem_unchanging_flag (XEXP (x, i));
1847 else if (*fmt == 'E')
1848 for (j = 0; j < XVECLEN (x, i); j++)
1849 purge_mem_unchanging_flag (XVECEXP (x, i, j));
1854 /* Generate all the code for a function call
1855 and return an rtx for its value.
1856 Store the value in TARGET (specified as an rtx) if convenient.
1857 If the value is stored in TARGET then TARGET is returned.
1858 If IGNORE is nonzero, then we ignore the value of the function call. */
1861 expand_call (tree exp, rtx target, int ignore)
1863 /* Nonzero if we are currently expanding a call. */
1864 static int currently_expanding_call = 0;
1866 /* List of actual parameters. */
1867 tree actparms = TREE_OPERAND (exp, 1);
1868 /* RTX for the function to be called. */
1870 /* Sequence of insns to perform a normal "call". */
1871 rtx normal_call_insns = NULL_RTX;
1872 /* Sequence of insns to perform a tail "call". */
1873 rtx tail_call_insns = NULL_RTX;
1874 /* Data type of the function. */
1876 tree type_arg_types;
1877 /* Declaration of the function being called,
1878 or 0 if the function is computed (not known by name). */
1880 /* The type of the function being called. */
1882 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1885 /* Register in which non-BLKmode value will be returned,
1886 or 0 if no value or if value is BLKmode. */
1888 /* Address where we should return a BLKmode value;
1889 0 if value not BLKmode. */
1890 rtx structure_value_addr = 0;
1891 /* Nonzero if that address is being passed by treating it as
1892 an extra, implicit first parameter. Otherwise,
1893 it is passed by being copied directly into struct_value_rtx. */
1894 int structure_value_addr_parm = 0;
1895 /* Size of aggregate value wanted, or zero if none wanted
1896 or if we are using the non-reentrant PCC calling convention
1897 or expecting the value in registers. */
1898 HOST_WIDE_INT struct_value_size = 0;
1899 /* Nonzero if called function returns an aggregate in memory PCC style,
1900 by returning the address of where to find it. */
1901 int pcc_struct_value = 0;
1902 rtx struct_value = 0;
1904 /* Number of actual parameters in this call, including struct value addr. */
1906 /* Number of named args. Args after this are anonymous ones
1907 and they must all go on the stack. */
1910 /* Vector of information about each argument.
1911 Arguments are numbered in the order they will be pushed,
1912 not the order they are written. */
1913 struct arg_data *args;
1915 /* Total size in bytes of all the stack-parms scanned so far. */
1916 struct args_size args_size;
1917 struct args_size adjusted_args_size;
1918 /* Size of arguments before any adjustments (such as rounding). */
1919 int unadjusted_args_size;
1920 /* Data on reg parms scanned so far. */
1921 CUMULATIVE_ARGS args_so_far;
1922 /* Nonzero if a reg parm has been scanned. */
1924 /* Nonzero if this is an indirect function call. */
1926 /* Nonzero if we must avoid push-insns in the args for this call.
1927 If stack space is allocated for register parameters, but not by the
1928 caller, then it is preallocated in the fixed part of the stack frame.
1929 So the entire argument block must then be preallocated (i.e., we
1930 ignore PUSH_ROUNDING in that case). */
1932 int must_preallocate = !PUSH_ARGS;
1934 /* Size of the stack reserved for parameter registers. */
1935 int reg_parm_stack_space = 0;
1937 /* Address of space preallocated for stack parms
1938 (on machines that lack push insns), or 0 if space not preallocated. */
1941 /* Mask of ECF_ flags. */
1943 #ifdef REG_PARM_STACK_SPACE
1944 /* Define the boundary of the register parm stack space that needs to be saved, if any.  */
1946 int low_to_save, high_to_save;
1947 rtx save_area = 0; /* Place that it is saved */
1950 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1951 char *initial_stack_usage_map = stack_usage_map;
1953 int old_stack_allocated;
1955 /* State variables to track stack modifications. */
1956 rtx old_stack_level = 0;
1957 int old_stack_arg_under_construction = 0;
1958 int old_pending_adj = 0;
1959 int old_inhibit_defer_pop = inhibit_defer_pop;
1961 /* Some stack pointer alterations we make are performed via
1962 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1963 which we then also need to save/restore along the way. */
1964 int old_stack_pointer_delta = 0;
1967 tree p = TREE_OPERAND (exp, 0);
1968 tree addr = TREE_OPERAND (exp, 0);
1970 /* The alignment of the stack, in bits. */
1971 unsigned HOST_WIDE_INT preferred_stack_boundary;
1972 /* The alignment of the stack, in bytes. */
1973 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1974 /* The static chain value to use for this call. */
1975 rtx static_chain_value;
1976 /* See if this is a "nothrow" function call. */
1977 if (TREE_NOTHROW (exp))
1978 flags |= ECF_NOTHROW;
1980 /* See if we can find a DECL-node for the actual function, and get the
1981 function attributes (flags) from the function decl or type node. */
1982 fndecl = get_callee_fndecl (exp);
1985 fntype = TREE_TYPE (fndecl);
1986 flags |= flags_from_decl_or_type (fndecl);
1990 fntype = TREE_TYPE (TREE_TYPE (p));
1991 flags |= flags_from_decl_or_type (fntype);
1994 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1996 /* Warn if this value is an aggregate type,
1997 regardless of which calling convention we are using for it. */
1998 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1999 warning ("function call has aggregate value");
2001 /* If the result of a pure or const function call is ignored (or void),
2002 and none of its arguments are volatile, we can avoid expanding the
2003 call and just evaluate the arguments for side-effects. */
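/* For example (a sketch): given

     extern int f (int) __attribute__ ((const));

   a statement that calls f and discards the result emits no call at all;
   the argument is expanded only for whatever side effects it may have.  */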
2004 if ((flags & (ECF_CONST | ECF_PURE))
2005 && (ignore || target == const0_rtx
2006 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2008 bool volatilep = false;
2011 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2012 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2020 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2021 expand_expr (TREE_VALUE (arg), const0_rtx,
2022 VOIDmode, EXPAND_NORMAL);
2027 #ifdef REG_PARM_STACK_SPACE
2028 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2031 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2032 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2033 must_preallocate = 1;
2036 /* Set up a place to return a structure. */
2038 /* Cater to broken compilers. */
2039 if (aggregate_value_p (exp, fndecl))
2041 /* This call returns a big structure. */
2042 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2044 #ifdef PCC_STATIC_STRUCT_RETURN
2046 pcc_struct_value = 1;
2048 #else /* not PCC_STATIC_STRUCT_RETURN */
2050 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2052 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2054 /* The structure value address arg is already in actparms.
2055 Pull it out. It might be nice to just leave it there, but
2056 we need to set structure_value_addr. */
2057 tree return_arg = TREE_VALUE (actparms);
2058 actparms = TREE_CHAIN (actparms);
2059 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2060 VOIDmode, EXPAND_NORMAL);
2062 else if (target && MEM_P (target))
2063 structure_value_addr = XEXP (target, 0);
2066 /* For variable-sized objects, we must be called with a target
2067 specified. If we were to allocate space on the stack here,
2068 we would have no way of knowing when to free it. */
2069 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2071 mark_temp_addr_taken (d);
2072 structure_value_addr = XEXP (d, 0);
2076 #endif /* not PCC_STATIC_STRUCT_RETURN */
2079 /* Figure out the amount to which the stack should be aligned. */
2080 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2083 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2084 if (i && i->preferred_incoming_stack_boundary)
2085 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2088 /* Operand 0 is a pointer-to-function; get the type of the function. */
2089 funtype = TREE_TYPE (addr);
2090 if (! POINTER_TYPE_P (funtype))
2092 funtype = TREE_TYPE (funtype);
2094 /* Munge the tree to split complex arguments into their imaginary and real parts.  */
2096 if (targetm.calls.split_complex_arg)
2098 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2099 actparms = split_complex_values (actparms);
2102 type_arg_types = TYPE_ARG_TYPES (funtype);
2104 if (flags & ECF_MAY_BE_ALLOCA)
2105 current_function_calls_alloca = 1;
2107 /* If struct_value_rtx is 0, it means pass the address
2108 as if it were an extra parameter. */
2109 if (structure_value_addr && struct_value == 0)
2111 /* If structure_value_addr is a REG other than
2112 virtual_outgoing_args_rtx, we can always use it. If it
2113 is not a REG, we must always copy it into a register.
2114 If it is virtual_outgoing_args_rtx, we must copy it to another
2115 register in some cases. */
2116 rtx temp = (!REG_P (structure_value_addr)
2117 || (ACCUMULATE_OUTGOING_ARGS
2118 && stack_arg_under_construction
2119 && structure_value_addr == virtual_outgoing_args_rtx)
2120 ? copy_addr_to_reg (convert_memory_address
2121 (Pmode, structure_value_addr))
2122 : structure_value_addr);
2125 = tree_cons (error_mark_node,
2126 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2129 structure_value_addr_parm = 1;
2132 /* Count the arguments and set NUM_ACTUALS. */
2133 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2136 /* Compute number of named args.
2137 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2139 if (type_arg_types != 0)
2141 = (list_length (type_arg_types)
2142 /* Count the struct value address, if it is passed as a parm. */
2143 + structure_value_addr_parm);
2145 /* If we know nothing, treat all args as named. */
2146 n_named_args = num_actuals;
2148 /* Start updating where the next arg would go.
2150 On some machines (such as the PA) indirect calls have a different
2151 calling convention than normal calls. The fourth argument in
2152 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not.  */
2154 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2156 /* Now possibly adjust the number of named args.
2157 Normally, don't include the last named arg if anonymous args follow.
2158 We do include the last named arg if
2159 targetm.calls.strict_argument_naming() returns nonzero.
2160 (If no anonymous args follow, the result of list_length is actually
2161 one too large. This is harmless.)
2163 If targetm.calls.pretend_outgoing_varargs_named() returns
2164 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2165 this machine will be able to place unnamed args that were passed
2166 in registers into the stack. So treat all args as named. This
2167 allows the insns emitted for a specific argument list to be
2168 independent of the function declaration.
2170 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2171 we do not have any reliable way to pass unnamed args in
2172 registers, so we must force them into memory. */
2174 if (type_arg_types != 0
2175 && targetm.calls.strict_argument_naming (&args_so_far))
2177 else if (type_arg_types != 0
2178 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2179 /* Don't include the last named arg. */
2182 /* Treat all args as named. */
2183 n_named_args = num_actuals;
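/* A small worked example (sketch): for a call to the variadic

     int printf (const char *, ...);

   type_arg_types lists only the format pointer, so the raw count above
   is 1.  With strict argument naming that count is kept; if the target
   instead pretends outgoing varargs are named, every actual argument of
   the call is counted as named.  */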
2185 /* Make a vector to hold all the information about each arg. */
2186 args = alloca (num_actuals * sizeof (struct arg_data));
2187 memset (args, 0, num_actuals * sizeof (struct arg_data));
2189 /* Build up entries in the ARGS array, compute the size of the
2190 arguments into ARGS_SIZE, etc. */
2191 initialize_argument_information (num_actuals, args, &args_size,
2192 n_named_args, actparms, fndecl,
2193 &args_so_far, reg_parm_stack_space,
2194 &old_stack_level, &old_pending_adj,
2195 &must_preallocate, &flags,
2196 &try_tail_call, CALL_FROM_THUNK_P (exp));
2200 /* If this function requires a variable-sized argument list, don't
2201 try to make a cse'able block for this call. We may be able to
2202 do this eventually, but it is too complicated to keep track of
2203 what insns go in the cse'able block and which don't. */
2205 flags &= ~ECF_LIBCALL_BLOCK;
2206 must_preallocate = 1;
2209 /* Now make final decision about preallocating stack space. */
2210 must_preallocate = finalize_must_preallocate (must_preallocate,
2214 /* If the structure value address will reference the stack pointer, we
2215 must stabilize it. We don't need to do this if we know that we are
2216 not going to adjust the stack pointer in processing this call. */
2218 if (structure_value_addr
2219 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2220 || reg_mentioned_p (virtual_outgoing_args_rtx,
2221 structure_value_addr))
2223 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2224 structure_value_addr = copy_to_reg (structure_value_addr);
2226 /* Tail calls can make things harder to debug, and we've traditionally
2227 pushed these optimizations into -O2. Don't try if we're already
2228 expanding a call, as that means we're an argument. Don't try if
2229 there are cleanups, as we know there's code to follow the call.
2231 If rtx_equal_function_value_matters is false, that means we've
2232 finished with regular parsing, which means that some of the
2233 machinery we use to generate tail-calls is no longer in place.
2234 This is most often true of sjlj-exceptions, which we couldn't
2235 tail-call to anyway. */
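/* A sketch of the case being targeted (assuming -foptimize-sibling-calls
   and a cooperative target):

     extern int foo (int);
     int bar (int x) { return foo (x + 1); }

   Here the call to foo can reuse bar's frame and be emitted as a jump,
   unless one of the conditions below rules it out.  */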
2237 if (currently_expanding_call++ != 0
2238 || !flag_optimize_sibling_calls
2239 || !rtx_equal_function_value_matters
2241 || lookup_stmt_eh_region (exp) >= 0)
2244 /* Remaining reasons for tail call optimization to fail.  */
2246 #ifdef HAVE_sibcall_epilogue
2247 !HAVE_sibcall_epilogue
2252 /* Doing sibling call optimization needs some work, since
2253 structure_value_addr can be allocated on the stack.
2254 It does not seem worth the effort since few optimizable
2255 sibling calls will return a structure. */
2256 || structure_value_addr != NULL_RTX
2257 /* Check whether the target is able to optimize the call into a sibcall.  */
2259 || !targetm.function_ok_for_sibcall (fndecl, exp)
2260 /* Functions that do not return exactly once may not be sibcall optimized.  */
2262 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2263 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2264 /* If the called function is nested in the current one, it might access
2265 some of the caller's arguments, but could clobber them beforehand if
2266 the argument areas are shared. */
2267 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2268 /* If this function requires more stack slots than the current
2269 function, we cannot change it into a sibling call. */
2270 || args_size.constant > current_function_args_size
2271 /* If the callee pops its own arguments, then it must pop exactly
2272 the same number of arguments as the current function. */
2273 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2274 != RETURN_POPS_ARGS (current_function_decl,
2275 TREE_TYPE (current_function_decl),
2276 current_function_args_size))
2277 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2283 actparms = NULL_TREE;
2284 /* Ok, we're going to give the tail call the old college try.
2285 This means we're going to evaluate the function arguments
2286 up to three times. There are two degrees of badness we can
2287 encounter, those that can be unsaved and those that can't.
2288 (See unsafe_for_reeval commentary for details.)
2290 Generate a new argument list. Pass safe arguments through
2291 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2292 For hard badness, evaluate them now and put their resulting
2293 rtx in a temporary VAR_DECL.
2295 initialize_argument_information has ordered the array for the
2296 order to be pushed, and we must remember this when reconstructing
2297 the original argument order. */
2299 if (PUSH_ARGS_REVERSED)
2308 i = num_actuals - 1;
2312 for (; i != end; i += inc)
2314 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2316 /* Do the same for the function address if it is an expression. */
2318 addr = fix_unsafe_tree (addr);
2322 /* Ensure current function's preferred stack boundary is at least
2323 what we need. We don't have to increase alignment for recursive functions.  */
2325 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2326 && fndecl != current_function_decl)
2327 cfun->preferred_stack_boundary = preferred_stack_boundary;
2328 if (fndecl == current_function_decl)
2329 cfun->recursive_call_emit = true;
2331 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2333 /* We want to make two insn chains; one for a sibling call, the other
2334 for a normal call. We will select one of the two chains after
2335 initial RTL generation is complete. */
2336 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2338 int sibcall_failure = 0;
2339 /* We want to emit any pending stack adjustments before the tail
2340 recursion "call". That way we know any adjustment after the tail
2341 recursion call can be ignored if we indeed use the tail call expansion.  */
2343 int save_pending_stack_adjust = 0;
2344 int save_stack_pointer_delta = 0;
2346 rtx before_call, next_arg_reg;
2350 /* State variables we need to save and restore between iterations of the loop.  */
2352 save_pending_stack_adjust = pending_stack_adjust;
2353 save_stack_pointer_delta = stack_pointer_delta;
2356 flags &= ~ECF_SIBCALL;
2358 flags |= ECF_SIBCALL;
2360 /* Other state variables that we must reinitialize each time
2361 through the loop (that are not initialized by the loop itself). */
2365 /* Start a new sequence for the normal call case.
2367 From this point on, if the sibling call fails, we want to set
2368 sibcall_failure instead of continuing the loop. */
2371 /* Don't let pending stack adjusts add up to too much.
2372 Also, do all pending adjustments now if there is any chance
2373 this might be a call to alloca or if we are expanding a sibling
2374 call sequence or if we are calling a function that is to return
2375 with stack pointer depressed. */
2376 if (pending_stack_adjust >= 32
2377 || (pending_stack_adjust > 0
2378 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2380 do_pending_stack_adjust ();
2382 /* When calling a const function, we must pop the stack args right away,
2383 so that the pop is deleted or moved with the call. */
2384 if (pass && (flags & ECF_LIBCALL_BLOCK))
2387 /* Precompute any arguments as needed. */
2389 precompute_arguments (flags, num_actuals, args);
2391 /* Now we are about to start emitting insns that can be deleted
2392 if a libcall is deleted. */
2393 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2396 adjusted_args_size = args_size;
2397 /* Compute the actual size of the argument block required. The variable
2398 and constant sizes must be combined, the size may have to be rounded,
2399 and there may be a minimum required size. When generating a sibcall
2400 pattern, do not round up, since we'll be re-using whatever space our caller provided.  */
2402 unadjusted_args_size
2403 = compute_argument_block_size (reg_parm_stack_space,
2404 &adjusted_args_size,
2406 : preferred_stack_boundary));
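/* A concrete (hypothetical) example: with 37 bytes of outgoing argument
   data and a preferred stack boundary of 128 bits, unadjusted_args_size
   stays 37 while adjusted_args_size.constant is rounded up to 48; for a
   sibcall pattern (pass 0) no rounding is requested at all.  */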
2408 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2410 /* The argument block when performing a sibling call is the
2411 incoming argument block. */
2414 argblock = virtual_incoming_args_rtx;
2416 #ifdef STACK_GROWS_DOWNWARD
2417 = plus_constant (argblock, current_function_pretend_args_size);
2419 = plus_constant (argblock, -current_function_pretend_args_size);
2421 stored_args_map = sbitmap_alloc (args_size.constant);
2422 sbitmap_zero (stored_args_map);
2425 /* If we have no actual push instructions, or shouldn't use them,
2426 make space for all args right now. */
2427 else if (adjusted_args_size.var != 0)
2429 if (old_stack_level == 0)
2431 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2432 old_stack_pointer_delta = stack_pointer_delta;
2433 old_pending_adj = pending_stack_adjust;
2434 pending_stack_adjust = 0;
2435 /* stack_arg_under_construction says whether a stack arg is
2436 being constructed at the old stack level. Pushing the stack
2437 gets a clean outgoing argument block. */
2438 old_stack_arg_under_construction = stack_arg_under_construction;
2439 stack_arg_under_construction = 0;
2441 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2445 /* Note that we must go through the motions of allocating an argument
2446 block even if the size is zero because we may be storing args
2447 in the area reserved for register arguments, which may be part of the stack frame.  */
2450 int needed = adjusted_args_size.constant;
2452 /* Store the maximum argument space used. It will be pushed by
2453 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking).  */
2456 if (needed > current_function_outgoing_args_size)
2457 current_function_outgoing_args_size = needed;
2459 if (must_preallocate)
2461 if (ACCUMULATE_OUTGOING_ARGS)
2463 /* Since the stack pointer will never be pushed, it is
2464 possible for the evaluation of a parm to clobber
2465 something we have already written to the stack.
2466 Since most function calls on RISC machines do not use
2467 the stack, this is uncommon, but must work correctly.
2469 Therefore, we save any area of the stack that was already
2470 written and that we are using. Here we set up to do this
2471 by making a new stack usage map from the old one. The
2472 actual save will be done by store_one_arg.
2474 Another approach might be to try to reorder the argument
2475 evaluations to avoid this conflicting stack usage. */
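/* As a sketch of the problem: while the arguments of

     f (g (17), x);

   are being stored, expanding the inner call to g may want stack slots
   that already hold material for the outer call to f; the map built here
   lets store_one_arg notice such overlaps and save/restore those bytes.  */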
2477 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2478 /* Since we will be writing into the entire argument area,
2479 the map must be allocated for its entire size, not just
2480 the part that is the responsibility of the caller. */
2481 needed += reg_parm_stack_space;
2484 #ifdef ARGS_GROW_DOWNWARD
2485 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2488 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2491 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2493 if (initial_highest_arg_in_use)
2494 memcpy (stack_usage_map, initial_stack_usage_map,
2495 initial_highest_arg_in_use);
2497 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2498 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2499 (highest_outgoing_arg_in_use
2500 - initial_highest_arg_in_use));
2503 /* The address of the outgoing argument list must not be
2504 copied to a register here, because argblock would be left
2505 pointing to the wrong place after the call to
2506 allocate_dynamic_stack_space below. */
2508 argblock = virtual_outgoing_args_rtx;
2512 if (inhibit_defer_pop == 0)
2514 /* Try to reuse some or all of the pending_stack_adjust
2515 to get this space. */
2517 = (combine_pending_stack_adjustment_and_call
2518 (unadjusted_args_size,
2519 &adjusted_args_size,
2520 preferred_unit_stack_boundary));
2522 /* combine_pending_stack_adjustment_and_call computes
2523 an adjustment before the arguments are allocated.
2524 Account for them and see whether or not the stack
2525 needs to go up or down. */
2526 needed = unadjusted_args_size - needed;
2530 /* We're releasing stack space. */
2531 /* ??? We can avoid any adjustment at all if we're
2532 already aligned. FIXME. */
2533 pending_stack_adjust = -needed;
2534 do_pending_stack_adjust ();
2538 /* We need to allocate space. We'll do that in
2539 push_block below. */
2540 pending_stack_adjust = 0;
2543 /* Special case this because overhead of `push_block' in
2544 this case is non-trivial. */
2546 argblock = virtual_outgoing_args_rtx;
2549 argblock = push_block (GEN_INT (needed), 0, 0);
2550 #ifdef ARGS_GROW_DOWNWARD
2551 argblock = plus_constant (argblock, needed);
2555 /* We only really need to call `copy_to_reg' in the case
2556 where push insns are going to be used to pass ARGBLOCK
2557 to a function call in ARGS. In that case, the stack
2558 pointer changes value from the allocation point to the
2559 call point, and hence the value of
2560 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2561 as well always do it. */
2562 argblock = copy_to_reg (argblock);
2567 if (ACCUMULATE_OUTGOING_ARGS)
2569 /* The save/restore code in store_one_arg handles all
2570 cases except one: a constructor call (including a C
2571 function returning a BLKmode struct) to initialize an argument.  */
2573 if (stack_arg_under_construction)
2575 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2576 rtx push_size = GEN_INT (reg_parm_stack_space
2577 + adjusted_args_size.constant);
2579 rtx push_size = GEN_INT (adjusted_args_size.constant);
2581 if (old_stack_level == 0)
2583 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2585 old_stack_pointer_delta = stack_pointer_delta;
2586 old_pending_adj = pending_stack_adjust;
2587 pending_stack_adjust = 0;
2588 /* stack_arg_under_construction says whether a stack
2589 arg is being constructed at the old stack level.
2590 Pushing the stack gets a clean outgoing argument block.  */
2592 old_stack_arg_under_construction
2593 = stack_arg_under_construction;
2594 stack_arg_under_construction = 0;
2595 /* Make a new map for the new argument list. */
2596 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2597 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2598 highest_outgoing_arg_in_use = 0;
2600 allocate_dynamic_stack_space (push_size, NULL_RTX,
2604 /* If argument evaluation might modify the stack pointer,
2605 copy the address of the argument list to a register. */
2606 for (i = 0; i < num_actuals; i++)
2607 if (args[i].pass_on_stack)
2609 argblock = copy_addr_to_reg (argblock);
2614 compute_argument_addresses (args, argblock, num_actuals);
2616 /* If we push args individually in reverse order, perform stack alignment
2617 before the first push (the last arg). */
2618 if (PUSH_ARGS_REVERSED && argblock == 0
2619 && adjusted_args_size.constant != unadjusted_args_size)
2621 /* When the stack adjustment is pending, we get better code
2622 by combining the adjustments. */
2623 if (pending_stack_adjust
2624 && ! (flags & ECF_LIBCALL_BLOCK)
2625 && ! inhibit_defer_pop)
2627 pending_stack_adjust
2628 = (combine_pending_stack_adjustment_and_call
2629 (unadjusted_args_size,
2630 &adjusted_args_size,
2631 preferred_unit_stack_boundary));
2632 do_pending_stack_adjust ();
2634 else if (argblock == 0)
2635 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2636 - unadjusted_args_size));
2638 /* Now that the stack is properly aligned, pops can't safely
2639 be deferred during the evaluation of the arguments. */
2642 funexp = rtx_for_function_call (fndecl, addr);
2644 /* Figure out the register where the value, if any, will come back. */
2646 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2647 && ! structure_value_addr)
2649 if (pcc_struct_value)
2650 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2651 fndecl, (pass == 0));
2653 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2656 /* Precompute all register parameters. It isn't safe to compute anything
2657 once we have started filling any specific hard regs. */
2658 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2660 if (TREE_OPERAND (exp, 2))
2661 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2662 NULL_RTX, VOIDmode, 0);
2664 static_chain_value = 0;
2666 #ifdef REG_PARM_STACK_SPACE
2667 /* Save the fixed argument area if it's part of the caller's frame and
2668 is clobbered by argument setup for this call. */
2669 if (ACCUMULATE_OUTGOING_ARGS && pass)
2670 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2671 &low_to_save, &high_to_save);
2674 /* Now store (and compute if necessary) all non-register parms.
2675 These come before register parms, since they can require block-moves,
2676 which could clobber the registers used for register parms.
2677 Parms which have partial registers are not stored here,
2678 but we do preallocate space here if they want that. */
2680 for (i = 0; i < num_actuals; i++)
2681 if (args[i].reg == 0 || args[i].pass_on_stack)
2683 rtx before_arg = get_last_insn ();
2685 if (store_one_arg (&args[i], argblock, flags,
2686 adjusted_args_size.var != 0,
2687 reg_parm_stack_space)
2689 && check_sibcall_argument_overlap (before_arg,
2691 sibcall_failure = 1;
2693 if (flags & ECF_CONST
2695 && args[i].value == args[i].stack)
2696 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2697 gen_rtx_USE (VOIDmode,
2702 /* If we have a parm that is passed in registers but not in memory
2703 and whose alignment does not permit a direct copy into registers,
2704 make a group of pseudos that correspond to each register that we will later fill.  */
2706 if (STRICT_ALIGNMENT)
2707 store_unaligned_arguments_into_pseudos (args, num_actuals);
2709 /* Now store any partially-in-registers parm.
2710 This is the last place a block-move can happen. */
2712 for (i = 0; i < num_actuals; i++)
2713 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2715 rtx before_arg = get_last_insn ();
2717 if (store_one_arg (&args[i], argblock, flags,
2718 adjusted_args_size.var != 0,
2719 reg_parm_stack_space)
2721 && check_sibcall_argument_overlap (before_arg,
2723 sibcall_failure = 1;
2726 /* If we pushed args in forward order, perform stack alignment
2727 after pushing the last arg. */
2728 if (!PUSH_ARGS_REVERSED && argblock == 0)
2729 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2730 - unadjusted_args_size));
2732 /* If register arguments require space on the stack and stack space
2733 was not preallocated, allocate stack space here for arguments
2734 passed in registers. */
2735 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2736 if (!ACCUMULATE_OUTGOING_ARGS
2737 && must_preallocate == 0 && reg_parm_stack_space > 0)
2738 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2741 /* Pass the function the address in which to return a structure value.  */
2743 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2745 structure_value_addr
2746 = convert_memory_address (Pmode, structure_value_addr);
2747 emit_move_insn (struct_value,
2749 force_operand (structure_value_addr,
2752 if (REG_P (struct_value))
2753 use_reg (&call_fusage, struct_value);
2756 funexp = prepare_call_address (funexp, static_chain_value,
2757 &call_fusage, reg_parm_seen, pass == 0);
2759 load_register_parameters (args, num_actuals, &call_fusage, flags,
2760 pass == 0, &sibcall_failure);
2762 /* Save a pointer to the last insn before the call, so that we can
2763 later safely search backwards to find the CALL_INSN. */
2764 before_call = get_last_insn ();
2766 /* Set up next argument register. For sibling calls on machines
2767 with register windows this should be the incoming register. */
2768 #ifdef FUNCTION_INCOMING_ARG
2770 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2774 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2777 /* All arguments and registers used for the call must be set up by now!  */
2780 /* Stack must be properly aligned now. */
2781 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
2784 /* Generate the actual call instruction. */
2785 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2786 adjusted_args_size.constant, struct_value_size,
2787 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2788 flags, & args_so_far);
2790 /* If call is cse'able, make appropriate pair of reg-notes around it.
2791 Test valreg so we don't crash; may safely ignore `const'
2792 if return type is void. Disable for PARALLEL return values, because
2793 we have no way to move such values into a pseudo register. */
2794 if (pass && (flags & ECF_LIBCALL_BLOCK))
2798 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2800 insns = get_insns ();
2802 /* Expansion of block moves possibly introduced a loop that may
2803 not appear inside libcall block. */
2804 for (insn = insns; insn; insn = NEXT_INSN (insn))
2816 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2818 /* Mark the return value as a pointer if needed. */
2819 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2820 mark_reg_pointer (temp,
2821 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2824 if (flag_unsafe_math_optimizations
2826 && DECL_BUILT_IN (fndecl)
2827 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2828 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2829 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2830 note = gen_rtx_fmt_e (SQRT,
2832 args[0].initial_value);
2835 /* Construct an "equal form" for the value which
2836 mentions all the arguments in order as well as
2837 the function name. */
2838 for (i = 0; i < num_actuals; i++)
2839 note = gen_rtx_EXPR_LIST (VOIDmode,
2840 args[i].initial_value, note);
2841 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
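/* Schematically (a sketch), for a two-argument call the note built
   above is an EXPR_LIST chain of the form

     (expr_list FUNEXP (expr_list ARG (expr_list ARG (nil))))

   with a (use (mem ...)) prepended just below for pure functions.  */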
2843 if (flags & ECF_PURE)
2844 note = gen_rtx_EXPR_LIST (VOIDmode,
2845 gen_rtx_USE (VOIDmode,
2846 gen_rtx_MEM (BLKmode,
2847 gen_rtx_SCRATCH (VOIDmode))),
2850 emit_libcall_block (insns, temp, valreg, note);
2855 else if (pass && (flags & ECF_MALLOC))
2857 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2860 /* The return value from a malloc-like function is a pointer. */
2861 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2862 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2864 emit_move_insn (temp, valreg);
2866 /* The return value from a malloc-like function can not alias anything else.  */
2868 last = get_last_insn ();
2870 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2872 /* Write out the sequence. */
2873 insns = get_insns ();
2879 /* For calls to `setjmp', etc., inform flow.c it should complain
2880 if nonvolatile values are live. For functions that cannot return,
2881 inform flow that control does not fall through. */
2883 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
2885 /* The barrier must be emitted
2886 immediately after the CALL_INSN. Some ports emit more
2887 than just a CALL_INSN above, so we must search for it here. */
2889 rtx last = get_last_insn ();
2890 while (!CALL_P (last))
2892 last = PREV_INSN (last);
2893 /* There was no CALL_INSN? */
2894 if (last == before_call)
2898 emit_barrier_after (last);
2900 /* Stack adjustments after a noreturn call are dead code.
2901 However when NO_DEFER_POP is in effect, we must preserve
2902 stack_pointer_delta. */
2903 if (inhibit_defer_pop == 0)
2905 stack_pointer_delta = old_stack_allocated;
2906 pending_stack_adjust = 0;
2910 if (flags & ECF_LONGJMP)
2911 current_function_calls_longjmp = 1;
2913 /* If value type not void, return an rtx for the value. */
2915 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2917 target = const0_rtx;
2918 else if (structure_value_addr)
2920 if (target == 0 || !MEM_P (target))
2923 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2924 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2925 structure_value_addr));
2926 set_mem_attributes (target, exp, 1);
2929 else if (pcc_struct_value)
2931 /* This is the special C++ case where we need to
2932 know what the true target was. We take care to
2933 never use this value more than once in one expression. */
2934 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2935 copy_to_reg (valreg));
2936 set_mem_attributes (target, exp, 1);
2938 /* Handle calls that return values in multiple non-contiguous locations.
2939 The Irix 6 ABI has examples of this. */
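/* For example (hypothetically), a structure holding a double and an int
   might come back with the double in a floating-point register and the
   int in a general register; VALREG is then a PARALLEL listing each piece
   together with its byte offset within the structure.  */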
2940 else if (GET_CODE (valreg) == PARALLEL)
2944 /* This will only be assigned once, so it can be readonly. */
2945 tree nt = build_qualified_type (TREE_TYPE (exp),
2946 (TYPE_QUALS (TREE_TYPE (exp))
2947 | TYPE_QUAL_CONST));
2949 target = assign_temp (nt, 0, 1, 1);
2950 preserve_temp_slots (target);
2953 if (! rtx_equal_p (target, valreg))
2954 emit_group_store (target, valreg, TREE_TYPE (exp),
2955 int_size_in_bytes (TREE_TYPE (exp)));
2957 /* We can not support sibling calls for this case. */
2958 sibcall_failure = 1;
2961 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2962 && GET_MODE (target) == GET_MODE (valreg))
2964 /* TARGET and VALREG cannot be equal at this point because the
2965 latter would not have REG_FUNCTION_VALUE_P true, while the
2966 former would if it were referring to the same register.
2968 If they refer to the same register, this move will be a no-op,
2969 except when function inlining is being done. */
2970 emit_move_insn (target, valreg);
2972 /* If we are setting a MEM, this code must be executed. Since it is
2973 emitted after the call insn, sibcall optimization cannot be
2974 performed in that case. */
2976 sibcall_failure = 1;
2978 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2980 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2982 /* We can not support sibling calls for this case. */
2983 sibcall_failure = 1;
2987 if (shift_returned_value (TREE_TYPE (exp), &valreg))
2988 sibcall_failure = 1;
2990 target = copy_to_reg (valreg);
2993 if (targetm.calls.promote_function_return(funtype))
2995 /* If we promoted this return value, make the proper SUBREG. TARGET
2996 might be const0_rtx here, so be careful. */
2998 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2999 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3001 tree type = TREE_TYPE (exp);
3002 int unsignedp = TYPE_UNSIGNED (type);
3005 /* If we don't promote as expected, something is wrong. */
3006 if (GET_MODE (target)
3007 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3010 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3011 && GET_MODE_SIZE (GET_MODE (target))
3012 > GET_MODE_SIZE (TYPE_MODE (type)))
3014 offset = GET_MODE_SIZE (GET_MODE (target))
3015 - GET_MODE_SIZE (TYPE_MODE (type));
3016 if (! BYTES_BIG_ENDIAN)
3017 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3018 else if (! WORDS_BIG_ENDIAN)
3019 offset %= UNITS_PER_WORD;
3021 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3022 SUBREG_PROMOTED_VAR_P (target) = 1;
3023 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
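/* A worked example (sketch): on a 64-bit big-endian target that promotes
   an SImode return value to DImode, the mode sizes differ by 4 bytes, so
   the SUBREG just created is taken at byte offset 4, i.e. the least
   significant half of the promoted register.  */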
3027 /* If size of args is variable or this was a constructor call for a stack
3028 argument, restore saved stack-pointer value. */
3030 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3032 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3033 stack_pointer_delta = old_stack_pointer_delta;
3034 pending_stack_adjust = old_pending_adj;
3035 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3036 stack_arg_under_construction = old_stack_arg_under_construction;
3037 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3038 stack_usage_map = initial_stack_usage_map;
3039 sibcall_failure = 1;
3041 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3043 #ifdef REG_PARM_STACK_SPACE
3045 restore_fixed_argument_area (save_area, argblock,
3046 high_to_save, low_to_save);
3049 /* If we saved any argument areas, restore them. */
3050 for (i = 0; i < num_actuals; i++)
3051 if (args[i].save_area)
3053 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3055 = gen_rtx_MEM (save_mode,
3056 memory_address (save_mode,
3057 XEXP (args[i].stack_slot, 0)));
3059 if (save_mode != BLKmode)
3060 emit_move_insn (stack_area, args[i].save_area);
3062 emit_block_move (stack_area, args[i].save_area,
3063 GEN_INT (args[i].locate.size.constant),
3064 BLOCK_OP_CALL_PARM);
3067 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3068 stack_usage_map = initial_stack_usage_map;
3071 /* If this was alloca, record the new stack level for nonlocal gotos.
3072 Check for the handler slots since we might not have a save area
3073 for non-local gotos. */
3075 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3076 update_nonlocal_goto_save_area ();
3078 /* Free up storage we no longer need. */
3079 for (i = 0; i < num_actuals; ++i)
3080 if (args[i].aligned_regs)
3081 free (args[i].aligned_regs);
3083 /* If this function is returning into a memory location marked as
3084 readonly, it means it is initializing that location. We normally treat
3085 functions as not clobbering such locations, so we need to specify that
3086 this one does. We do this by adding the appropriate CLOBBER to the
3087 CALL_INSN function usage list. This cannot be done by emitting a
3088 standalone CLOBBER after the call because the latter would be ignored
3089 by at least the delay slot scheduling pass. We do this now instead of
3090 adding to call_fusage before the call to emit_call_1 because TARGET
3091 may be modified in the meantime. */
3092 if (structure_value_addr != 0 && target != 0
3093 && MEM_P (target) && RTX_UNCHANGING_P (target))
3094 add_function_usage_to
3096 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3099 insns = get_insns ();
3104 tail_call_insns = insns;
3106 /* Restore the pending stack adjustment now that we have
3107 finished generating the sibling call sequence. */
3109 pending_stack_adjust = save_pending_stack_adjust;
3110 stack_pointer_delta = save_stack_pointer_delta;
3112 /* Prepare arg structure for next iteration. */
3113 for (i = 0; i < num_actuals; i++)
3116 args[i].aligned_regs = 0;
3120 sbitmap_free (stored_args_map);
3124 normal_call_insns = insns;
3126 /* Verify that we've deallocated all the stack we used. */
3127 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3128 && old_stack_allocated != stack_pointer_delta
3129 - pending_stack_adjust)
3133 /* If something prevents making this a sibling call,
3134 zero out the sequence. */
3135 if (sibcall_failure)
3136 tail_call_insns = NULL_RTX;
3141 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3142 arguments too, as the argument area is now clobbered by the call. */
3143 if (tail_call_insns)
3145 emit_insn (tail_call_insns);
3146 cfun->tail_call_emit = true;
3149 emit_insn (normal_call_insns);
3151 currently_expanding_call--;
3153 /* If this function returns with the stack pointer depressed, ensure
3154 this block saves and restores the stack pointer, show it was
3155 changed, and adjust for any outgoing arg space. */
3156 if (flags & ECF_SP_DEPRESSED)
3158 clear_pending_stack_adjust ();
3159 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3160 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3166 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3167 this function's incoming arguments.
3169 At the start of RTL generation we know the only REG_EQUIV notes
3170 in the rtl chain are those for incoming arguments, so we can safely
3171 flush any REG_EQUIV note.
3173 This is (slight) overkill. We could keep track of the highest
3174 argument we clobber and be more selective in removing notes, but it
3175 does not seem to be worth the effort. */
3177 fixup_tail_calls (void)
3182 purge_reg_equiv_notes ();
3184 /* A sibling call sequence may also invalidate the RTX_UNCHANGING_P
3185 flag of some incoming arguments' MEM RTLs, because it can write into
3186 those slots. We clear all those bits now.
3188 This is (slight) overkill; we could keep track of which arguments
3189 we actually write into. */
3190 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3193 purge_mem_unchanging_flag (PATTERN (insn));
3196 /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
3197 arguments passed in registers. */
3198 for (arg = DECL_ARGUMENTS (current_function_decl);
3200 arg = TREE_CHAIN (arg))
3202 if (REG_P (DECL_RTL (arg)))
3203 RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
3207 /* Traverse an argument list in VALUES and expand all complex
3208 arguments into their components. */
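/* For example, assuming the target elects to split complex arguments,
   a call written as

     f (z, n)

   where z has __complex__ float type is expanded as if it were

     f (__real__ z, __imag__ z, n)

   with the real and imaginary parts passed as two separate float
   arguments.  */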
3210 split_complex_values (tree values)
3214 /* Before allocating memory, check for the common case of no complex. */
3215 for (p = values; p; p = TREE_CHAIN (p))
3217 tree type = TREE_TYPE (TREE_VALUE (p));
3218 if (type && TREE_CODE (type) == COMPLEX_TYPE
3219 && targetm.calls.split_complex_arg (type))
3225 values = copy_list (values);
3227 for (p = values; p; p = TREE_CHAIN (p))
3229 tree complex_value = TREE_VALUE (p);
3232 complex_type = TREE_TYPE (complex_value);
3236 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3237 && targetm.calls.split_complex_arg (complex_type))
3240 tree real, imag, next;
3242 subtype = TREE_TYPE (complex_type);
3243 complex_value = save_expr (complex_value);
3244 real = build1 (REALPART_EXPR, subtype, complex_value);
3245 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3247 TREE_VALUE (p) = real;
3248 next = TREE_CHAIN (p);
3249 imag = build_tree_list (NULL_TREE, imag);
3250 TREE_CHAIN (p) = imag;
3251 TREE_CHAIN (imag) = next;
3253 /* Skip the newly created node. */
3261 /* Traverse a list of TYPES and expand all complex types into their components.  */
3264 split_complex_types (tree types)
3268 /* Before allocating memory, check for the common case of no complex. */
3269 for (p = types; p; p = TREE_CHAIN (p))
3271 tree type = TREE_VALUE (p);
3272 if (TREE_CODE (type) == COMPLEX_TYPE
3273 && targetm.calls.split_complex_arg (type))
3279 types = copy_list (types);
3281 for (p = types; p; p = TREE_CHAIN (p))
3283 tree complex_type = TREE_VALUE (p);
3285 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3286 && targetm.calls.split_complex_arg (complex_type))
3290 /* Rewrite complex type with component type. */
3291 TREE_VALUE (p) = TREE_TYPE (complex_type);
3292 next = TREE_CHAIN (p);
3294 /* Add another component type for the imaginary part. */
3295 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3296 TREE_CHAIN (p) = imag;
3297 TREE_CHAIN (imag) = next;
3299 /* Skip the newly created node. */
3307 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3308 The RETVAL parameter specifies whether the return value needs to be saved; the other
3309 parameters are documented in the emit_library_call function below. */
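/* A typical use, through the public emit_library_call_value wrapper
   (a sketch; LIBFUNC, OP0 and OP1 are placeholders):

     result = emit_library_call_value (libfunc, NULL_RTX, LCT_NORMAL,
                                       SImode, 2,
                                       op0, SImode, op1, SImode);

   which passes OP0 and OP1 in SImode and returns the SImode result.  */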
3312 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3313 enum libcall_type fn_type,
3314 enum machine_mode outmode, int nargs, va_list p)
3316 /* Total size in bytes of all the stack-parms scanned so far. */
3317 struct args_size args_size;
3318 /* Size of arguments before any adjustments (such as rounding). */
3319 struct args_size original_args_size;
3325 CUMULATIVE_ARGS args_so_far;
3329 enum machine_mode mode;
3332 struct locate_and_pad_arg_data locate;
3336 int old_inhibit_defer_pop = inhibit_defer_pop;
3337 rtx call_fusage = 0;
3340 int pcc_struct_value = 0;
3341 int struct_value_size = 0;
3343 int reg_parm_stack_space = 0;
3346 tree tfom; /* type_for_mode (outmode, 0) */
3348 #ifdef REG_PARM_STACK_SPACE
3349 /* Define the boundary of the register parm stack space that needs to be saved, if any.  */
3351 int low_to_save, high_to_save;
3352 rtx save_area = 0; /* Place that it is saved. */
3355 /* Size of the stack reserved for parameter registers. */
3356 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3357 char *initial_stack_usage_map = stack_usage_map;
3359 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3361 #ifdef REG_PARM_STACK_SPACE
3362 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3365 /* By default, library functions can not throw. */
3366 flags = ECF_NOTHROW;
3378 case LCT_CONST_MAKE_BLOCK:
3379 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3381 case LCT_PURE_MAKE_BLOCK:
3382 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3385 flags |= ECF_NORETURN;
3388 flags = ECF_NORETURN;
3390 case LCT_ALWAYS_RETURN:
3391 flags = ECF_ALWAYS_RETURN;
3393 case LCT_RETURNS_TWICE:
3394 flags = ECF_RETURNS_TWICE;
3399 /* Ensure current function's preferred stack boundary is at least
3401 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3402 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3404 /* If this kind of value comes back in memory,
3405 decide where in memory it should come back. */
3406 if (outmode != VOIDmode)
3408 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3409 if (aggregate_value_p (tfom, 0))
3411 #ifdef PCC_STATIC_STRUCT_RETURN
3413 = hard_function_value (build_pointer_type (tfom), 0, 0);
3414 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3415 pcc_struct_value = 1;
3417 value = gen_reg_rtx (outmode);
3418 #else /* not PCC_STATIC_STRUCT_RETURN */
3419 struct_value_size = GET_MODE_SIZE (outmode);
3420 if (value != 0 && MEM_P (value))
3423 mem_value = assign_temp (tfom, 0, 1, 1);
3425 /* This call returns a big structure. */
3426 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3430 tfom = void_type_node;
3432 /* ??? Unfinished: must pass the memory address as an argument. */
3434 /* Copy all the libcall-arguments out of the varargs data
3435 and into a vector ARGVEC.
3437 Compute how to pass each argument. We only support a very small subset
3438 of the full argument passing conventions to limit complexity here since
3439 library functions shouldn't have many args. */
3441 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3442 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3444 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3445 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3447 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3450 args_size.constant = 0;
3455 /* Now we are about to start emitting insns that can be deleted
3456 if a libcall is deleted. */
3457 if (flags & ECF_LIBCALL_BLOCK)
3462 /* If there's a structure value address to be passed,
3463 either pass it in the special place, or pass it as an extra argument. */
3464 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3466 rtx addr = XEXP (mem_value, 0);
3469 /* Make sure it is a reasonable operand for a move or push insn. */
3470 if (!REG_P (addr) && !MEM_P (addr)
3471 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3472 addr = force_operand (addr, NULL_RTX);
3474 argvec[count].value = addr;
3475 argvec[count].mode = Pmode;
3476 argvec[count].partial = 0;
3478 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3479 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3482 locate_and_pad_parm (Pmode, NULL_TREE,
3483 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3486 argvec[count].reg != 0,
3488 0, NULL_TREE, &args_size, &argvec[count].locate);
3490 if (argvec[count].reg == 0 || argvec[count].partial != 0
3491 || reg_parm_stack_space > 0)
3492 args_size.constant += argvec[count].locate.size.constant;
3494 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3499 for (; count < nargs; count++)
3501 rtx val = va_arg (p, rtx);
3502 enum machine_mode mode = va_arg (p, enum machine_mode);
3504 /* We cannot convert the arg value to the mode the library wants here;
3505 must do it earlier where we know the signedness of the arg. */
3507 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3510 /* Make sure it is a reasonable operand for a move or push insn. */
3511 if (!REG_P (val) && !MEM_P (val)
3512 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3513 val = force_operand (val, NULL_RTX);
3515 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3518 int must_copy = ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3521 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3522 functions, so we have to pretend this isn't such a function. */
3523 if (flags & ECF_LIBCALL_BLOCK)
3525 rtx insns = get_insns ();
3529 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3531 /* If this was a CONST function, it is now PURE since
3532 it now reads memory. */
3533 if (flags & ECF_CONST)
3535 flags &= ~ECF_CONST;
3539 if (MEM_P (val) && ! must_copy)
3543 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3545 emit_move_insn (slot, val);
3549 tree type = lang_hooks.types.type_for_mode (mode, 0);
3552 = gen_rtx_MEM (mode,
3553 expand_expr (build1 (ADDR_EXPR,
3554 build_pointer_type (type),
3555 make_tree (type, val)),
3556 NULL_RTX, VOIDmode, 0));
3559 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3560 gen_rtx_USE (VOIDmode, slot),
3563 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3564 gen_rtx_CLOBBER (VOIDmode,
3569 val = force_operand (XEXP (slot, 0), NULL_RTX);
3572 argvec[count].value = val;
3573 argvec[count].mode = mode;
3575 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3577 argvec[count].partial
3578 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3580 locate_and_pad_parm (mode, NULL_TREE,
3581 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3584 argvec[count].reg != 0,
3586 argvec[count].partial,
3587 NULL_TREE, &args_size, &argvec[count].locate);
3589 if (argvec[count].locate.size.var)
3592 if (argvec[count].reg == 0 || argvec[count].partial != 0
3593 || reg_parm_stack_space > 0)
3594 args_size.constant += argvec[count].locate.size.constant;
3596 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3599 /* If this machine requires an external definition for library
3600 functions, write one out. */
3601 assemble_external_libcall (fun);
3603 original_args_size = args_size;
3604 args_size.constant = (((args_size.constant
3605 + stack_pointer_delta
3609 - stack_pointer_delta);
3611 args_size.constant = MAX (args_size.constant,
3612 reg_parm_stack_space);
3614 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3615 args_size.constant -= reg_parm_stack_space;
3618 if (args_size.constant > current_function_outgoing_args_size)
3619 current_function_outgoing_args_size = args_size.constant;
3621 if (ACCUMULATE_OUTGOING_ARGS)
3623 /* Since the stack pointer will never be pushed, it is possible for
3624 the evaluation of a parm to clobber something we have already
3625 written to the stack. Since most function calls on RISC machines
3626 do not use the stack, this is uncommon, but must work correctly.
3628 Therefore, we save any area of the stack that was already written
3629 and that we are using. Here we set up to do this by making a new
3630 stack usage map from the old one.
3632 Another approach might be to try to reorder the argument
3633 evaluations to avoid this conflicting stack usage. */
3635 needed = args_size.constant;
3637 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3638 /* Since we will be writing into the entire argument area, the
3639 map must be allocated for its entire size, not just the part that
3640 is the responsibility of the caller. */
3641 needed += reg_parm_stack_space;
3644 #ifdef ARGS_GROW_DOWNWARD
3645 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3648 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3651 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3653 if (initial_highest_arg_in_use)
3654 memcpy (stack_usage_map, initial_stack_usage_map,
3655 initial_highest_arg_in_use);
3657 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3658 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3659 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3662 /* We must be careful to use virtual regs before they're instantiated,
3663 and real regs afterwards. Loop optimization, for example, can create
3664 new libcalls after we've instantiated the virtual regs, and if we
3665 use virtuals anyway, they won't match the rtl patterns. */
3667 if (virtuals_instantiated)
3668 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3670 argblock = virtual_outgoing_args_rtx;
3675 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3678 /* If we push args individually in reverse order, perform stack alignment
3679 before the first push (the last arg). */
3680 if (argblock == 0 && PUSH_ARGS_REVERSED)
3681 anti_adjust_stack (GEN_INT (args_size.constant
3682 - original_args_size.constant));
3684 if (PUSH_ARGS_REVERSED)
3695 #ifdef REG_PARM_STACK_SPACE
3696 if (ACCUMULATE_OUTGOING_ARGS)
3698 /* The argument list is the property of the called routine and it
3699 may clobber it. If the fixed area has been used for previous
3700 parameters, we must save and restore it. */
3701 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3702 &low_to_save, &high_to_save);
3706 /* Push the args that need to be pushed. */
3708 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3709 are to be pushed. */
3710 for (count = 0; count < nargs; count++, argnum += inc)
3712 enum machine_mode mode = argvec[argnum].mode;
3713 rtx val = argvec[argnum].value;
3714 rtx reg = argvec[argnum].reg;
3715 int partial = argvec[argnum].partial;
3716 int lower_bound = 0, upper_bound = 0, i;
3718 if (! (reg != 0 && partial == 0))
3720 if (ACCUMULATE_OUTGOING_ARGS)
3722 /* If this is being stored into a pre-allocated, fixed-size
3723 stack area, save any previous data at that location. */
3725 #ifdef ARGS_GROW_DOWNWARD
3726 /* stack_slot is negative, but we want to index stack_usage_map
3727 with positive values. */
3728 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3729 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3731 lower_bound = argvec[argnum].locate.offset.constant;
3732 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3736 /* Don't worry about things in the fixed argument area;
3737 it has already been saved. */
3738 if (i < reg_parm_stack_space)
3739 i = reg_parm_stack_space;
3740 while (i < upper_bound && stack_usage_map[i] == 0)
3743 if (i < upper_bound)
3745 /* We need to make a save area. */
3747 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3748 enum machine_mode save_mode
3749 = mode_for_size (size, MODE_INT, 1);
3751 = plus_constant (argblock,
3752 argvec[argnum].locate.offset.constant);
3754 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3756 if (save_mode == BLKmode)
3758 argvec[argnum].save_area
3759 = assign_stack_temp (BLKmode,
3760 argvec[argnum].locate.size.constant,
3763 emit_block_move (validize_mem (argvec[argnum].save_area),
3765 GEN_INT (argvec[argnum].locate.size.constant),
3766 BLOCK_OP_CALL_PARM);
3770 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3772 emit_move_insn (argvec[argnum].save_area, stack_area);
3777 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3778 partial, reg, 0, argblock,
3779 GEN_INT (argvec[argnum].locate.offset.constant),
3780 reg_parm_stack_space,
3781 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3783 /* Now mark the segment we just used. */
3784 if (ACCUMULATE_OUTGOING_ARGS)
3785 for (i = lower_bound; i < upper_bound; i++)
3786 stack_usage_map[i] = 1;
3792 /* If we pushed args in forward order, perform stack alignment
3793 after pushing the last arg. */
3794 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3795 anti_adjust_stack (GEN_INT (args_size.constant
3796 - original_args_size.constant));
3798 if (PUSH_ARGS_REVERSED)
3803 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3805 /* Now load any reg parms into their regs. */
3807 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3808 are to be pushed. */
3809 for (count = 0; count < nargs; count++, argnum += inc)
3811 enum machine_mode mode = argvec[argnum].mode;
3812 rtx val = argvec[argnum].value;
3813 rtx reg = argvec[argnum].reg;
3814 int partial = argvec[argnum].partial;
3816 /* Handle calls that pass values in multiple non-contiguous
3817 locations. The PA64 has examples of this for library calls. */
3818 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3819 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3820 else if (reg != 0 && partial == 0)
3821 emit_move_insn (reg, val);
3826 /* Any regs containing parms remain in use through the call. */
3827 for (count = 0; count < nargs; count++)
3829 rtx reg = argvec[count].reg;
3830 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3831 use_group_regs (&call_fusage, reg);
3833 use_reg (&call_fusage, reg);
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
    }
  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);
  /* Stack must be properly aligned now.  */
  if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
    abort ();

  before_call = get_last_insn ();
  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun, NULL,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */
  if (flags & (ECF_NORETURN | ECF_LONGJMP))
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */
      rtx last = get_last_insn ();
      while (!CALL_P (last))
        {
          last = PREV_INSN (last);
          /* There was no CALL_INSN?  */
          if (last == before_call)
            abort ();
        }

      emit_barrier_after (last);
    }
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
        {
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
        }
      else
        {
          rtx note = 0, temp;
          int i;

          if (GET_CODE (valreg) == PARALLEL)
            {
              temp = gen_reg_rtx (outmode);
              emit_group_store (temp, valreg, NULL_TREE,
                                GET_MODE_SIZE (outmode));
              valreg = temp;
            }

          temp = gen_reg_rtx (GET_MODE (valreg));

          /* Construct an "equal form" for the value which mentions all the
             arguments in order as well as the function name.  */
          for (i = 0; i < nargs; i++)
            note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
          note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

          insns = get_insns ();
          end_sequence ();

          if (flags & ECF_PURE)
            note = gen_rtx_EXPR_LIST (VOIDmode,
                                      gen_rtx_USE (VOIDmode,
                                                   gen_rtx_MEM (BLKmode,
                                                                gen_rtx_SCRATCH (VOIDmode))),
                                      note);

          emit_libcall_block (insns, temp, valreg, note);
          valreg = temp;
        }
    }
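  /* Editorial sketch (an assumption, not from the original sources): for a
     libcall such as __udivdi3 with operands OP0 and OP1, the "equal form"
     built above is a chain of EXPR_LISTs naming the function and the
     argument values, roughly

         (expr_list (symbol_ref "__udivdi3")
            (expr_list OP1 (expr_list OP0 (nil))))

     emit_libcall_block attaches it as a REG_EQUAL note on the insn that
     copies the hard return register into TEMP, which is what lets CSE reuse
     the result of an identical earlier library call.  */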
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
        }
      else if (value != 0)
        emit_move_insn (value, valreg);
      else
        value = valreg;
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx adr = plus_constant (argblock,
                                     argvec[count].locate.offset.constant);
            rtx stack_area = gen_rtx_MEM (save_mode,
                                          memory_address (save_mode, adr));

            if (save_mode == BLKmode)
              emit_block_move (stack_area,
                               validize_mem (argvec[count].save_area),
                               GEN_INT (argvec[count].locate.size.constant),
                               BLOCK_OP_CALL_PARM);
            else
              emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or other LCT_ value for other types of library calls.  */
void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
                   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
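/* Editorial usage sketch, kept out of the build with #if 0: a back end or
   expander typically invokes emit_library_call with each argument rtx
   followed by its machine mode.  LIBFUNC, OP0 and OP1 below are hypothetical
   locals, not symbols defined elsewhere in GCC.  */
#if 0
static void
example_emit_two_operand_libcall (rtx libfunc, rtx op0, rtx op1)
{
  /* NARGS is 2; the trailing varargs are (value, mode) pairs.  */
  emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
                     op0, SImode, op1, SImode);
}
#endif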
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */
rtx
emit_library_call_value (rtx orgfun, rtx value,
                         enum libcall_type fn_type,
                         enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);
  va_end (p);

  return result;
}
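/* Editorial usage sketch, again guarded with #if 0: emit_library_call_value
   is the variant to use when the call produces a value.  DIV_LIBFUNC, TARGET,
   OP0 and OP1 are hypothetical; the returned rtx holds the DImode result in
   whatever location was chosen (TARGET itself, if TARGET is nonzero).  */
#if 0
static rtx
example_emit_divdi3_libcall (rtx div_libfunc, rtx target, rtx op0, rtx op1)
{
  return emit_library_call_value (div_libfunc, target, LCT_CONST, DImode, 2,
                                  op0, DImode, op1, DImode);
}
#endif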
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   FNDECL is the declaration of the function we are calling.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
               int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;
  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;

          lower_bound = upper_bound - arg->locate.size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));

          upper_bound = lower_bound + arg->locate.size.constant;
#endif
          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  */
              unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
              enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
              rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
              rtx stack_area = gen_rtx_MEM (save_mode, adr);
              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   expr_size (arg->tree_value),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.  */
    abort ();
  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;
  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */
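      /* Editorial illustration (not from the original source): the second
         situation arises for source code such as

             struct big { int x[16]; };
             extern struct big make_big (void);
             extern void use_big (int, struct big);

             use_big (1, make_big ());

         where make_big returns a BLKmode aggregate that is constructed
         directly in use_big's outgoing argument area, so the whole argument
         area must be protected around the nested call.  */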
      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);
      /* If we are promoting the object (or the mode otherwise does not
         agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }
  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;
      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;
      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
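      /* Worked example (editorial, assuming PARM_BOUNDARY == 32 and a QImode
         argument): size is 1 (or whatever PUSH_ROUNDING makes of it), the
         expression above rounds it up to the 4-byte parameter boundary, so
         used becomes 4 and emit_push_insn is told to pad by used - size.  */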
      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */
      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->locate.size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because
             emit_push_insn for BLKmode is careful to avoid it.  */
          if (reg && GET_CODE (reg) == PARALLEL)
            {
              /* Use the size of the elt to compute excess.  */
              rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
              excess = (arg->locate.size.constant
                        - int_size_in_bytes (TREE_TYPE (pval))
                        + partial * GET_MODE_SIZE (GET_MODE (elt)));
            }
          else
            excess = (arg->locate.size.constant
                      - int_size_in_bytes (TREE_TYPE (pval))
                      + partial * UNITS_PER_WORD);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }
      /* Some types will require stricter alignment, which will be
         provided for elsewhere in argument layout.  */
      parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->locate.size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
            }
        }
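      /* Worked example (editorial): if excess is 6 bytes, excess & -excess
         isolates its lowest set bit, 2, so excess_align is 16 bits and
         parm_align drops to at most a halfword; only the alignment actually
         guaranteed by the padded start of the argument is claimed.  */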
      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->locate.offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
                abort ();

              if (arg->locate.offset.constant > i)
                {
                  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->locate.offset.constant < i)
                {
                  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }
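      /* Worked example (editorial): suppose the incoming MEM sits at
         internal_arg_pointer + 20 (so i == 20), the outgoing slot starts at
         offset 16, and size_rtx is 8.  Then offset.constant < i and
         i < 16 + 8, so the ranges [16,24) and [20,28) overlap and
         sibcall_failure is set, forcing a normal (non-tail) call.  */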
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }
  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;
  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.  */

bool
must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
                             tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  return false;
}
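/* Editorial illustration (not from the original source): two kinds of type
   trip the checks above, e.g. a GNU C variable-sized structure such as the
   type of X in

       void f (int n) { struct { char buf[n]; } x; ... }

   whose TYPE_SIZE is not an INTEGER_CST, and a C++ class that must be
   constructed in memory, which the front end marks with TREE_ADDRESSABLE.  */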
/* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
   takes trailing padding of a structure into account.  */
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */
bool
must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}