+ /* See if any stack slots within the register-parameter area are
+ already in use; if so, or if a stack argument is under construction,
+ calls expanded from the inlined body could clobber the argument
+ list being built. */
+ for (i = reg_parm_stack_space - 1; i >= 0; i--)
+ if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
+ break;
+
+ if (stack_arg_under_construction || i >= 0)
+ {
+ rtx first_insn
+ = before_call ? NEXT_INSN (before_call) : get_insns ();
+ rtx insn = NULL_RTX, seq;
+
+ /* Look for a call in the inline function code.
+ If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
+ nonzero then there is a call and it is not necessary
+ to scan the insns. */
+
+ if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
+ for (insn = first_insn; insn; insn = NEXT_INSN (insn))
+ if (GET_CODE (insn) == CALL_INSN)
+ break;
+
+ if (insn)
+ {
+ /* Reserve enough stack space so that the largest
+ argument list of any function call in the inline
+ function does not overlap the argument list being
+ evaluated. This is usually an overestimate because
+ allocate_dynamic_stack_space reserves space for an
+ outgoing argument list in addition to the requested
+ space, but there is no way to ask for stack space such
+ that an argument list of a certain length can be
+ safely constructed.
+
+ Add the stack space reserved for register arguments, if
+ any, in the inline function. What is really needed is the
+ largest value of reg_parm_stack_space in the inline
+ function, but that is not available. Using the current
+ value of reg_parm_stack_space is wrong, but gives
+ correct results on all supported machines. */
+
+ int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
+ + reg_parm_stack_space);
+
+ start_sequence ();
+ emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
+ allocate_dynamic_stack_space (GEN_INT (adjust),
+ NULL_RTX, BITS_PER_UNIT);
+ seq = get_insns ();
+ end_sequence ();
+ emit_insns_before (seq, first_insn);
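+ /* Note that SEQ (the stack save and the dynamic allocation) was
+ emitted before FIRST_INSN, while the matching restore is emitted
+ here, after the inlined sequence. */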
+ emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
+ }
+ }
+ }
+
+ /* If the result is equivalent to TARGET, return TARGET to simplify
+ checks in store_expr. They can be equivalent but not equal in the
+ case of a function that returns BLKmode. */
+ if (temp != target && rtx_equal_p (temp, target))
+ return target;
+ return temp;
+ }
+
+ /* If inlining failed, mark FNDECL as needing to be compiled
+ separately after all. If the function was declared inline,
+ give a warning. */
+ if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
+ && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
+ {
+ warning_with_decl (fndecl, "inlining failed in call to `%s'");
+ warning ("called from here");
+ }
+ mark_addressable (fndecl);
+ return (rtx) (HOST_WIDE_INT) -1;
+}
+
+/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
+ wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
+ bytes, then we would need to push some additional bytes to pad the
+ arguments. So, we compute an adjustment to the stack pointer for an
+ amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
+ bytes. Then, when the arguments are pushed, the stack will be perfectly
+ aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
+ be popped after the call. Returns the adjustment. */
+
+static int
+combine_pending_stack_adjustment_and_call (unadjusted_args_size,
+ args_size,
+ preferred_unit_stack_boundary)
+ int unadjusted_args_size;
+ struct args_size *args_size;
+ int preferred_unit_stack_boundary;
+{
+ /* The number of bytes to pop so that the stack will be
+ under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
+ HOST_WIDE_INT adjustment;
+ /* The alignment of the stack after the arguments are pushed, if we
+ just pushed the arguments without adjusting the stack here. */
+ HOST_WIDE_INT unadjusted_alignment;
+
+ unadjusted_alignment
+ = ((stack_pointer_delta + unadjusted_args_size)
+ % preferred_unit_stack_boundary);
+
+ /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
+ as possible -- leaving just enough left to cancel out the
+ UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
+ PENDING_STACK_ADJUST is non-negative, and congruent to
+ -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
+
+ /* Begin by trying to pop all the bytes. */
+ unadjusted_alignment
+ = (unadjusted_alignment
+ - (pending_stack_adjust % preferred_unit_stack_boundary));
+ adjustment = pending_stack_adjust;
+ /* Then pop fewer bytes, if necessary, leaving just enough padding
+ that the stack will be aligned after the arguments are pushed. */
+ if (preferred_unit_stack_boundary > 1)
+ {
+ if (unadjusted_alignment > 0)
+ adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
+ else
+ adjustment += unadjusted_alignment;
+ }
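+ /* For example (illustrative numbers only): with
+ PREFERRED_UNIT_STACK_BOUNDARY == 16, STACK_POINTER_DELTA == 0,
+ UNADJUSTED_ARGS_SIZE == 12 and PENDING_STACK_ADJUST == 20, the
+ initial UNADJUSTED_ALIGNMENT of 12 becomes 12 - (20 % 16) == 8
+ above, so we pop only 20 - (16 - 8) == 12 bytes; pushing the 12
+ bytes of arguments afterwards leaves the stack 16-byte aligned
+ again. */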
+
+ /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
+ bytes after the call. The right number is the entire
+ PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
+ by the arguments in the first place. */
+ args_size->constant
+ = pending_stack_adjust - adjustment + unadjusted_args_size;
+
+ return adjustment;
+}
+
+/* Scan expression X to see whether it dereferences any argument slots
+ we have already clobbered with tail call arguments (as noted in the
+ STORED_ARGS_MAP bitmap). Return nonzero if X dereferences such an
+ argument slot, zero otherwise. */
+
+static int
+check_sibcall_argument_overlap_1 (x)
+ rtx x;
+{
+ RTX_CODE code;
+ int i, j;
+ unsigned int k;
+ const char *fmt;
+
+ if (x == NULL_RTX)
+ return 0;
+
+ code = GET_CODE (x);
+
+ if (code == MEM)
+ {
+ if (XEXP (x, 0) == current_function_internal_arg_pointer)
+ i = 0;
+ else if (GET_CODE (XEXP (x, 0)) == PLUS
+ && XEXP (XEXP (x, 0), 0) ==
+ current_function_internal_arg_pointer
+ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ i = INTVAL (XEXP (XEXP (x, 0), 1));
+ else
+ return 0;
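+ /* Here I is the slot's byte offset from the incoming argument
+ pointer, e.g. 0 for (mem (reg argp)) and 8 for
+ (mem (plus (reg argp) (const_int 8))). */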
+
+#ifdef ARGS_GROW_DOWNWARD
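+ /* Offsets are non-positive when the argument block grows downward;
+ flip I so that it indexes the slot's lowest byte in
+ STORED_ARGS_MAP. */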
+ i = -i - GET_MODE_SIZE (GET_MODE (x));
+#endif
+
+ for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
+ if (i + k < stored_args_map->n_bits
+ && TEST_BIT (stored_args_map, i + k))
+ return 1;
+
+ return 0;
+ }
+
+ /* Scan all subexpressions. */
+ fmt = GET_RTX_FORMAT (code);
+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
+ {
+ if (*fmt == 'e')
+ {
+ if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
+ return 1;
+ }
+ else if (*fmt == 'E')
+ {
+ for (j = 0; j < XVECLEN (x, i); j++)
+ if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
+ return 1;
+ }
+ }
+ return 0;
+}
+
+/* Scan the sequence of insns after INSN to see whether it dereferences
+ any argument slots we have already clobbered with tail call arguments
+ (as noted in the STORED_ARGS_MAP bitmap). Afterwards, add the stack
+ slots for ARG to the STORED_ARGS_MAP bitmap. Return nonzero if the
+ sequence after INSN dereferences such an argument slot, zero
+ otherwise. */
+
+static int
+check_sibcall_argument_overlap (insn, arg)
+ rtx insn;
+ struct arg_data *arg;
+{
+ int low, high;
+
+ if (insn == NULL_RTX)
+ insn = get_insns ();
+ else
+ insn = NEXT_INSN (insn);
+
+ for (; insn; insn = NEXT_INSN (insn))
+ if (INSN_P (insn)
+ && check_sibcall_argument_overlap_1 (PATTERN (insn)))
+ break;
+
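+ /* Now mark ARG's stack slots as written, using the same index
+ convention as check_sibcall_argument_overlap_1 above. */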
+#ifdef ARGS_GROW_DOWNWARD
+ low = -arg->offset.constant - arg->size.constant;
+#else
+ low = arg->offset.constant;
+#endif
+
+ for (high = low + arg->size.constant; low < high; low++)
+ SET_BIT (stored_args_map, low);
+ return insn != NULL_RTX;
+}
+
+/* Generate all the code for a function call
+ and return an rtx for its value.
+ Store the value in TARGET (specified as an rtx) if convenient.
+ If the value is stored in TARGET then TARGET is returned.
+ If IGNORE is nonzero, then we ignore the value of the function call. */
+
+rtx
+expand_call (exp, target, ignore)
+ tree exp;
+ rtx target;
+ int ignore;
+{
+ /* Nonzero if we are currently expanding a call. */
+ static int currently_expanding_call = 0;