#endif
abort ();
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
-
- if (! call_insn)
- abort ();
+ /* Find the call we just emitted. */
+ call_insn = last_call_insn ();
/* Mark memory as used for "pure" function call. */
if (ecf_flags & ECF_PURE)
gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
call_fusage);
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
-
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
-
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ /* Put the register usage information there. */
+ add_function_usage_to (call_insn, call_fusage);
/* If this is a const call, then set the insn's unchanging bit. */
if (ecf_flags & (ECF_CONST | ECF_PURE))
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
- && contains_placeholder_p (TYPE_SIZE (type)))
+ if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
|| TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
|| FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
rtx tail_call_insns = NULL_RTX;
/* Data type of the function. */
tree funtype;
+ tree type_arg_types;
/* Declaration of the function being called,
or 0 if the function is computed (not known by name). */
tree fndecl = 0;
else
flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
+ /* Warn if this value is an aggregate type,
+ regardless of which calling convention we are using for it. */
+ if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
+ warning ("function call has aggregate value");
+
+ /* If the result of a pure or const function call is ignored (or void),
+ and none of its arguments are volatile, we can avoid expanding the
+ call and just evaluate the arguments for side-effects. */
+ if ((flags & (ECF_CONST | ECF_PURE))
+ && (ignore || target == const0_rtx
+ || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
+ {
+ bool volatilep = false;
+ tree arg;
+
+ for (arg = actparms; arg; arg = TREE_CHAIN (arg))
+ if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
+ {
+ volatilep = true;
+ break;
+ }
+
+ if (! volatilep)
+ {
+ for (arg = actparms; arg; arg = TREE_CHAIN (arg))
+ expand_expr (TREE_VALUE (arg), const0_rtx,
+ VOIDmode, EXPAND_NORMAL);
+ return const0_rtx;
+ }
+ }
+
#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
must_preallocate = 1;
#endif
- /* Warn if this value is an aggregate type,
- regardless of which calling convention we are using for it. */
- if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
- warning ("function call has aggregate value");
-
/* Set up a place to return a structure. */
/* Cater to broken compilers. */
abort ();
funtype = TREE_TYPE (funtype);
+ /* Munge the tree to split complex arguments into their imaginary
+ and real parts. */
+ if (SPLIT_COMPLEX_ARGS)
+ {
+ type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
+ actparms = split_complex_values (actparms);
+ }
+ else
+ type_arg_types = TYPE_ARG_TYPES (funtype);
+
/* See if this is a call to a function that can return more than once
or a call to longjmp or malloc. */
flags |= special_function_p (fndecl, flags);
if ((STRICT_ARGUMENT_NAMING
|| ! PRETEND_OUTGOING_VARARGS_NAMED)
- && TYPE_ARG_TYPES (funtype) != 0)
+ && type_arg_types != 0)
n_named_args
- = (list_length (TYPE_ARG_TYPES (funtype))
+ = (list_length (type_arg_types)
/* Don't include the last named arg. */
- (STRICT_ARGUMENT_NAMING ? 0 : 1)
/* Count the struct value address, if it is passed as a parm. */
|| args_size.constant > current_function_args_size
/* If the callee pops its own arguments, then it must pop exactly
the same number of arguments as the current function. */
- || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
- != RETURN_POPS_ARGS (current_function_decl,
- TREE_TYPE (current_function_decl),
- current_function_args_size))
+ || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
+ != RETURN_POPS_ARGS (current_function_decl,
+ TREE_TYPE (current_function_decl),
+ current_function_args_size))
+ || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
try_tail_call = 0;
if (try_tail_call || try_tail_recursion)
if (needed == 0)
argblock = virtual_outgoing_args_rtx;
else
- argblock = push_block (GEN_INT (needed), 0, 0);
+ {
+ argblock = push_block (GEN_INT (needed), 0, 0);
+#ifdef ARGS_GROW_DOWNWARD
+ argblock = plus_constant (argblock, needed);
+#endif
+ }
/* We only really need to call `copy_to_reg' in the case
where push insns are going to be used to pass ARGBLOCK
structure value. */
if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
{
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (structure_value_addr) != Pmode)
+ structure_value_addr = convert_memory_address
+ (Pmode, structure_value_addr);
+#endif
emit_move_insn (struct_value_rtx,
force_reg (Pmode,
force_operand (structure_value_addr,
if (flags & ECF_LONGJMP)
current_function_calls_longjmp = 1;
- /* If this function is returning into a memory location marked as
- readonly, it means it is initializing that location. But we normally
- treat functions as not clobbering such locations, so we need to
- specify that this one does. */
- if (target != 0 && GET_CODE (target) == MEM
- && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
/* If value type not void, return an rtx for the value. */
/* If there are cleanups to be called, don't use a hard reg as target.
expand_end_target_temps ();
}
+ /* If this function is returning into a memory location marked as
+ readonly, it means it is initializing that location. We normally treat
+ functions as not clobbering such locations, so we need to specify that
+ this one does. We do this by adding the appropriate CLOBBER to the
+ CALL_INSN function usage list. This cannot be done by emitting a
+ standalone CLOBBER after the call because the latter would be ignored
+ by at least the delay slot scheduling pass. We do this now instead of
+ adding to call_fusage before the call to emit_call_1 because TARGET
+ may be modified in the meantime. */
+ if (structure_value_addr != 0 && target != 0
+ && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
+ add_function_usage_to
+ (last_call_insn (),
+ gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
+ NULL_RTX));
+
insns = get_insns ();
end_sequence ();
return target;
}
+
+/* Traverse an argument list in VALUES and expand all complex
+ arguments into their components. */
+tree
+split_complex_values (tree values)
+{
+ tree p;
+
+ /* Work on a copy so the caller's argument list is left intact. */
+ values = copy_list (values);
+
+ for (p = values; p; p = TREE_CHAIN (p))
+ {
+ tree complex_value = TREE_VALUE (p);
+ tree complex_type;
+
+ complex_type = TREE_TYPE (complex_value);
+ if (!complex_type)
+ continue;
+
+ if (TREE_CODE (complex_type) == COMPLEX_TYPE)
+ {
+ tree subtype;
+ tree real, imag, next;
+
+ subtype = TREE_TYPE (complex_type);
+ /* Wrap the value in a SAVE_EXPR so it is evaluated only once,
+ even though both the REALPART_EXPR and the IMAGPART_EXPR
+ built below reference it. */
+ complex_value = save_expr (complex_value);
+ real = build1 (REALPART_EXPR, subtype, complex_value);
+ imag = build1 (IMAGPART_EXPR, subtype, complex_value);
+
+ /* Replace this node with the real part and splice in a new
+ list node holding the imaginary part right after it. */
+ TREE_VALUE (p) = real;
+ next = TREE_CHAIN (p);
+ imag = build_tree_list (NULL_TREE, imag);
+ TREE_CHAIN (p) = imag;
+ TREE_CHAIN (imag) = next;
+
+ /* Skip the newly created node. */
+ p = TREE_CHAIN (p);
+ }
+ }
+
+ return values;
+}
+
+/* Traverse a list of TYPES and expand all complex types into their
+ components. */
+tree
+split_complex_types (tree types)
+{
+ tree p;
+
+ /* Work on a copy so the caller's type list is left intact. */
+ types = copy_list (types);
+
+ for (p = types; p; p = TREE_CHAIN (p))
+ {
+ tree complex_type = TREE_VALUE (p);
+
+ if (TREE_CODE (complex_type) == COMPLEX_TYPE)
+ {
+ tree next, imag;
+
+ /* Rewrite complex type with component type. */
+ TREE_VALUE (p) = TREE_TYPE (complex_type);
+ next = TREE_CHAIN (p);
+
+ /* Add another component type for the imaginary part. */
+ imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
+ TREE_CHAIN (p) = imag;
+ TREE_CHAIN (imag) = next;
+
+ /* Skip the newly created node. */
+ p = TREE_CHAIN (p);
+ }
+ }
+
+ return types;
+}
\f
/* Output a library call to function FUN (a SYMBOL_REF rtx).
The RETVAL parameter specifies whether return value needs to be saved, other
or other LCT_ value for other types of library calls. */
void
-emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...))
+emit_library_call (rtx orgfun, enum libcall_type fn_type,
+ enum machine_mode outmode, int nargs, ...)
{
- VA_OPEN (p, nargs);
- VA_FIXEDARG (p, rtx, orgfun);
- VA_FIXEDARG (p, int, fn_type);
- VA_FIXEDARG (p, enum machine_mode, outmode);
- VA_FIXEDARG (p, int, nargs);
-
+ va_list p;
+
+ /* With ISO C varargs the fixed arguments arrive as ordinary
+ parameters; P points at the first variable argument and NARGS
+ tells emit_library_call_value_1 how many to fetch from it. */
+ va_start (p, nargs);
emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
-
- VA_CLOSE (p);
+ va_end (p);
}
\f
/* Like emit_library_call except that an extra argument, VALUE,
If VALUE is nonzero, VALUE is returned. */
rtx
-emit_library_call_value VPARAMS((rtx orgfun, rtx value,
- enum libcall_type fn_type,
- enum machine_mode outmode, int nargs, ...))
+emit_library_call_value (rtx orgfun, rtx value,
+ enum libcall_type fn_type,
+ enum machine_mode outmode, int nargs, ...)
{
rtx result;
+ va_list p;
- VA_OPEN (p, nargs);
- VA_FIXEDARG (p, rtx, orgfun);
- VA_FIXEDARG (p, rtx, value);
- VA_FIXEDARG (p, int, fn_type);
- VA_FIXEDARG (p, enum machine_mode, outmode);
- VA_FIXEDARG (p, int, nargs);
-
+ /* P points at the first variable argument; NARGS tells
+ emit_library_call_value_1 how many to fetch from it. */
+ va_start (p, nargs);
result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
nargs, p);
-
- VA_CLOSE (p);
+ va_end (p);
return result;
}