+/* Arguments to the call.  An EXPR_LIST of CONCAT (location, value)
+   pairs accumulated by prepare_call_arguments for the current
+   CALL_INSN.  */
+static rtx call_arguments;
+
+/* Compute call_arguments for the CALL_INSN INSN in basic block BB:
+   build an EXPR_LIST of CONCAT (location, value) pairs describing the
+   call's argument values — and, for indirect or virtual calls, the
+   call target and vtable slot — using cselib values preserved at this
+   point.  The list is accumulated in reverse and re-reversed at the
+   end.  */
+
+static void
+prepare_call_arguments (basic_block bb, rtx insn)
+{
+ rtx link, x;
+ rtx prev, cur, next;
+ rtx call = PATTERN (insn);
+ rtx this_arg = NULL_RTX;
+ tree type = NULL_TREE, t, fndecl = NULL_TREE;
+ tree obj_type_ref = NULL_TREE;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
+
+ memset (&args_so_far_v, 0, sizeof (args_so_far_v));
+ args_so_far = pack_cumulative_args (&args_so_far_v);
+ /* Dig the CALL rtx out of a possible PARALLEL / SET wrapper.  */
+ if (GET_CODE (call) == PARALLEL)
+ call = XVECEXP (call, 0, 0);
+ if (GET_CODE (call) == SET)
+ call = SET_SRC (call);
+ if (GET_CODE (call) == CALL && MEM_P (XEXP (call, 0)))
+ {
+ /* Identify the callee: prefer the decl behind a direct SYMBOL_REF,
+ otherwise fall back to the MEM_EXPR of the call address.  */
+ if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
+ {
+ rtx symbol = XEXP (XEXP (call, 0), 0);
+ if (SYMBOL_REF_DECL (symbol))
+ fndecl = SYMBOL_REF_DECL (symbol);
+ }
+ if (fndecl == NULL_TREE)
+ fndecl = MEM_EXPR (XEXP (call, 0));
+ /* Only function or method types are usable below.  */
+ if (fndecl
+ && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
+ && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
+ fndecl = NULL_TREE;
+ if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
+ type = TREE_TYPE (fndecl);
+ if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
+ {
+ /* An INDIRECT_REF of an OBJ_TYPE_REF marks a virtual call;
+ remember the OBJ_TYPE_REF so the this pointer and vtable slot
+ can be recorded below.  */
+ if (TREE_CODE (fndecl) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
+ obj_type_ref = TREE_OPERAND (fndecl, 0);
+ fndecl = NULL_TREE;
+ }
+ if (type)
+ {
+ /* Keep TYPE only if some argument is a reference to an integral
+ type (whose pointed-to value is worth recording) or this is a
+ virtual call; otherwise the per-argument walk below has
+ nothing extra to add.  */
+ for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
+ t = TREE_CHAIN (t))
+ if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
+ && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
+ break;
+ if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
+ type = NULL;
+ else
+ {
+ int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
+ link = CALL_INSN_FUNCTION_USAGE (insn);
+#ifndef PCC_STATIC_STRUCT_RETURN
+ /* An aggregate return value with no struct-value register is
+ passed as a hidden first pointer argument; advance
+ args_so_far over it, and when it was passed on the stack
+ (no reg), skip LINK past the first MEM USE, which
+ presumably is that hidden argument's slot.  */
+ if (aggregate_value_p (TREE_TYPE (type), type)
+ && targetm.calls.struct_value_rtx (type, 0) == 0)
+ {
+ tree struct_addr = build_pointer_type (TREE_TYPE (type));
+ enum machine_mode mode = TYPE_MODE (struct_addr);
+ rtx reg;
+ INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
+ nargs + 1);
+ reg = targetm.calls.function_arg (args_so_far, mode,
+ struct_addr, true);
+ targetm.calls.function_arg_advance (args_so_far, mode,
+ struct_addr, true);
+ if (reg == NULL_RTX)
+ {
+ for (; link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == USE
+ && MEM_P (XEXP (XEXP (link, 0), 0)))
+ {
+ link = XEXP (link, 1);
+ break;
+ }
+ }
+ }
+ else
+#endif
+ INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
+ nargs);
+ /* For a virtual call, find the location of the this pointer,
+ i.e. the first argument: either its register, or the first
+ MEM USE when it was passed on the stack.  */
+ if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
+ {
+ enum machine_mode mode;
+ t = TYPE_ARG_TYPES (type);
+ mode = TYPE_MODE (TREE_VALUE (t));
+ this_arg = targetm.calls.function_arg (args_so_far, mode,
+ TREE_VALUE (t), true);
+ if (this_arg && !REG_P (this_arg))
+ this_arg = NULL_RTX;
+ else if (this_arg == NULL_RTX)
+ {
+ for (; link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == USE
+ && MEM_P (XEXP (XEXP (link, 0), 0)))
+ {
+ this_arg = XEXP (XEXP (link, 0), 0);
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+ t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
+
+ /* Walk the USEs in CALL_INSN_FUNCTION_USAGE (argument registers and
+ stack slots), recording a preserved cselib value for each location
+ where one is known, while stepping T through the formal argument
+ types in parallel.  */
+ for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == USE)
+ {
+ rtx item = NULL_RTX;
+ x = XEXP (XEXP (link, 0), 0);
+ if (GET_MODE (link) == VOIDmode
+ || GET_MODE (link) == BLKmode
+ || (GET_MODE (link) != GET_MODE (x)
+ && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
+ || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)))
+ /* Can't do anything for these, if the original type mode
+ isn't known or can't be converted. */;
+ else if (REG_P (x))
+ {
+ cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
+ if (val && cselib_preserved_value_p (val))
+ item = val->val_rtx;
+ else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
+ {
+ /* No preserved value in this mode; retry the same hard
+ register in wider integer modes up to a word.  */
+ enum machine_mode mode = GET_MODE (x);
+
+ while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
+ && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
+ {
+ rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
+
+ if (reg == NULL_RTX || !REG_P (reg))
+ continue;
+ val = cselib_lookup (reg, mode, 0, VOIDmode);
+ if (val && cselib_preserved_value_p (val))
+ {
+ item = val->val_rtx;
+ break;
+ }
+ }
+ }
+ }
+ else if (MEM_P (x))
+ {
+ rtx mem = x;
+ cselib_val *val;
+
+ if (!frame_pointer_needed)
+ {
+ /* Without a frame pointer, rewrite the stack address to
+ undo the stack adjustment accumulated at BB exit so the
+ cselib lookup can match.  */
+ struct adjust_mem_data amd;
+ amd.mem_mode = VOIDmode;
+ amd.stack_adjust = -VTI (bb)->out.stack_adjust;
+ amd.side_effects = NULL_RTX;
+ amd.store = true;
+ mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
+ &amd);
+ gcc_assert (amd.side_effects == NULL_RTX);
+ }
+ val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
+ if (val && cselib_preserved_value_p (val))
+ item = val->val_rtx;
+ else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT)
+ {
+ /* For non-integer stack argument see also if they weren't
+ initialized by integers. */
+ enum machine_mode imode = int_mode_for_mode (GET_MODE (mem));
+ if (imode != GET_MODE (mem) && imode != BLKmode)
+ {
+ val = cselib_lookup (adjust_address_nv (mem, imode, 0),
+ imode, 0, VOIDmode);
+ if (val && cselib_preserved_value_p (val))
+ item = lowpart_subreg (GET_MODE (x), val->val_rtx,
+ imode);
+ }
+ }
+ }
+ if (item)
+ {
+ /* Record a (location, value) CONCAT, converted to the original
+ type mode of the argument, GET_MODE (link).  */
+ rtx x2 = x;
+ if (GET_MODE (item) != GET_MODE (link))
+ item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
+ if (GET_MODE (x2) != GET_MODE (link))
+ x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
+ item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
+ call_arguments
+ = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
+ }
+ if (t && t != void_list_node)
+ {
+ /* For an integral value passed by reference in register X, try
+ to also record the dereferenced value *X.  */
+ tree argtype = TREE_VALUE (t);
+ enum machine_mode mode = TYPE_MODE (argtype);
+ rtx reg;
+ if (pass_by_reference (&args_so_far_v, mode, argtype, true))
+ {
+ argtype = build_pointer_type (argtype);
+ mode = TYPE_MODE (argtype);
+ }
+ reg = targetm.calls.function_arg (args_so_far, mode,
+ argtype, true);
+ if (TREE_CODE (argtype) == REFERENCE_TYPE
+ && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
+ && reg
+ && REG_P (reg)
+ && GET_MODE (reg) == mode
+ && GET_MODE_CLASS (mode) == MODE_INT
+ && REG_P (x)
+ && REGNO (x) == REGNO (reg)
+ && GET_MODE (x) == mode
+ && item)
+ {
+ enum machine_mode indmode
+ = TYPE_MODE (TREE_TYPE (argtype));
+ rtx mem = gen_rtx_MEM (indmode, x);
+ cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
+ if (val && cselib_preserved_value_p (val))
+ {
+ item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
+ call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
+ call_arguments);
+ }
+ else
+ {
+ struct elt_loc_list *l;
+ tree initial;
+
+ /* Try harder, when passing address of a constant
+ pool integer it can be easily read back.  ITEM is the
+ CONCAT built above; its second operand is the VALUE
+ (possibly under a SUBREG) whose locations we scan for
+ a constant-pool SYMBOL_REF with a known integer
+ initializer.  */
+ item = XEXP (item, 1);
+ if (GET_CODE (item) == SUBREG)
+ item = SUBREG_REG (item);
+ gcc_assert (GET_CODE (item) == VALUE);
+ val = CSELIB_VAL_PTR (item);
+ for (l = val->locs; l; l = l->next)
+ if (GET_CODE (l->loc) == SYMBOL_REF
+ && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
+ && SYMBOL_REF_DECL (l->loc)
+ && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
+ {
+ initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
+ if (host_integerp (initial, 0))
+ {
+ item = GEN_INT (tree_low_cst (initial, 0));
+ item = gen_rtx_CONCAT (indmode, mem, item);
+ call_arguments
+ = gen_rtx_EXPR_LIST (VOIDmode, item,
+ call_arguments);
+ }
+ break;
+ }
+ }
+ }
+ targetm.calls.function_arg_advance (args_so_far, mode,
+ argtype, true);
+ t = TREE_CHAIN (t);
+ }
+ }
+
+ /* Add debug arguments. */
+ if (fndecl
+ && TREE_CODE (fndecl) == FUNCTION_DECL
+ && DECL_HAS_DEBUG_ARGS_P (fndecl))
+ {
+ VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
+ if (debug_args)
+ {
+ unsigned int ix;
+ tree param;
+ /* The vector holds (parameter, debug temp) pairs, hence the
+ stride of 2; record a DEBUG_PARAMETER_REF bound to each
+ temp's DECL_RTL.  */
+ for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
+ {
+ rtx item;
+ tree dtemp = VEC_index (tree, *debug_args, ix + 1);
+ enum machine_mode mode = DECL_MODE (dtemp);
+ item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
+ item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
+ call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
+ call_arguments);
+ }
+ }
+ }
+
+ /* Reverse call_arguments chain. */
+ prev = NULL_RTX;
+ for (cur = call_arguments; cur; cur = next)
+ {
+ next = XEXP (cur, 1);
+ XEXP (cur, 1) = prev;
+ prev = cur;
+ }
+ call_arguments = prev;
+
+ /* For an indirect call, record the call target as a (pc, value)
+ pair; a direct SYMBOL_REF call needs no such note.  */
+ x = PATTERN (insn);
+ if (GET_CODE (x) == PARALLEL)
+ x = XVECEXP (x, 0, 0);
+ if (GET_CODE (x) == SET)
+ x = SET_SRC (x);
+ if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
+ {
+ x = XEXP (XEXP (x, 0), 0);
+ if (GET_CODE (x) == SYMBOL_REF)
+ /* Don't record anything. */;
+ else if (CONSTANT_P (x))
+ {
+ x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
+ pc_rtx, x);
+ call_arguments
+ = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
+ }
+ else
+ {
+ cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
+ if (val && cselib_preserved_value_p (val))
+ {
+ x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
+ call_arguments
+ = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
+ }
+ }
+ }
+ if (this_arg)
+ {
+ /* For a virtual call, record the vtable slot the target comes
+ from — the value at *(*this_arg + token * slot size) — paired
+ with a CLOBBER of pc marking it as the call target.  */
+ enum machine_mode mode
+ = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
+ rtx clobbered = gen_rtx_MEM (mode, this_arg);
+ HOST_WIDE_INT token
+ = tree_low_cst (OBJ_TYPE_REF_TOKEN (obj_type_ref), 0);
+ if (token)
+ clobbered = plus_constant (clobbered, token * GET_MODE_SIZE (mode));
+ clobbered = gen_rtx_MEM (mode, clobbered);
+ x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
+ call_arguments
+ = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
+ }
+}
+