+ if (!do_mark)
+ {
+ gcc_assert (arg_stores);
+ bitmap_clear (arg_stores);
+ }
+
+ /* Track the byte range [min_sp_off, max_sp_off) of argument stores
+ relative to the stack pointer; start with an empty range. */
+ min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
+ max_sp_off = 0;
+
+ /* First determine the minimum and maximum offset from sp for
+ stored arguments. */
+ for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
+ if (GET_CODE (XEXP (p, 0)) == USE
+ && MEM_P (XEXP (XEXP (p, 0), 0)))
+ {
+ rtx mem = XEXP (XEXP (p, 0), 0), addr;
+ HOST_WIDE_INT off = 0, size;
+ /* Without a known size we cannot bound the argument area. */
+ if (!MEM_SIZE_KNOWN_P (mem))
+ return false;
+ size = MEM_SIZE (mem);
+ addr = XEXP (mem, 0);
+ /* Peel off a (plus (reg) (const_int)) address form. */
+ if (GET_CODE (addr) == PLUS
+ && REG_P (XEXP (addr, 0))
+ && CONST_INT_P (XEXP (addr, 1)))
+ {
+ off = INTVAL (XEXP (addr, 1));
+ addr = XEXP (addr, 0);
+ }
+ if (addr != stack_pointer_rtx)
+ {
+ if (!REG_P (addr))
+ return false;
+ /* If not fast, use chains to see if addr wasn't set to
+ sp + offset. */
+ if (!fast)
+ {
+ df_ref *use_rec;
+ struct df_link *defs;
+ rtx set;
+
+ for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
+ if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
+ break;
+
+ /* ADDR is not among the call's uses: give up. */
+ if (*use_rec == NULL)
+ return false;
+
+ for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
+ if (! DF_REF_IS_ARTIFICIAL (defs->ref))
+ break;
+
+ if (defs == NULL)
+ return false;
+
+ set = single_set (DF_REF_INSN (defs->ref));
+ if (!set)
+ return false;
+
+ /* Only a plain sp + const_int definition is acceptable. */
+ if (GET_CODE (SET_SRC (set)) != PLUS
+ || XEXP (SET_SRC (set), 0) != stack_pointer_rtx
+ || !CONST_INT_P (XEXP (SET_SRC (set), 1)))
+ return false;
+
+ off += INTVAL (XEXP (SET_SRC (set), 1));
+ }
+ else
+ return false;
+ }
+ min_sp_off = MIN (min_sp_off, off);
+ max_sp_off = MAX (max_sp_off, off + size);
+ }
+
+ /* Empty range means no stack-stored arguments were found, so there
+ is nothing left to verify. */
+ if (min_sp_off >= max_sp_off)
+ return true;
+ sp_bytes = BITMAP_ALLOC (NULL);
+
+ /* Set bits in SP_BYTES bitmap for bytes relative to sp + min_sp_off
+ which contain arguments. Checking has been done in the previous
+ loop. */
+ for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
+ if (GET_CODE (XEXP (p, 0)) == USE
+ && MEM_P (XEXP (XEXP (p, 0), 0)))
+ {
+ rtx mem = XEXP (XEXP (p, 0), 0), addr;
+ HOST_WIDE_INT off = 0, byte;
+ addr = XEXP (mem, 0);
+ if (GET_CODE (addr) == PLUS
+ && REG_P (XEXP (addr, 0))
+ && CONST_INT_P (XEXP (addr, 1)))
+ {
+ off = INTVAL (XEXP (addr, 1));
+ addr = XEXP (addr, 0);
+ }
+ if (addr != stack_pointer_rtx)
+ {
+ df_ref *use_rec;
+ struct df_link *defs;
+ rtx set;
+
+ /* Re-derive the sp offset exactly as the first loop did;
+ that loop already returned false on any failure, so no
+ NULL checks are needed here. */
+ for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
+ if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
+ break;
+
+ for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
+ if (! DF_REF_IS_ARTIFICIAL (defs->ref))
+ break;
+
+ set = single_set (DF_REF_INSN (defs->ref));
+ off += INTVAL (XEXP (SET_SRC (set), 1));
+ }
+ for (byte = off; byte < off + MEM_SIZE (mem); byte++)
+ {
+ /* Argument bytes must not overlap: bitmap_set_bit returning
+ false means the bit was already set. */
+ if (!bitmap_set_bit (sp_bytes, byte - min_sp_off))
+ gcc_unreachable ();
+ }
+ }
+
+ /* Walk backwards, looking for argument stores. The search stops
+ when seeing another call, sp adjustment or memory store other than
+ argument store. */
+ ret = false;
+ for (insn = PREV_INSN (call_insn); insn; insn = prev_insn)
+ {
+ rtx set, mem, addr;
+ HOST_WIDE_INT off;
+
+ /* Compute the predecessor up front; the walk does not leave the
+ call's basic block. */
+ if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn)))
+ prev_insn = NULL_RTX;
+ else
+ prev_insn = PREV_INSN (insn);
+
+ if (CALL_P (insn))