+/* Build a decomposed reference object for querying the alias-oracle
+ from the MEM rtx and store it in *REF.
+ Returns false if MEM is not suitable for the alias-oracle. */
+
+static bool
+ao_ref_from_mem (ao_ref *ref, const_rtx mem)
+{
+ tree expr = MEM_EXPR (mem);
+ tree base;
+
+ /* Without an attached tree expression there is nothing to decompose. */
+ if (!expr)
+ return false;
+
+ ao_ref_init (ref, expr);
+
+ /* Get the base of the reference and see if we have to reject or
+ adjust it. */
+ base = ao_ref_base (ref);
+ if (base == NULL_TREE)
+ return false;
+
+ /* The tree oracle doesn't like to have these. */
+ if (TREE_CODE (base) == FUNCTION_DECL
+ || TREE_CODE (base) == LABEL_DECL)
+ return false;
+
+ /* If this is a pointer dereference of a non-SSA_NAME punt.
+ ??? We could replace it with a pointer to anything. */
+ if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
+ return false;
+
+ /* If this is a reference based on a partitioned decl replace the
+ base with an INDIRECT_REF of the pointer representative we
+ created during stack slot partitioning. */
+ if (TREE_CODE (base) == VAR_DECL
+ && ! TREE_STATIC (base)
+ && cfun->gimple_df->decls_to_pointers != NULL)
+ {
+ void *namep;
+ namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
+ if (namep)
+ {
+ /* Record the alias set of the original decl base before the
+ base is rewritten to the pointer representative.
+ NOTE(review): ref->ref still points at the original
+ decl-based expression after this rewrite -- confirm the
+ oracle tolerates that base/ref mismatch. */
+ ref->base_alias_set = get_alias_set (base);
+ ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
+ }
+ }
+
+ /* The TBAA set of the access itself comes from the MEM, not the tree. */
+ ref->ref_alias_set = MEM_ALIAS_SET (mem);
+
+ /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
+ Keep points-to related information though. */
+ if (!MEM_OFFSET (mem)
+ || !MEM_SIZE (mem))
+ {
+ /* Degrade to a base-only reference with unknown extent; the
+ base and alias sets recorded above remain usable. */
+ ref->ref = NULL_TREE;
+ ref->offset = 0;
+ ref->size = -1;
+ ref->max_size = -1;
+ return true;
+ }
+
+ /* If the base decl is a parameter we can have negative MEM_OFFSET in
+ case of promoted subregs on bigendian targets. Trust the MEM_EXPR
+ here. */
+ if (INTVAL (MEM_OFFSET (mem)) < 0
+ && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
+ * BITS_PER_UNIT) == ref->size)
+ return true;
+
+ /* Refine the tree-level extent with the rtl-level offset and size,
+ converting from bytes to bits. */
+ ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
+ ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
+
+ /* The MEM may extend into adjacent fields, so adjust max_size if
+ necessary. */
+ if (ref->max_size != -1
+ && ref->size > ref->max_size)
+ ref->max_size = ref->size;
+
+ /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
+ the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot.
+ The spill slot decl is exempt because spill slots are shared and
+ their recorded extent need not bound the access. */
+ if (MEM_EXPR (mem) != get_spill_slot_decl (false)
+ && (ref->offset < 0
+ || (DECL_P (ref->base)
+ && (!host_integerp (DECL_SIZE (ref->base), 1)
+ || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
+ < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
+ return false;
+
+ return true;
+}
+
+/* Query the alias-oracle on whether the two memory rtx X and MEM may
+ alias. If TBAA_P is set also apply TBAA. Returns true if the
+ two rtxen may alias, false otherwise. */
+
+static bool
+rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
+{
+ ao_ref ref1, ref2;
+
+ /* If either MEM cannot be decomposed for the tree oracle, answer
+ conservatively: assume the two may alias. */
+ if (!ao_ref_from_mem (&ref1, x)
+ || !ao_ref_from_mem (&ref2, mem))
+ return true;
+
+ return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p);
+}
+