#include "tree-pass.h"
#include "ipa-type-escape.h"
#include "df.h"
+#include "tree-ssa-alias.h"
+#include "pointer-set.h"
+#include "tree-flow.h"
/* The aliasing API provided here solves related but different problems:
/* The splay-tree used to store the various alias set entries. */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
\f
+/* Build a decomposed reference object for querying the alias-oracle
+ from the MEM rtx and store it in *REF.
+ Returns false if MEM is not suitable for the alias-oracle. */
+
+static bool
+ao_ref_from_mem (ao_ref *ref, const_rtx mem)
+{
+ tree expr = MEM_EXPR (mem);
+ tree base;
+
+ /* Without a MEM_EXPR there is no tree reference to hand to the
+ tree alias-oracle. */
+ if (!expr)
+ return false;
+
+ /* If MEM_OFFSET or MEM_SIZE are NULL punt. */
+ if (!MEM_OFFSET (mem)
+ || !MEM_SIZE (mem))
+ return false;
+
+ ao_ref_init (ref, expr);
+
+ /* Get the base of the reference and see if we have to reject or
+ adjust it. */
+ base = ao_ref_base (ref);
+ if (base == NULL_TREE)
+ return false;
+
+ /* If this is a pointer dereference of a non-SSA_NAME punt.
+ ??? We could replace it with a pointer to anything. */
+ if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
+ return false;
+
+ /* The tree oracle doesn't like to have these. */
+ if (TREE_CODE (base) == FUNCTION_DECL
+ || TREE_CODE (base) == LABEL_DECL)
+ return false;
+
+ /* If this is a reference based on a partitioned decl replace the
+ base with an INDIRECT_REF of the pointer representative we
+ created during stack slot partitioning. */
+ if (TREE_CODE (base) == VAR_DECL
+ && ! TREE_STATIC (base)
+ && cfun->gimple_df->decls_to_pointers != NULL)
+ {
+ void *namep;
+ namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
+ if (namep)
+ {
+ /* Remember the alias set of the original decl before we
+ rewrite the base to point through the representative. */
+ ref->base_alias_set = get_alias_set (base);
+ ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
+ }
+ }
+
+ /* The access alias set is the one recorded on the MEM itself, not
+ the one derived from the tree expression. */
+ ref->ref_alias_set = MEM_ALIAS_SET (mem);
+
+ /* If the base decl is a parameter we can have negative MEM_OFFSET in
+ case of promoted subregs on bigendian targets. Trust the MEM_EXPR
+ here. */
+ if (INTVAL (MEM_OFFSET (mem)) < 0
+ && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
+ * BITS_PER_UNIT) == ref->size)
+ /* Keep the offset and size the tree oracle derived from EXPR. */
+ return true;
+
+ /* Otherwise refine offset and size from the (byte-based) MEM
+ attributes, converting to bits. */
+ ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
+ ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
+
+ /* The MEM may extend into adjacent fields, so adjust max_size if
+ necessary. */
+ if (ref->max_size != -1
+ && ref->size > ref->max_size)
+ ref->max_size = ref->size;
+
+ /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
+ the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */
+ if (MEM_EXPR (mem) != get_spill_slot_decl (false)
+ && (ref->offset < 0
+ || (DECL_P (ref->base)
+ && (!host_integerp (DECL_SIZE (ref->base), 1)
+ || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
+ < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
+ return false;
+
+ return true;
+}
+
+/* Query the alias-oracle on whether the two memory rtx X and MEM may
+ alias. If TBAA_P is set also apply TBAA. Returns true if the
+ two rtxen may alias, false otherwise. */
+
+static bool
+rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
+{
+ ao_ref ref1, ref2;
+
+ /* If either MEM cannot be decomposed for the tree oracle,
+ conservatively answer "may alias". */
+ if (!ao_ref_from_mem (&ref1, x)
+ || !ao_ref_from_mem (&ref2, mem))
+ return true;
+
+ return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p);
+}
+
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
such an entry, or NULL otherwise. */
{
if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
return 1;
-
- return -1;
+
+ return -1;
}
return 0;
}
if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
/* Indicate that dependence was determined and stop traversal. */
return 1;
-
+
return -1;
}
return 0;
aren't types. */
if (! TYPE_P (t))
{
- tree inner = t;
+ tree inner;
/* Remove any nops, then give the language a chance to do
something with this tree before we look at it. */
if (set != -1)
return set;
+ /* Retrieve the original memory reference if needed. */
+ if (TREE_CODE (t) == TARGET_MEM_REF)
+ t = TMR_ORIGINAL (t);
+
/* First see if the actual object referenced is an INDIRECT_REF from a
restrict-qualified pointer or a "void *". */
+ inner = t;
while (handled_component_p (inner))
{
inner = TREE_OPERAND (inner, 0);
}
/* Variant qualifiers don't affect the alias set, so get the main
- variant. Always use the canonical type as well.
- If this is a type with a known alias set, return it. */
+ variant. */
t = TYPE_MAIN_VARIANT (t);
- if (TYPE_CANONICAL (t))
- t = TYPE_CANONICAL (t);
+
+ /* Always use the canonical type as well. If this is a type that
+ requires structural comparisons to identify compatible types
+ use alias set zero. */
+ if (TYPE_STRUCTURAL_EQUALITY_P (t))
+ {
+ /* Allow the language to specify another alias set for this
+ type. */
+ set = lang_hooks.get_alias_set (t);
+ if (set != -1)
+ return set;
+ return 0;
+ }
+ t = TYPE_CANONICAL (t);
+ /* Canonical types shouldn't form a tree nor should the canonical
+ type require structural equality checks. */
+ gcc_assert (!TYPE_STRUCTURAL_EQUALITY_P (t) && TYPE_CANONICAL (t) == t);
+
+ /* If this is a type with a known alias set, return it. */
if (TYPE_ALIAS_SET_KNOWN_P (t))
return TYPE_ALIAS_SET (t);
return 0;
case TRUNCATE:
+ /* As we do not know which address space the pointer is referring to, we can
+ handle this only if the target does not support different pointer or
+ address modes depending on the address space. */
+ if (!target_default_pointer_address_modes_p ())
+ break;
if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
break;
/* Fall through. */
case ZERO_EXTEND:
case SIGN_EXTEND: /* used for NT/Alpha pointers */
+ /* As we do not know which address space the pointer is referring to, we can
+ handle this only if the target does not support different pointer or
+ address modes depending on the address space. */
+ if (!target_default_pointer_address_modes_p ())
+ break;
+
{
rtx temp = find_base_value (XEXP (src, 0));
return REG_BASE_VALUE (x);
case TRUNCATE:
+ /* As we do not know which address space the pointer is referring to, we can
+ handle this only if the target does not support different pointer or
+ address modes depending on the address space. */
+ if (!target_default_pointer_address_modes_p ())
+ return 0;
if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
return 0;
/* Fall through. */
case ZERO_EXTEND:
case SIGN_EXTEND: /* Used for Alpha/NT pointers */
+ /* As we do not know which address space the pointer is referring to, we can
+ handle this only if the target does not support different pointer or
+ address modes depending on the address space. */
+ if (!target_default_pointer_address_modes_p ())
+ return 0;
+
{
rtx temp = find_base_term (XEXP (x, 0));
{
const_tree fieldx, fieldy, typex, typey, orig_y;
+ if (!flag_strict_aliasing)
+ return false;
+
do
{
/* The comparison has to be done at a common type, since we don't
if (! DECL_P (exprx) || ! DECL_P (expry))
return 0;
+ /* With invalid code we can end up storing into the constant pool.
+ Bail out to avoid ICEing when creating RTL for this.
+ See gfortran.dg/lto/20091028-2_0.f90. */
+ if (TREE_CODE (exprx) == CONST_DECL
+ || TREE_CODE (expry) == CONST_DECL)
+ return 1;
+
rtlx = DECL_RTL (exprx);
rtly = DECL_RTL (expry);
&& ! rtx_equal_p (rtlx, rtly))
return 1;
+ /* If we have MEMs referring to different address spaces (which can
+ potentially overlap), we cannot easily tell from the addresses
+ whether the references overlap. */
+ if (MEM_P (rtlx) && MEM_P (rtly)
+ && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
+ return 0;
+
/* Get the base and offsets of both decls. If either is a register, we
know both are and are the same, so use that as the base. The only
we can avoid overlap is if we can deduce that they are nonoverlapping
if (nonoverlapping_memrefs_p (mem, x))
return 0;
+ /* If we have MEMs referring to different address spaces (which can
+ potentially overlap), we cannot easily tell from the addresses
+ whether the references overlap. */
+ if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
+ return 1;
+
if (mem_mode == VOIDmode)
mem_mode = GET_MODE (mem);
if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
return 1;
- return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
- varies);
+ if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
+ return 0;
+
+ return rtx_refs_may_alias_p (x, mem, true);
}
/* Canonical true dependence: X is read after store in MEM takes place.
if (nonoverlapping_memrefs_p (x, mem))
return 0;
+ /* If we have MEMs referring to different address spaces (which can
+ potentially overlap), we cannot easily tell from the addresses
+ whether the references overlap. */
+ if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
+ return 1;
+
if (! x_addr)
x_addr = get_addr (XEXP (x, 0));
if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
return 1;
- return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
- varies);
+ if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
+ return 0;
+
+ return rtx_refs_may_alias_p (x, mem, true);
}
/* Returns nonzero if a write to X might alias a previous read from
if (nonoverlapping_memrefs_p (x, mem))
return 0;
+ /* If we have MEMs referring to different address spaces (which can
+ potentially overlap), we cannot easily tell from the addresses
+ whether the references overlap. */
+ if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
+ return 1;
+
x_addr = get_addr (XEXP (x, 0));
mem_addr = get_addr (XEXP (mem, 0));
= fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
rtx_addr_varies_p);
- return (!(fixed_scalar == mem && !aliases_everything_p (x))
- && !(fixed_scalar == x && !aliases_everything_p (mem)));
+ if ((fixed_scalar == mem && !aliases_everything_p (x))
+ || (fixed_scalar == x && !aliases_everything_p (mem)))
+ return 0;
+
+ return rtx_refs_may_alias_p (x, mem, false);
}
/* Anti dependence: X is written after read in MEM takes place. */