+/* The splay-tree used to store the various alias set entries. */
+static splay_tree alias_sets;
+\f
+/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
+ such an entry, or NULL otherwise. */
+
+static alias_set_entry
+get_alias_set_entry (alias_set)
+     HOST_WIDE_INT alias_set;
+{
+  splay_tree_node sn;
+
+  /* The entries live in the ALIAS_SETS splay tree, keyed by set number.  */
+  sn = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
+  if (sn == 0)
+    return 0;
+
+  return (alias_set_entry) sn->value;
+}
+
+/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
+ the two MEMs cannot alias each other. */
+
+static int
+mems_in_disjoint_alias_sets_p (mem1, mem2)
+     rtx mem1;
+     rtx mem2;
+{
+  HOST_WIDE_INT set1 = MEM_ALIAS_SET (mem1);
+  HOST_WIDE_INT set2 = MEM_ALIAS_SET (mem2);
+
+#ifdef ENABLE_CHECKING
+  /* Sanity check: when strict aliasing is off, no MEM may carry a
+     nonzero alias set.  A hit here usually means someone used PUT_CODE
+     without clearing MEM_ALIAS_SET, or allocated a MEM other than via
+     gen_rtx_MEM.  Should alias sets ever be used to mark spilled
+     registers as non-aliasing, this check would have to go.  */
+  if (! flag_strict_aliasing && (set1 != 0 || set2 != 0))
+    abort ();
+#endif
+
+  /* The MEMs cannot alias exactly when their sets do not conflict.  */
+  return alias_sets_conflict_p (set1, set2) == 0;
+}
+
+/* Insert the NODE into the splay tree given by DATA. Used by
+ record_alias_subset via splay_tree_foreach. */
+
+static int
+insert_subset_children (node, data)
+     splay_tree_node node;
+     void *data;
+{
+  splay_tree target = (splay_tree) data;
+
+  /* Copy this child into the superset's child tree, then keep walking
+     (a zero return tells splay_tree_foreach to continue).  */
+  splay_tree_insert (target, node->key, node->value);
+  return 0;
+}
+
+/* Return 1 if the two specified alias sets may conflict. */
+
+int
+alias_sets_conflict_p (set1, set2)
+     HOST_WIDE_INT set1, set2;
+{
+  alias_set_entry entry;
+
+  /* Set zero means "no information", which must be assumed to alias
+     anything; a set trivially conflicts with itself.  */
+  if (set1 == 0 || set2 == 0 || set1 == set2)
+    return 1;
+
+  /* A conflict also exists when either set is recorded as a subset of
+     the other, or when either has the zero set among its children.  */
+  entry = get_alias_set_entry (set1);
+  if (entry != 0
+      && (entry->has_zero_child
+          || splay_tree_lookup (entry->children,
+                                (splay_tree_key) set2) != 0))
+    return 1;
+
+  entry = get_alias_set_entry (set2);
+  if (entry != 0
+      && (entry->has_zero_child
+          || splay_tree_lookup (entry->children,
+                                (splay_tree_key) set1) != 0))
+    return 1;
+
+  /* Distinct sets, neither a child of the other: no aliasing.  */
+  return 0;
+}
+\f
+/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
+ has any readonly fields. If any of the fields have types that
+ contain readonly fields, return true as well. */
+
+int
+readonly_fields_p (type)
+     tree type;
+{
+  tree f;
+
+  /* Only aggregates with named fields can have readonly fields.  */
+  switch (TREE_CODE (type))
+    {
+    case RECORD_TYPE:
+    case UNION_TYPE:
+    case QUAL_UNION_TYPE:
+      break;
+    default:
+      return 0;
+    }
+
+  /* Scan the fields, recursing so that a readonly field buried inside
+     a nested aggregate is found as well.  */
+  for (f = TYPE_FIELDS (type); f != 0; f = TREE_CHAIN (f))
+    {
+      if (TREE_CODE (f) != FIELD_DECL)
+        continue;
+      if (TREE_READONLY (f) || readonly_fields_p (TREE_TYPE (f)))
+        return 1;
+    }
+
+  return 0;
+}
+\f
+/* Return 1 if any MEM object of type T1 will always conflict (using the
+ dependency routines in this file) with any MEM object of type T2.
+ This is used when allocating temporary storage. If T1 and/or T2 are
+ NULL_TREE, it means we know nothing about the storage. */
+
+int
+objects_must_conflict_p (t1, t2)
+     tree t1, t2;
+{
+  HOST_WIDE_INT set1, set2;
+
+  /* With no type information at all, the storage may hold objects of
+     many different types (e.g. the argument and local variable areas
+     of inlined functions), so we cannot claim a guaranteed conflict.  */
+  if (t1 == 0 && t2 == 0)
+    return 0;
+
+  /* Readonly storage, or storage containing readonly fields, need not
+     conflict with anything.  */
+  if (t1 != 0 && (TYPE_READONLY (t1) || readonly_fields_p (t1)))
+    return 0;
+  if (t2 != 0 && (TYPE_READONLY (t2) || readonly_fields_p (t2)))
+    return 0;
+
+  /* Identical types always conflict, as do two volatile types.  */
+  if (t1 == t2)
+    return 1;
+  if (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2))
+    return 1;
+
+  /* An aggregate on one side and a scalar on the other may not
+     conflict.  */
+  if ((t1 != 0 && AGGREGATE_TYPE_P (t1))
+      != (t2 != 0 && AGGREGATE_TYPE_P (t2)))
+    return 0;
+
+  /* Otherwise defer to the alias sets; a missing type means set 0.  */
+  set1 = t1 ? get_alias_set (t1) : 0;
+  set2 = t2 ? get_alias_set (t2) : 0;
+  return alias_sets_conflict_p (set1, set2);
+}
+\f
+/* T is an expression with pointer type. Find the DECL on which this
+ expression is based. (For example, in `a[i]' this would be `a'.)
+ If there is no such DECL, or a unique decl cannot be determined,
+ NULL_TREE is returned. */
+
+static tree
+find_base_decl (t)
+     tree t;
+{
+  tree d0, d1, d2;
+
+  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
+    return 0;
+
+  /* If this is a declaration, return it.  */
+  if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
+    return t;
+
+  /* Handle general expressions.  It would be nice to deal with
+     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
+     same, then `a->f' and `b->f' are also the same.  */
+  switch (TREE_CODE_CLASS (TREE_CODE (t)))
+    {
+    case '1':
+      /* Unary expressions: look through to the single operand.  */
+      return find_base_decl (TREE_OPERAND (t, 0));
+
+    case '2':
+      /* Return 0 if found in neither or both are the same.  */
+      d0 = find_base_decl (TREE_OPERAND (t, 0));
+      d1 = find_base_decl (TREE_OPERAND (t, 1));
+      if (d0 == d1)
+        return d0;
+      else if (d0 == 0)
+        return d1;
+      else if (d1 == 0)
+        return d0;
+      else
+        return 0;
+
+    case '3':
+      /* Scan each operand exactly once.  (Operand 0 used to be scanned
+         twice here, which was redundant and made the recursion
+         exponential in the nesting depth of operand 0.)  */
+      d0 = find_base_decl (TREE_OPERAND (t, 0));
+      d1 = find_base_decl (TREE_OPERAND (t, 1));
+      d2 = find_base_decl (TREE_OPERAND (t, 2));
+
+      /* Set any nonzero values from the last, then from the first.  */
+      if (d1 == 0) d1 = d2;
+      if (d0 == 0) d0 = d1;
+      if (d1 == 0) d1 = d0;
+      if (d2 == 0) d2 = d1;
+
+      /* At this point all are nonzero or all are zero.  If all three are the
+         same, return it.  Otherwise, return zero.  */
+      return (d0 == d1 && d1 == d2) ? d0 : 0;
+
+    default:
+      return 0;
+    }
+}
+
+/* Return 1 if T is an expression that get_inner_reference handles. */
+
+static int
+handled_component_p (t)
+     tree t;
+{
+  enum tree_code code = TREE_CODE (t);
+
+  /* These reference forms are always handled by get_inner_reference.  */
+  if (code == BIT_FIELD_REF || code == COMPONENT_REF
+      || code == ARRAY_REF || code == NON_LVALUE_EXPR)
+    return 1;
+
+  /* A conversion is handled only when it does not change the mode.  */
+  if (code == NOP_EXPR || code == CONVERT_EXPR)
+    return (TYPE_MODE (TREE_TYPE (t))
+            == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
+
+  return 0;
+}
+
+/* Return 1 if all the nested component references handled by
+ get_inner_reference in T are such that we can address the object in T. */
+
+static int
+can_address_p (t)
+     tree t;
+{
+  /* Once past all handled components, the object is vacuously
+     addressable.  */
+  if (! handled_component_p (t))
+    return 1;
+
+  switch (TREE_CODE (t))
+    {
+    case BIT_FIELD_REF:
+      /* Bitfields are never addressable.  */
+      return 0;
+
+    case COMPONENT_REF:
+      /* A field reference is addressable if the field is, and so is
+         everything beneath it.  */
+      return (! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
+              && can_address_p (TREE_OPERAND (t, 0)));
+
+    case ARRAY_REF:
+      /* Likewise for an array element and its containing array.  */
+      return (! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
+              && can_address_p (TREE_OPERAND (t, 0)));
+
+    default:
+      /* Any other handled component (e.g. a mode-preserving
+         conversion) is treated as not addressable.  */
+      return 0;
+    }
+}
+
+/* Return the alias set for T, which may be either a type or an
+ expression. Call language-specific routine for help, if needed. */
+
+HOST_WIDE_INT
+get_alias_set (t)
+ tree t;
+{
+ tree orig_t;
+ HOST_WIDE_INT set;
+
+ /* If we're not doing any alias analysis, just assume everything
+ aliases everything else. Also return 0 if this or its type is
+ an error. */
+ if (! flag_strict_aliasing || t == error_mark_node
+ || (! TYPE_P (t)
+ && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
+ return 0;
+
+ /* We can be passed either an expression or a type. This and the
+ language-specific routine may make mutually-recursive calls to
+ each other to figure out what to do. At each juncture, we see if
+ this is a tree that the language may need to handle specially.
+ First handle things that aren't types and start by removing nops
+ since we care only about the actual object. */
+ if (! TYPE_P (t))
+ {
+ while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
+ || TREE_CODE (t) == NON_LVALUE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Now give the language a chance to do something but record what we
+ gave it this time. A return value other than -1 is a definitive
+ alias set from the front end; -1 means it has no opinion. */
+ orig_t = t;
+ if ((set = lang_get_alias_set (t)) != -1)
+ return set;
+
+ /* Now loop the same way as get_inner_reference and get the alias
+ set to use. Pick up the outermost object that we could have
+ a pointer to. */
+ while (handled_component_p (t) && ! can_address_p (t))
+ t = TREE_OPERAND (t, 0);
+
+ if (TREE_CODE (t) == INDIRECT_REF)
+ {
+ /* Check for accesses through restrict-qualified pointers. */
+ tree decl = find_base_decl (TREE_OPERAND (t, 0));
+
+ if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
+ /* We use the alias set indicated in the declaration. */
+ return DECL_POINTER_ALIAS_SET (decl);
+
+ /* If we have an INDIRECT_REF via a void pointer, we don't
+ know anything about what that might alias. */
+ if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE)
+ return 0;
+ }
+
+ /* Give the language another chance to do something special, but
+ only if stripping components above actually changed the tree. */
+ if (orig_t != t
+ && (set = lang_get_alias_set (t)) != -1)
+ return set;
+
+ /* Now all we care about is the type. */
+ t = TREE_TYPE (t);
+ }
+
+ /* Variant qualifiers don't affect the alias set, so get the main
+ variant. If this is a type with a known alias set, return it. */
+ t = TYPE_MAIN_VARIANT (t);
+ if (TYPE_P (t) && TYPE_ALIAS_SET_KNOWN_P (t))
+ return TYPE_ALIAS_SET (t);
+
+ /* See if the language has special handling for this type. */
+ if ((set = lang_get_alias_set (t)) != -1)
+ {
+ /* If the alias set is now known, we are done. (The language hook
+ may itself have recorded a set on the type.) */
+ if (TYPE_ALIAS_SET_KNOWN_P (t))
+ return TYPE_ALIAS_SET (t);
+ }
+
+ /* There are no objects of FUNCTION_TYPE, so there's no point in
+ using up an alias set for them. (There are, of course, pointers
+ and references to functions, but that's different.) */
+ else if (TREE_CODE (t) == FUNCTION_TYPE)
+ set = 0;
+ else
+ /* Otherwise make a new alias set for this type. */
+ set = new_alias_set ();
+
+ /* Cache the set on the type so later queries return it directly
+ via the TYPE_ALIAS_SET_KNOWN_P check above. */
+ TYPE_ALIAS_SET (t) = set;
+
+ /* If this is an aggregate type, we must record any component aliasing
+ information. */
+ if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
+ record_component_aliases (t);
+
+ return set;
+}
+
+/* Return a brand-new alias set. */
+
+HOST_WIDE_INT
+new_alias_set ()
+{
+  /* Counter for the alias sets handed out so far; set numbers start
+     at 1 since 0 means "aliases everything".  */
+  static HOST_WIDE_INT last_alias_set;
+
+  /* Without strict aliasing, everything lives in set 0.  */
+  if (! flag_strict_aliasing)
+    return 0;
+
+  return ++last_alias_set;
+}
+
+/* Indicate that things in SUBSET can alias things in SUPERSET, but
+ not vice versa. For example, in C, a store to an `int' can alias a
+ structure containing an `int', but not vice versa. Here, the
+ structure would be the SUPERSET and `int' the SUBSET. This
+ function should be called only once per SUPERSET/SUBSET pair.
+
+ It is illegal for SUPERSET to be zero; everything is implicitly a
+ subset of alias set zero. */
+
+void
+record_alias_subset (superset, subset)
+     HOST_WIDE_INT superset;
+     HOST_WIDE_INT subset;
+{
+  alias_set_entry super_entry;
+  alias_set_entry sub_entry;
+
+  /* Everything is implicitly a subset of alias set zero, so SUPERSET
+     must never be zero here.  */
+  if (superset == 0)
+    abort ();
+
+  /* Make sure the SUPERSET has an entry we can attach the SUBSET to.  */
+  super_entry = get_alias_set_entry (superset);
+  if (super_entry == 0)
+    {
+      super_entry
+        = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
+      super_entry->alias_set = superset;
+      super_entry->children
+        = splay_tree_new (splay_tree_compare_ints, 0, 0);
+      super_entry->has_zero_child = 0;
+      splay_tree_insert (alias_sets, (splay_tree_key) superset,
+                         (splay_tree_value) super_entry);
+    }
+
+  /* The zero set has no entry of its own; just flag it.  */
+  if (subset == 0)
+    {
+      super_entry->has_zero_child = 1;
+      return;
+    }
+
+  /* Propagate the SUBSET's existing children (and its zero-child flag,
+     if set) up into the SUPERSET, then record the SUBSET itself.  */
+  sub_entry = get_alias_set_entry (subset);
+  if (sub_entry != 0)
+    {
+      if (sub_entry->has_zero_child)
+        super_entry->has_zero_child = 1;
+
+      splay_tree_foreach (sub_entry->children, insert_subset_children,
+                          super_entry->children);
+    }
+
+  splay_tree_insert (super_entry->children, (splay_tree_key) subset, 0);
+}
+
+/* Record that component types of TYPE, if any, are part of that type for
+ aliasing purposes. For record types, we only record component types
+ for fields that are marked addressable. For array types, we always
+ record the component types, so the front end should not call this
+ function if the individual components aren't addressable. */
+
+void
+record_component_aliases (type)
+     tree type;
+{
+  HOST_WIDE_INT superset = get_alias_set (type);
+  tree field;
+
+  /* Set zero already aliases everything; nothing to record.  */
+  if (superset == 0)
+    return;
+
+  if (TREE_CODE (type) == ARRAY_TYPE)
+    {
+      if (! TYPE_NONALIASED_COMPONENT (type))
+        record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
+    }
+  else if (TREE_CODE (type) == COMPLEX_TYPE)
+    record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
+  else if (TREE_CODE (type) == RECORD_TYPE
+           || TREE_CODE (type) == UNION_TYPE
+           || TREE_CODE (type) == QUAL_UNION_TYPE)
+    {
+      /* Only addressable fields contribute subset relations.  */
+      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
+        if (TREE_CODE (field) == FIELD_DECL
+            && ! DECL_NONADDRESSABLE_P (field))
+          record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
+    }
+}
+
+/* Allocate an alias set for use in storing and reading from the varargs
+ spill area. */
+
+HOST_WIDE_INT
+get_varargs_alias_set ()
+{
+  /* -1 marks the set as not yet allocated; new_alias_set never
+     returns a negative value.  */
+  static HOST_WIDE_INT set = -1;
+
+  if (set < 0)
+    set = new_alias_set ();
+
+  return set;
+}
+
+/* Likewise, but used for the fixed portions of the frame, e.g., register
+ save areas. */
+
+HOST_WIDE_INT
+get_frame_alias_set ()
+{
+  /* -1 marks the set as not yet allocated; new_alias_set never
+     returns a negative value.  */
+  static HOST_WIDE_INT set = -1;
+
+  if (set < 0)
+    set = new_alias_set ();
+
+  return set;
+}
+