+
+static hashval_t gimple_type_hash (const void *);
+
+/* Structure used to maintain a cache of some type pairs compared by
+ gimple_types_compatible_p when comparing aggregate types. There are
+ three possible values for SAME_P:
+
+ -2: The pair (T1, T2) has just been inserted in the table.
+ 0: T1 and T2 are different types.
+ 1: T1 and T2 are the same type.
+
+ The two elements in the SAME_P array are indexed by the comparison
+ mode gtc_mode. */
+
+struct type_pair_d
+{
+ unsigned int uid1;
+ unsigned int uid2;
+ signed char same_p[2];
+};
+typedef struct type_pair_d *type_pair_t;
+
+DEF_VEC_P(type_pair_t);
+DEF_VEC_ALLOC_P(type_pair_t,heap);
+
+/* Return a hash value for the type pair pointed-to by P. */
+
+static hashval_t
+type_pair_hash (const void *p)
+{
+ const struct type_pair_d *pair = (const struct type_pair_d *) p;
+ hashval_t val1 = pair->uid1;
+ hashval_t val2 = pair->uid2;
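+ /* Mix the two UIDs in both orders so that the hash is symmetric in
+ UID1 and UID2, matching type_pair_eq which treats (UID1, UID2)
+ and (UID2, UID1) as equal. */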
+ return (iterative_hash_hashval_t (val2, val1)
+ ^ iterative_hash_hashval_t (val1, val2));
+}
+
+/* Compare two type pairs pointed-to by P1 and P2. */
+
+static int
+type_pair_eq (const void *p1, const void *p2)
+{
+ const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
+ const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
+ return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
+ || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
+}
+
+/* Look up the pair of types T1 and T2 in *VISITED_P. Insert a new
+ entry, allocated from *OB_P, if none existed. */
+
+static type_pair_t
+lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
+{
+ struct type_pair_d pair;
+ type_pair_t p;
+ void **slot;
+
+ if (*visited_p == NULL)
+ {
+ *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
+ gcc_obstack_init (ob_p);
+ }
+
+ pair.uid1 = TYPE_UID (t1);
+ pair.uid2 = TYPE_UID (t2);
+ slot = htab_find_slot (*visited_p, &pair, INSERT);
+
+ if (*slot)
+ p = *((type_pair_t *) slot);
+ else
+ {
+ p = XOBNEW (ob_p, struct type_pair_d);
+ p->uid1 = TYPE_UID (t1);
+ p->uid2 = TYPE_UID (t2);
+ p->same_p[0] = -2;
+ p->same_p[1] = -2;
+ *slot = (void *) p;
+ }
+
+ return p;
+}
+
+/* Per-pointer state for the SCC finding. The on_sccstack flag
+ is not strictly required; it is true when there is no hash value
+ recorded for the type and false otherwise, but querying that
+ is slower. The U union holds the hash value during the hashing
+ walk and the comparison result during the compatibility walk. */
+
+struct sccs
+{
+ unsigned int dfsnum;
+ unsigned int low;
+ bool on_sccstack;
+ union {
+ hashval_t hash;
+ signed char same_p;
+ } u;
+};
+
+static unsigned int next_dfs_num;
+static unsigned int gtc_next_dfs_num;
+
+
+/* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
+
+typedef struct GTY(()) gimple_type_leader_entry_s {
+ tree type;
+ tree leader;
+} gimple_type_leader_entry;
+
+#define GIMPLE_TYPE_LEADER_SIZE 16381
+static GTY((length("GIMPLE_TYPE_LEADER_SIZE"))) gimple_type_leader_entry
+ *gimple_type_leader;
+
+/* Look up an existing leader for T and return it, or NULL_TREE if
+ there is none in the cache. */
+
+static tree
+gimple_lookup_type_leader (tree t)
+{
+ gimple_type_leader_entry *leader;
+
+ if (!gimple_type_leader)
+ return NULL_TREE;
+
+ leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
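+ /* The cache is direct-mapped; an entry left by a different type
+ that maps to the same slot counts as a miss. */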
+ if (leader->type != t)
+ return NULL_TREE;
+
+ return leader->leader;
+}
+
+/* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
+ true, consider anonymous types to be distinct and return false if
+ either type has no name; otherwise two anonymous types compare equal. */
+
+static bool
+compare_type_names_p (tree t1, tree t2, bool for_completion_p)
+{
+ tree name1 = TYPE_NAME (t1);
+ tree name2 = TYPE_NAME (t2);
+
+ /* Consider anonymous types all unique for completion. */
+ if (for_completion_p
+ && (!name1 || !name2))
+ return false;
+
+ if (name1 && TREE_CODE (name1) == TYPE_DECL)
+ {
+ name1 = DECL_NAME (name1);
+ if (for_completion_p
+ && !name1)
+ return false;
+ }
+ gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
+
+ if (name2 && TREE_CODE (name2) == TYPE_DECL)
+ {
+ name2 = DECL_NAME (name2);
+ if (for_completion_p
+ && !name2)
+ return false;
+ }
+ gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
+
+ /* Identifiers can be compared with pointer equality rather
+ than a string comparison. */
+ if (name1 == name2)
+ return true;
+
+ return false;
+}
+
+/* Return true if the field decls F1 and F2 are at the same offset.
+
+ This is intended to be used on GIMPLE types only. In order to
+ compare GENERIC types, use fields_compatible_p instead. */
+
+bool
+gimple_compare_field_offset (tree f1, tree f2)
+{
+ if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
+ {
+ tree offset1 = DECL_FIELD_OFFSET (f1);
+ tree offset2 = DECL_FIELD_OFFSET (f2);
+ return ((offset1 == offset2
+ /* Once gimplification is done, self-referential offsets are
+ instantiated as operand #2 of the COMPONENT_REF built for
+ each access and reset. Therefore, they are not relevant
+ anymore and fields are interchangeable provided that they
+ represent the same access. */
+ || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
+ && TREE_CODE (offset2) == PLACEHOLDER_EXPR
+ && (DECL_SIZE (f1) == DECL_SIZE (f2)
+ || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
+ && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
+ || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
+ && DECL_ALIGN (f1) == DECL_ALIGN (f2))
+ || operand_equal_p (offset1, offset2, 0))
+ && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
+ DECL_FIELD_BIT_OFFSET (f2)));
+ }
+
+ /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
+ should be, so handle differing ones specially by decomposing
+ the offset into a byte and bit offset manually. */
+ if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
+ && host_integerp (DECL_FIELD_OFFSET (f2), 0))
+ {
+ unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
+ unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
+ bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
+ byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
+ + bit_offset1 / BITS_PER_UNIT);
+ bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
+ byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
+ + bit_offset2 / BITS_PER_UNIT);
+ if (byte_offset1 != byte_offset2)
+ return false;
+ return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
+ }
+
+ return false;
+}
+
+/* Return true if the types T1 and T2 are a complete and an incomplete
+ variant of the same type. */
+
+static bool
+gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
+{
+ /* If one pointer points to an incomplete type variant of
+ the other pointed-to type they are the same. */
+ if (TREE_CODE (t1) == TREE_CODE (t2)
+ && RECORD_OR_UNION_TYPE_P (t1)
+ && (!COMPLETE_TYPE_P (t1)
+ || !COMPLETE_TYPE_P (t2))
+ && TYPE_QUALS (t1) == TYPE_QUALS (t2)
+ && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
+ TYPE_MAIN_VARIANT (t2), true))
+ return true;
+ return false;
+}
+
+static bool
+gimple_types_compatible_p_1 (tree, tree, enum gtc_mode, type_pair_t,
+ VEC(type_pair_t, heap) **,
+ struct pointer_map_t *, struct obstack *);
+
+/* DFS visit the edge from the caller's type pair with state *STATE to
+ the pair T1, T2 while operating in comparison mode MODE.
+ Update the comparison status if the pair is not part of the SCC
+ containing the caller's pair and return it.
+ SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
+
+static bool
+gtc_visit (tree t1, tree t2, enum gtc_mode mode,
+ struct sccs *state,
+ VEC(type_pair_t, heap) **sccstack,
+ struct pointer_map_t *sccstate,
+ struct obstack *sccstate_obstack)
+{
+ struct sccs *cstate = NULL;
+ type_pair_t p;
+ void **slot;
+
+ /* Check first for the obvious case of pointer identity. */
+ if (t1 == t2)
+ return true;
+
+ /* Check that we have two types to compare. */
+ if (t1 == NULL_TREE || t2 == NULL_TREE)
+ return false;
+
+ /* If the types have been previously registered and found equal
+ they still are. */
+ if (mode == GTC_MERGE)
+ {
+ tree leader1 = gimple_lookup_type_leader (t1);
+ tree leader2 = gimple_lookup_type_leader (t2);
+ if (leader1 == t2
+ || t1 == leader2
+ || (leader1 && leader1 == leader2))
+ return true;
+ }
+ else if (mode == GTC_DIAG)
+ {
+ if (TYPE_CANONICAL (t1)
+ && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
+ return true;
+ }
+
+ /* Can't be the same type if the types don't have the same code. */
+ if (TREE_CODE (t1) != TREE_CODE (t2))
+ return false;
+
+ /* Can't be the same type if they have different CV qualifiers. */
+ if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
+ return false;
+
+ /* Void types are always the same. */
+ if (TREE_CODE (t1) == VOID_TYPE)
+ return true;
+
+ /* Do some simple checks before doing three hashtable queries. */
+ if (INTEGRAL_TYPE_P (t1)
+ || SCALAR_FLOAT_TYPE_P (t1)
+ || FIXED_POINT_TYPE_P (t1)
+ || TREE_CODE (t1) == VECTOR_TYPE
+ || TREE_CODE (t1) == COMPLEX_TYPE
+ || TREE_CODE (t1) == OFFSET_TYPE)
+ {
+ /* Can't be the same type if they have different alignment,
+ sign, precision or mode. */
+ if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
+ || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
+ || TYPE_MODE (t1) != TYPE_MODE (t2)
+ || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
+ return false;
+
+ if (TREE_CODE (t1) == INTEGER_TYPE
+ && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
+ || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
+ return false;
+
+ /* That's all we need to check for float and fixed-point types. */
+ if (SCALAR_FLOAT_TYPE_P (t1)
+ || FIXED_POINT_TYPE_P (t1))
+ return true;
+
+ /* For integral types fall thru to more complex checks. */
+ }
+
+ else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
+ {
+ /* Can't be the same type if they have different alignment or mode. */
+ if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
+ || TYPE_MODE (t1) != TYPE_MODE (t2))
+ return false;
+ }
+
+ /* If the hash values of t1 and t2 are different the types can't
+ possibly be the same. This helps keep the type-pair hashtable
+ small, tracking comparisons only for hash collisions. */
+ if (gimple_type_hash (t1) != gimple_type_hash (t2))
+ return false;
+
+ /* Look up (or allocate) the cache entry for this comparison. */
+ p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
+ if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
+ {
+ /* We have already decided whether T1 and T2 are the
+ same, return the cached result. */
+ return p->same_p[mode] == 1;
+ }
+
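+ /* If this pair already has SCC state from the current DFS walk,
+ re-use it; otherwise recurse into the comparison below. */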
+ if ((slot = pointer_map_contains (sccstate, p)) != NULL)
+ cstate = (struct sccs *)*slot;
+ if (!cstate)
+ {
+ bool res;
+ /* Not yet visited. DFS recurse. */
+ res = gimple_types_compatible_p_1 (t1, t2, mode, p,
+ sccstack, sccstate, sccstate_obstack);
+ if (!cstate)
+ cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
+ state->low = MIN (state->low, cstate->low);
+ /* If the type is no longer on the SCC stack and thus is not part
+ of the parent's SCC, return its state. Otherwise we will
+ ignore this pair and assume equality. */
+ if (!cstate->on_sccstack)
+ return res;
+ }
+ if (cstate->dfsnum < state->dfsnum
+ && cstate->on_sccstack)
+ state->low = MIN (cstate->dfsnum, state->low);
+
+ /* We are part of our parent's SCC; skip this entry and return true. */
+ return true;
+}
+
+/* Worker for gimple_types_compatible_p.
+ SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
+
+static bool
+gimple_types_compatible_p_1 (tree t1, tree t2, enum gtc_mode mode,
+ type_pair_t p,
+ VEC(type_pair_t, heap) **sccstack,
+ struct pointer_map_t *sccstate,
+ struct obstack *sccstate_obstack)
+{
+ struct sccs *state;
+
+ gcc_assert (p->same_p[mode] == -2);
+
+ state = XOBNEW (sccstate_obstack, struct sccs);
+ *pointer_map_insert (sccstate, p) = state;
+
+ VEC_safe_push (type_pair_t, heap, *sccstack, p);
+ state->dfsnum = gtc_next_dfs_num++;
+ state->low = state->dfsnum;
+ state->on_sccstack = true;
+
+ /* If their attributes are not the same they can't be the same type. */
+ if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
+ goto different_types;
+
+ /* Do type-specific comparisons. */
+ switch (TREE_CODE (t1))
+ {
+ case VECTOR_TYPE:
+ case COMPLEX_TYPE:
+ if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
+ state, sccstack, sccstate, sccstate_obstack))
+ goto different_types;
+ goto same_types;
+
+ case ARRAY_TYPE:
+ /* Array types are the same if the element types are the same and
+ the number of elements is the same. */
+ if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
+ state, sccstack, sccstate, sccstate_obstack)
+ || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
+ || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
+ goto different_types;
+ else
+ {
+ tree i1 = TYPE_DOMAIN (t1);
+ tree i2 = TYPE_DOMAIN (t2);
+
+ /* For an incomplete external array, the type domain can be
+ NULL_TREE. Check this condition also. */
+ if (i1 == NULL_TREE && i2 == NULL_TREE)
+ goto same_types;
+ else if (i1 == NULL_TREE || i2 == NULL_TREE)
+ goto different_types;
+ /* If, for a complete array type, the possibly gimplified sizes
+ differ, the types are different. */
+ else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
+ || (TYPE_SIZE (i1)
+ && TYPE_SIZE (i2)
+ && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
+ goto different_types;
+ else
+ {
+ tree min1 = TYPE_MIN_VALUE (i1);
+ tree min2 = TYPE_MIN_VALUE (i2);
+ tree max1 = TYPE_MAX_VALUE (i1);
+ tree max2 = TYPE_MAX_VALUE (i2);
+
+ /* The minimum/maximum values have to be the same. */
+ if ((min1 == min2
+ || (min1 && min2
+ && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
+ && TREE_CODE (min2) == PLACEHOLDER_EXPR)
+ || operand_equal_p (min1, min2, 0))))
+ && (max1 == max2
+ || (max1 && max2
+ && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
+ && TREE_CODE (max2) == PLACEHOLDER_EXPR)
+ || operand_equal_p (max1, max2, 0)))))
+ goto same_types;
+ else
+ goto different_types;
+ }
+ }
+
+ case METHOD_TYPE:
+ /* Method types should belong to the same class. */
+ if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
+ mode, state, sccstack, sccstate, sccstate_obstack))
+ goto different_types;
+
+ /* Fallthru */
+
+ case FUNCTION_TYPE:
+ /* Function types are the same if the return type and argument types
+ are the same. */
+ if ((mode != GTC_DIAG
+ || !gimple_compatible_complete_and_incomplete_subtype_p
+ (TREE_TYPE (t1), TREE_TYPE (t2)))
+ && !gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
+ state, sccstack, sccstate, sccstate_obstack))
+ goto different_types;
+
+ if (!targetm.comp_type_attributes (t1, t2))
+ goto different_types;
+
+ if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
+ goto same_types;
+ else
+ {
+ tree parms1, parms2;
+
+ for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
+ parms1 && parms2;
+ parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
+ {
+ if ((mode == GTC_MERGE
+ || !gimple_compatible_complete_and_incomplete_subtype_p
+ (TREE_VALUE (parms1), TREE_VALUE (parms2)))
+ && !gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2), mode,
+ state, sccstack, sccstate, sccstate_obstack))
+ goto different_types;
+ }
+
+ if (parms1 || parms2)
+ goto different_types;
+
+ goto same_types;
+ }
+
+ case OFFSET_TYPE:
+ {
+ if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
+ state, sccstack, sccstate, sccstate_obstack)
+ || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
+ TYPE_OFFSET_BASETYPE (t2), mode,
+ state, sccstack, sccstate, sccstate_obstack))
+ goto different_types;
+
+ goto same_types;
+ }
+
+ case POINTER_TYPE:
+ case REFERENCE_TYPE:
+ {
+ /* If the two pointers have different ref-all attributes,
+ they can't be the same type. */
+ if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
+ goto different_types;
+
+ /* If one pointer points to an incomplete type variant of
+ the other pointed-to type they are the same. */
+ if (mode == GTC_DIAG
+ && gimple_compatible_complete_and_incomplete_subtype_p
+ (TREE_TYPE (t1), TREE_TYPE (t2)))
+ goto same_types;
+
+ /* Otherwise, pointer and reference types are the same if the
+ pointed-to types are the same. */
+ if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
+ state, sccstack, sccstate, sccstate_obstack))
+ goto same_types;
+
+ goto different_types;
+ }
+
+ case NULLPTR_TYPE:
+ /* There is only one decltype(nullptr). */
+ goto same_types;
+
+ case INTEGER_TYPE:
+ case BOOLEAN_TYPE:
+ {
+ tree min1 = TYPE_MIN_VALUE (t1);
+ tree max1 = TYPE_MAX_VALUE (t1);
+ tree min2 = TYPE_MIN_VALUE (t2);
+ tree max2 = TYPE_MAX_VALUE (t2);
+ bool min_equal_p = false;
+ bool max_equal_p = false;
+
+ /* If either type has a minimum value, the other type must
+ have the same. */
+ if (min1 == NULL_TREE && min2 == NULL_TREE)
+ min_equal_p = true;
+ else if (min1 && min2 && operand_equal_p (min1, min2, 0))
+ min_equal_p = true;
+
+ /* Likewise, if either type has a maximum value, the other
+ type must have the same. */
+ if (max1 == NULL_TREE && max2 == NULL_TREE)
+ max_equal_p = true;
+ else if (max1 && max2 && operand_equal_p (max1, max2, 0))
+ max_equal_p = true;
+
+ if (!min_equal_p || !max_equal_p)
+ goto different_types;
+
+ goto same_types;
+ }
+
+ case ENUMERAL_TYPE:
+ {
+ /* FIXME lto, we cannot check bounds on enumeral types because
+ different front ends will produce different values.
+ In C, enumeral types are integers, while in C++ each element
+ will have its own symbolic value. We should decide how enums
+ are to be represented in GIMPLE and have each front end lower
+ to that. */
+ tree v1, v2;
+
+ /* For enumeral types, all the values must be the same. */
+ if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
+ goto same_types;
+
+ for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
+ v1 && v2;
+ v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
+ {
+ tree c1 = TREE_VALUE (v1);
+ tree c2 = TREE_VALUE (v2);
+
+ if (TREE_CODE (c1) == CONST_DECL)
+ c1 = DECL_INITIAL (c1);
+
+ if (TREE_CODE (c2) == CONST_DECL)
+ c2 = DECL_INITIAL (c2);
+
+ if (tree_int_cst_equal (c1, c2) != 1)
+ goto different_types;
+ }
+
+ /* If one enumeration has more values than the other, they
+ are not the same. */
+ if (v1 || v2)
+ goto different_types;
+
+ goto same_types;
+ }
+
+ case RECORD_TYPE:
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ {
+ tree f1, f2;
+
+ /* The struct tags shall compare equal. */
+ if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1),
+ TYPE_MAIN_VARIANT (t2), false))
+ goto different_types;
+
+ /* For aggregate types, all the fields must be the same. */
+ for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
+ f1 && f2;
+ f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
+ {
+ /* The fields must have the same name, offset and type. */
+ if (DECL_NAME (f1) != DECL_NAME (f2)
+ || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
+ || !gimple_compare_field_offset (f1, f2)
+ || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
+ state, sccstack, sccstate, sccstate_obstack))
+ goto different_types;
+ }
+
+ /* If one aggregate has more fields than the other, they
+ are not the same. */
+ if (f1 || f2)
+ goto different_types;
+
+ goto same_types;
+ }
+
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Common exit path for types that are not compatible. */
+different_types:
+ state->u.same_p = 0;
+ goto pop;
+
+ /* Common exit path for types that are compatible. */
+same_types:
+ state->u.same_p = 1;
+ goto pop;
+
+pop:
+ if (state->low == state->dfsnum)
+ {
+ type_pair_t x;
+
+ /* Pop off the SCC and set its cache values. */
+ do
+ {
+ struct sccs *cstate;
+ x = VEC_pop (type_pair_t, *sccstack);
+ cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
+ cstate->on_sccstack = false;
+ x->same_p[mode] = cstate->u.same_p;
+ }
+ while (x != p);
+ }
+
+ return state->u.same_p;
+}
+
+/* Return true iff T1 and T2 are structurally identical. When
+ MODE is GTC_MERGE, an incomplete type and a complete type
+ are considered different; otherwise they are considered compatible. */
+
+bool
+gimple_types_compatible_p (tree t1, tree t2, enum gtc_mode mode)
+{
+ VEC(type_pair_t, heap) *sccstack = NULL;
+ struct pointer_map_t *sccstate;
+ struct obstack sccstate_obstack;
+ type_pair_t p = NULL;
+ bool res;
+
+ /* Before starting to set up the SCC machinery handle simple cases. */
+
+ /* Check first for the obvious case of pointer identity. */
+ if (t1 == t2)
+ return true;
+
+ /* Check that we have two types to compare. */
+ if (t1 == NULL_TREE || t2 == NULL_TREE)
+ return false;
+
+ /* If the types have been previously registered and found equal
+ they still are. */
+ if (mode == GTC_MERGE)
+ {
+ tree leader1 = gimple_lookup_type_leader (t1);
+ tree leader2 = gimple_lookup_type_leader (t2);
+ if (leader1 == t2
+ || t1 == leader2
+ || (leader1 && leader1 == leader2))
+ return true;
+ }
+ else if (mode == GTC_DIAG)
+ {
+ if (TYPE_CANONICAL (t1)
+ && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
+ return true;
+ }
+
+ /* Can't be the same type if the types don't have the same code. */
+ if (TREE_CODE (t1) != TREE_CODE (t2))
+ return false;
+
+ /* Can't be the same type if they have different CV qualifiers. */
+ if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
+ return false;
+
+ /* Void types are always the same. */
+ if (TREE_CODE (t1) == VOID_TYPE)
+ return true;
+
+ /* Do some simple checks before doing three hashtable queries. */
+ if (INTEGRAL_TYPE_P (t1)
+ || SCALAR_FLOAT_TYPE_P (t1)
+ || FIXED_POINT_TYPE_P (t1)
+ || TREE_CODE (t1) == VECTOR_TYPE
+ || TREE_CODE (t1) == COMPLEX_TYPE
+ || TREE_CODE (t1) == OFFSET_TYPE)
+ {
+ /* Can't be the same type if they have different alignment,
+ sign, precision or mode. */
+ if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
+ || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
+ || TYPE_MODE (t1) != TYPE_MODE (t2)
+ || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
+ return false;
+
+ if (TREE_CODE (t1) == INTEGER_TYPE
+ && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
+ || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
+ return false;
+
+ /* That's all we need to check for float and fixed-point types. */
+ if (SCALAR_FLOAT_TYPE_P (t1)
+ || FIXED_POINT_TYPE_P (t1))
+ return true;
+
+ /* For integral types fall thru to more complex checks. */
+ }
+
+ else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
+ {
+ /* Can't be the same type if they have different alignment or mode. */
+ if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
+ || TYPE_MODE (t1) != TYPE_MODE (t2))
+ return false;
+ }
+
+ /* If the hash values of t1 and t2 are different the types can't
+ possibly be the same. This helps keep the type-pair hashtable
+ small, tracking comparisons only for hash collisions. */
+ if (gimple_type_hash (t1) != gimple_type_hash (t2))
+ return false;
+
+ /* If we've visited this type pair before (in the case of aggregates
+ with self-referential types), and we made a decision, return it. */
+ p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
+ if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
+ {
+ /* We have already decided whether T1 and T2 are the
+ same, return the cached result. */
+ return p->same_p[mode] == 1;
+ }
+
+ /* Now set up the SCC machinery for the comparison. */
+ gtc_next_dfs_num = 1;
+ sccstate = pointer_map_create ();
+ gcc_obstack_init (&sccstate_obstack);
+ res = gimple_types_compatible_p_1 (t1, t2, mode, p,
+ &sccstack, sccstate, &sccstate_obstack);
+ VEC_free (type_pair_t, heap, sccstack);
+ pointer_map_destroy (sccstate);
+ obstack_free (&sccstate_obstack, NULL);
+
+ return res;
+}
+
+
+static hashval_t
+iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
+ struct pointer_map_t *, struct obstack *);
+
+/* DFS visit the edge from the caller's type with state *STATE to T.
+ Update the caller's type hash V with the hash for T if it is not part
+ of the SCC containing the caller's type and return it.
+ SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
+
+static hashval_t
+visit (tree t, struct sccs *state, hashval_t v,
+ VEC (tree, heap) **sccstack,
+ struct pointer_map_t *sccstate,
+ struct obstack *sccstate_obstack)
+{
+ struct sccs *cstate = NULL;
+ struct tree_int_map m;
+ void **slot;
+
+ /* If there is a hash value recorded for this type then it can't
+ possibly be part of our parent SCC. Simply mix in its hash. */
+ m.base.from = t;
+ if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
+ && *slot)
+ return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
+
+ if ((slot = pointer_map_contains (sccstate, t)) != NULL)
+ cstate = (struct sccs *)*slot;
+ if (!cstate)
+ {
+ hashval_t tem;
+ /* Not yet visited. DFS recurse. */
+ tem = iterative_hash_gimple_type (t, v,
+ sccstack, sccstate, sccstate_obstack);
+ if (!cstate)
+ cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
+ state->low = MIN (state->low, cstate->low);
+ /* If the type is no longer on the SCC stack and thus is not part
+ of the parent's SCC, mix in its hash value. Otherwise we will
+ ignore the type for hashing purposes and return the unaltered
+ hash value. */
+ if (!cstate->on_sccstack)
+ return tem;
+ }
+ if (cstate->dfsnum < state->dfsnum
+ && cstate->on_sccstack)
+ state->low = MIN (cstate->dfsnum, state->low);
+
+ /* We are part of our parent's SCC; skip this type during hashing
+ and return the unaltered hash value. */
+ return v;
+}
+
+/* Hash NAME with the previous hash value V and return it. */
+
+static hashval_t
+iterative_hash_name (tree name, hashval_t v)
+{
+ if (!name)
+ return v;
+ if (TREE_CODE (name) == TYPE_DECL)
+ name = DECL_NAME (name);
+ if (!name)
+ return v;
+ gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
+ return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
+}
+
+/* Return a hash value for gimple type TYPE combined with VAL.
+ SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
+
+ To hash a type we end up hashing in types that are reachable.
+ Through pointers we can end up with cycles, which would break the
+ required property that we need to compute the same hash value
+ for structurally equivalent types. To avoid this we have to
+ hash all types in a cycle (the SCC) in a commutative way. The
+ easiest way is to not mix in the hashes of the SCC members at
+ all. To make this work we have to delay setting the hash
+ values of the SCC until it is complete. */
+
+static hashval_t
+iterative_hash_gimple_type (tree type, hashval_t val,
+ VEC(tree, heap) **sccstack,
+ struct pointer_map_t *sccstate,
+ struct obstack *sccstate_obstack)
+{
+ hashval_t v;
+ void **slot;
+ struct sccs *state;
+
+ /* Not visited during this DFS walk. */
+ gcc_checking_assert (!pointer_map_contains (sccstate, type));
+ state = XOBNEW (sccstate_obstack, struct sccs);
+ *pointer_map_insert (sccstate, type) = state;
+
+ VEC_safe_push (tree, heap, *sccstack, type);
+ state->dfsnum = next_dfs_num++;
+ state->low = state->dfsnum;
+ state->on_sccstack = true;
+
+ /* Combine a few common features of types so that types are grouped into
+ smaller sets; when searching for existing matching types to merge,
+ only existing types having the same features as the new type will be
+ checked. */
+ v = iterative_hash_hashval_t (TREE_CODE (type), 0);
+ v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
+ v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
+
+ /* Do not hash the type's size as this will cause differences in
+ hash values for the complete vs. the incomplete type variant. */
+
+ /* Incorporate common features of numerical types. */
+ if (INTEGRAL_TYPE_P (type)
+ || SCALAR_FLOAT_TYPE_P (type)
+ || FIXED_POINT_TYPE_P (type))
+ {
+ v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
+ v = iterative_hash_hashval_t (TYPE_MODE (type), v);
+ v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
+ }
+
+ /* For pointer and reference types, fold in information about the type
+ pointed to but do not recurse into possibly incomplete types to
+ avoid hash differences for complete vs. incomplete types. */
+ if (POINTER_TYPE_P (type))
+ {
+ if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
+ {
+ v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
+ v = iterative_hash_name
+ (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
+ }
+ else
+ v = visit (TREE_TYPE (type), state, v,
+ sccstack, sccstate, sccstate_obstack);
+ }
+
+ /* For integer types hash the type's min/max values and the string flag. */
+ if (TREE_CODE (type) == INTEGER_TYPE)
+ {
+ /* OMP lowering can introduce error_mark_node in place of
+ random local decls in types. */
+ if (TYPE_MIN_VALUE (type) != error_mark_node)
+ v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
+ if (TYPE_MAX_VALUE (type) != error_mark_node)
+ v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
+ v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
+ }
+
+ /* For array types hash their domain and the string flag. */
+ if (TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_DOMAIN (type))
+ {
+ v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
+ v = visit (TYPE_DOMAIN (type), state, v,
+ sccstack, sccstate, sccstate_obstack);
+ }
+
+ /* Recurse for aggregates with a single element type. */
+ if (TREE_CODE (type) == ARRAY_TYPE
+ || TREE_CODE (type) == COMPLEX_TYPE
+ || TREE_CODE (type) == VECTOR_TYPE)
+ v = visit (TREE_TYPE (type), state, v,
+ sccstack, sccstate, sccstate_obstack);
+
+ /* Incorporate function return and argument types. */
+ if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
+ {
+ unsigned na;
+ tree p;
+
+ /* For method types also incorporate their parent class. */
+ if (TREE_CODE (type) == METHOD_TYPE)
+ v = visit (TYPE_METHOD_BASETYPE (type), state, v,
+ sccstack, sccstate, sccstate_obstack);
+
+ /* For result types allow mismatch in completeness. */
+ if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
+ {
+ v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
+ v = iterative_hash_name
+ (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
+ }
+ else
+ v = visit (TREE_TYPE (type), state, v,
+ sccstack, sccstate, sccstate_obstack);
+
+ for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
+ {
+ /* For argument types allow mismatch in completeness. */
+ if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
+ {
+ v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
+ v = iterative_hash_name
+ (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
+ }
+ else
+ v = visit (TREE_VALUE (p), state, v,
+ sccstack, sccstate, sccstate_obstack);
+ na++;
+ }
+
+ v = iterative_hash_hashval_t (na, v);
+ }
+
+ if (TREE_CODE (type) == RECORD_TYPE
+ || TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
+ {
+ unsigned nf;
+ tree f;
+
+ v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
+
+ for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
+ {
+ v = iterative_hash_name (DECL_NAME (f), v);
+ v = visit (TREE_TYPE (f), state, v,
+ sccstack, sccstate, sccstate_obstack);
+ nf++;
+ }
+
+ v = iterative_hash_hashval_t (nf, v);
+ }
+
+ /* Record hash for us. */
+ state->u.hash = v;
+
+ /* See if we found an SCC. */
+ if (state->low == state->dfsnum)
+ {
+ tree x;
+
+ /* Pop off the SCC and set its hash values. */
+ do
+ {
+ struct sccs *cstate;
+ struct tree_int_map *m = ggc_alloc_cleared_tree_int_map ();
+ x = VEC_pop (tree, *sccstack);
+ cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
+ cstate->on_sccstack = false;
+ m->base.from = x;
+ m->to = cstate->u.hash;
+ slot = htab_find_slot (type_hash_cache, m, INSERT);
+ gcc_assert (!*slot);
+ *slot = (void *) m;
+ }
+ while (x != type);
+ }
+
+ return iterative_hash_hashval_t (v, val);
+}
+
+
+/* Returns a hash value for P (assumed to be a type). The hash value
+ is computed using some distinguishing features of the type. Note
+ that we cannot use pointer hashing here as we may be dealing with
+ two distinct instances of the same type.
+
+ This function should produce the same hash value for two compatible
+ types according to gimple_types_compatible_p. */
+
+static hashval_t
+gimple_type_hash (const void *p)
+{
+ const_tree t = (const_tree) p;
+ VEC(tree, heap) *sccstack = NULL;
+ struct pointer_map_t *sccstate;
+ struct obstack sccstate_obstack;
+ hashval_t val;
+ void **slot;
+ struct tree_int_map m;
+
+ if (type_hash_cache == NULL)
+ type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
+ tree_int_map_eq, NULL);
+
+ m.base.from = CONST_CAST_TREE (t);
+ if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
+ && *slot)
+ return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
+
+ /* Perform a DFS walk and pre-hash all reachable types. */
+ next_dfs_num = 1;
+ sccstate = pointer_map_create ();
+ gcc_obstack_init (&sccstate_obstack);
+ val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
+ &sccstack, sccstate, &sccstate_obstack);
+ VEC_free (tree, heap, sccstack);
+ pointer_map_destroy (sccstate);
+ obstack_free (&sccstate_obstack, NULL);
+
+ return val;
+}
+
+
+/* Returns nonzero if P1 and P2 are equal. */
+
+static int
+gimple_type_eq (const void *p1, const void *p2)
+{
+ const_tree t1 = (const_tree) p1;
+ const_tree t2 = (const_tree) p2;
+ return gimple_types_compatible_p (CONST_CAST_TREE (t1),
+ CONST_CAST_TREE (t2), GTC_MERGE);
+}
+
+
+/* Register type T in the global type table gimple_types.
+ If another type T', compatible with T, already existed in
+ gimple_types then return T', otherwise return T. This is used by
+ LTO to merge identical types read from different TUs. */
+
+tree
+gimple_register_type (tree t)
+{
+ void **slot;
+ gimple_type_leader_entry *leader;
+
+ gcc_assert (TYPE_P (t));
+
+ if (!gimple_type_leader)
+ gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
+ (GIMPLE_TYPE_LEADER_SIZE);
+ /* If we registered this type before return the cached result. */
+ leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
+ if (leader->type == t)
+ return leader->leader;
+
+ /* Always register the main variant first. This is important so we
+ pick up the non-typedef variants as canonical; otherwise we'll end
+ up taking typedef ids for structure tags during comparison. */
+ if (TYPE_MAIN_VARIANT (t) != t)
+ gimple_register_type (TYPE_MAIN_VARIANT (t));
+
+ if (gimple_types == NULL)
+ gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
+
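+ /* Look up T in the merged type table. Hashing and equality are the
+ structural gimple_type_hash and gimple_type_eq (GTC_MERGE mode). */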
+ slot = htab_find_slot (gimple_types, t, INSERT);
+ if (*slot
+ && *(tree *)slot != t)
+ {
+ tree new_type = (tree) *((tree *) slot);
+
+ /* Do not merge types with different addressability. */
+ gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
+
+ /* If t is not its main variant then make t unreachable from its
+ main variant list. Otherwise we'd queue up a lot of duplicates
+ there. */
+ if (t != TYPE_MAIN_VARIANT (t))
+ {
+ tree tem = TYPE_MAIN_VARIANT (t);
+ while (tem && TYPE_NEXT_VARIANT (tem) != t)
+ tem = TYPE_NEXT_VARIANT (tem);
+ if (tem)
+ TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
+ TYPE_NEXT_VARIANT (t) = NULL_TREE;
+ }
+
+ /* If we are a pointer then remove us from the pointer-to or
+ reference-to chain. Otherwise we'd queue up a lot of duplicates
+ there. */
+ if (TREE_CODE (t) == POINTER_TYPE)
+ {
+ if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
+ TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
+ else
+ {
+ tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
+ while (tem && TYPE_NEXT_PTR_TO (tem) != t)
+ tem = TYPE_NEXT_PTR_TO (tem);
+ if (tem)
+ TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
+ }
+ TYPE_NEXT_PTR_TO (t) = NULL_TREE;
+ }
+ else if (TREE_CODE (t) == REFERENCE_TYPE)
+ {
+ if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
+ TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
+ else
+ {
+ tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
+ while (tem && TYPE_NEXT_REF_TO (tem) != t)
+ tem = TYPE_NEXT_REF_TO (tem);
+ if (tem)
+ TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
+ }
+ TYPE_NEXT_REF_TO (t) = NULL_TREE;
+ }
+
+ leader->type = t;
+ leader->leader = new_type;
+ t = new_type;
+ }
+ else
+ {
+ leader->type = t;
+ leader->leader = t;
+ *slot = (void *) t;
+ }
+
+ return t;
+}
+
+
+/* Returns nonzero if P1 and P2 are equal. */
+
+static int
+gimple_canonical_type_eq (const void *p1, const void *p2)
+{
+ const_tree t1 = (const_tree) p1;
+ const_tree t2 = (const_tree) p2;
+ return gimple_types_compatible_p (CONST_CAST_TREE (t1),
+ CONST_CAST_TREE (t2), GTC_DIAG);
+}
+
+/* Register type T in the global canonical type table gimple_canonical_types.
+ If another type T', compatible with T, already existed in
+ gimple_canonical_types then return T', otherwise return T. This is used
+ by LTO to merge identical types read from different TUs. */
+
+tree
+gimple_register_canonical_type (tree t)
+{
+ void **slot;
+
+ gcc_assert (TYPE_P (t));
+
+ if (TYPE_CANONICAL (t))
+ return TYPE_CANONICAL (t);
+
+ /* Always register the main variant first. This is important so we
+ pick up the non-typedef variants as canonical; otherwise we'll end
+ up taking typedef ids for structure tags during comparison. */
+ if (TYPE_MAIN_VARIANT (t) != t)
+ gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));
+
+ if (gimple_canonical_types == NULL)
+ gimple_canonical_types = htab_create_ggc (16381, gimple_type_hash,
+ gimple_canonical_type_eq, 0);
+
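+ /* Look up T in the canonical type table. Equality here is structural
+ compatibility in GTC_DIAG mode (gimple_canonical_type_eq). */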
+ slot = htab_find_slot (gimple_canonical_types, t, INSERT);
+ if (*slot
+ && *(tree *)slot != t)
+ {
+ tree new_type = (tree) *((tree *) slot);
+
+ TYPE_CANONICAL (t) = new_type;
+ t = new_type;
+ }
+ else
+ {
+ TYPE_CANONICAL (t) = t;
+ *slot = (void *) t;
+ }
+
+ return t;
+}
+
+
+/* Show statistics on references to the global GIMPLE type tables. */
+
+void
+print_gimple_types_stats (void)
+{
+ if (gimple_types)
+ fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
+ "%ld searches, %ld collisions (ratio: %f)\n",
+ (long) htab_size (gimple_types),
+ (long) htab_elements (gimple_types),
+ (long) gimple_types->searches,
+ (long) gimple_types->collisions,
+ htab_collisions (gimple_types));
+ else
+ fprintf (stderr, "GIMPLE type table is empty\n");
+ if (gimple_canonical_types)
+ fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
+ "%ld searches, %ld collisions (ratio: %f)\n",
+ (long) htab_size (gimple_canonical_types),
+ (long) htab_elements (gimple_canonical_types),
+ (long) gimple_canonical_types->searches,
+ (long) gimple_canonical_types->collisions,
+ htab_collisions (gimple_canonical_types));
+ else
+ fprintf (stderr, "GIMPLE canonical type table is empty\n");
+ if (type_hash_cache)
+ fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
+ "%ld searches, %ld collisions (ratio: %f)\n",
+ (long) htab_size (type_hash_cache),
+ (long) htab_elements (type_hash_cache),
+ (long) type_hash_cache->searches,
+ (long) type_hash_cache->collisions,
+ htab_collisions (type_hash_cache));
+ else
+ fprintf (stderr, "GIMPLE type hash table is empty\n");
+ if (gtc_visited)
+ fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
+ "elements, %ld searches, %ld collisions (ratio: %f)\n",
+ (long) htab_size (gtc_visited),
+ (long) htab_elements (gtc_visited),
+ (long) gtc_visited->searches,
+ (long) gtc_visited->collisions,
+ htab_collisions (gtc_visited));
+ else
+ fprintf (stderr, "GIMPLE type comparison table is empty\n");
+}
+
+/* Free the gimple type hashtables used for LTO type merging. */
+
+void
+free_gimple_type_tables (void)
+{
+ /* Last chance to print stats for the tables. */
+ if (flag_lto_report)
+ print_gimple_types_stats ();
+
+ if (gimple_types)
+ {
+ htab_delete (gimple_types);
+ gimple_types = NULL;
+ }
+ if (gimple_canonical_types)
+ {
+ htab_delete (gimple_canonical_types);
+ gimple_canonical_types = NULL;
+ }
+ if (type_hash_cache)
+ {
+ htab_delete (type_hash_cache);
+ type_hash_cache = NULL;
+ }
+ if (gtc_visited)
+ {
+ htab_delete (gtc_visited);
+ obstack_free (&gtc_ob, NULL);
+ gtc_visited = NULL;
+ }
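+ /* The leader cache is GC-allocated; dropping the reference lets the
+ garbage collector reclaim it. */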
+ gimple_type_leader = NULL;
+}
+
+
+/* Return a type the same as TYPE except unsigned or
+ signed according to UNSIGNEDP. */
+
+static tree
+gimple_signed_or_unsigned_type (bool unsignedp, tree type)
+{
+ tree type1;
+
+ type1 = TYPE_MAIN_VARIANT (type);
+ if (type1 == signed_char_type_node
+ || type1 == char_type_node
+ || type1 == unsigned_char_type_node)
+ return unsignedp ? unsigned_char_type_node : signed_char_type_node;
+ if (type1 == integer_type_node || type1 == unsigned_type_node)
+ return unsignedp ? unsigned_type_node : integer_type_node;
+ if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
+ return unsignedp ? short_unsigned_type_node : short_integer_type_node;
+ if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
+ return unsignedp ? long_unsigned_type_node : long_integer_type_node;
+ if (type1 == long_long_integer_type_node
+ || type1 == long_long_unsigned_type_node)
+ return unsignedp
+ ? long_long_unsigned_type_node
+ : long_long_integer_type_node;
+ if (int128_integer_type_node
+ && (type1 == int128_integer_type_node
+ || type1 == int128_unsigned_type_node))
+ return unsignedp
+ ? int128_unsigned_type_node
+ : int128_integer_type_node;
+#if HOST_BITS_PER_WIDE_INT >= 64
+ if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
+ return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
+#endif
+ if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
+ return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
+ if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
+ return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
+ if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
+ return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
+ if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
+ return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
+
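+/* Helper macros mapping each fixed-point type node to its signed or
+ unsigned counterpart. */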
+#define GIMPLE_FIXED_TYPES(NAME) \
+ if (type1 == short_ ## NAME ## _type_node \
+ || type1 == unsigned_short_ ## NAME ## _type_node) \
+ return unsignedp ? unsigned_short_ ## NAME ## _type_node \
+ : short_ ## NAME ## _type_node; \
+ if (type1 == NAME ## _type_node \
+ || type1 == unsigned_ ## NAME ## _type_node) \
+ return unsignedp ? unsigned_ ## NAME ## _type_node \
+ : NAME ## _type_node; \
+ if (type1 == long_ ## NAME ## _type_node \
+ || type1 == unsigned_long_ ## NAME ## _type_node) \
+ return unsignedp ? unsigned_long_ ## NAME ## _type_node \
+ : long_ ## NAME ## _type_node; \
+ if (type1 == long_long_ ## NAME ## _type_node \
+ || type1 == unsigned_long_long_ ## NAME ## _type_node) \
+ return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
+ : long_long_ ## NAME ## _type_node;
+
+#define GIMPLE_FIXED_MODE_TYPES(NAME) \
+ if (type1 == NAME ## _type_node \
+ || type1 == u ## NAME ## _type_node) \
+ return unsignedp ? u ## NAME ## _type_node \
+ : NAME ## _type_node;
+
+#define GIMPLE_FIXED_TYPES_SAT(NAME) \
+ if (type1 == sat_ ## short_ ## NAME ## _type_node \
+ || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
+ return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
+ : sat_ ## short_ ## NAME ## _type_node; \
+ if (type1 == sat_ ## NAME ## _type_node \
+ || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
+ return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
+ : sat_ ## NAME ## _type_node; \
+ if (type1 == sat_ ## long_ ## NAME ## _type_node \
+ || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
+ return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
+ : sat_ ## long_ ## NAME ## _type_node; \
+ if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
+ || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
+ return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
+ : sat_ ## long_long_ ## NAME ## _type_node;
+
+#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
+ if (type1 == sat_ ## NAME ## _type_node \
+ || type1 == sat_ ## u ## NAME ## _type_node) \
+ return unsignedp ? sat_ ## u ## NAME ## _type_node \
+ : sat_ ## NAME ## _type_node;
+
+ GIMPLE_FIXED_TYPES (fract);
+ GIMPLE_FIXED_TYPES_SAT (fract);
+ GIMPLE_FIXED_TYPES (accum);
+ GIMPLE_FIXED_TYPES_SAT (accum);
+
+ GIMPLE_FIXED_MODE_TYPES (qq);
+ GIMPLE_FIXED_MODE_TYPES (hq);
+ GIMPLE_FIXED_MODE_TYPES (sq);
+ GIMPLE_FIXED_MODE_TYPES (dq);
+ GIMPLE_FIXED_MODE_TYPES (tq);
+ GIMPLE_FIXED_MODE_TYPES_SAT (qq);
+ GIMPLE_FIXED_MODE_TYPES_SAT (hq);
+ GIMPLE_FIXED_MODE_TYPES_SAT (sq);
+ GIMPLE_FIXED_MODE_TYPES_SAT (dq);
+ GIMPLE_FIXED_MODE_TYPES_SAT (tq);
+ GIMPLE_FIXED_MODE_TYPES (ha);
+ GIMPLE_FIXED_MODE_TYPES (sa);
+ GIMPLE_FIXED_MODE_TYPES (da);
+ GIMPLE_FIXED_MODE_TYPES (ta);
+ GIMPLE_FIXED_MODE_TYPES_SAT (ha);
+ GIMPLE_FIXED_MODE_TYPES_SAT (sa);
+ GIMPLE_FIXED_MODE_TYPES_SAT (da);
+ GIMPLE_FIXED_MODE_TYPES_SAT (ta);
+
+ /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
+ the precision; they have precision set to match their range, but
+ may use a wider mode to match an ABI. If we change modes, we may
+ wind up with bad conversions. For INTEGER_TYPEs in C, must check
+ the precision as well, so as to yield correct results for
+ bit-field types. C++ does not have these separate bit-field
+ types, and producing a signed or unsigned variant of an
+ ENUMERAL_TYPE may cause other problems as well. */
+ if (!INTEGRAL_TYPE_P (type)
+ || TYPE_UNSIGNED (type) == unsignedp)
+ return type;
+
+#define TYPE_OK(node) \
+ (TYPE_MODE (type) == TYPE_MODE (node) \
+ && TYPE_PRECISION (type) == TYPE_PRECISION (node))
+ if (TYPE_OK (signed_char_type_node))
+ return unsignedp ? unsigned_char_type_node : signed_char_type_node;
+ if (TYPE_OK (integer_type_node))
+ return unsignedp ? unsigned_type_node : integer_type_node;
+ if (TYPE_OK (short_integer_type_node))
+ return unsignedp ? short_unsigned_type_node : short_integer_type_node;
+ if (TYPE_OK (long_integer_type_node))
+ return unsignedp ? long_unsigned_type_node : long_integer_type_node;
+ if (TYPE_OK (long_long_integer_type_node))
+ return (unsignedp
+ ? long_long_unsigned_type_node
+ : long_long_integer_type_node);
+ if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
+ return (unsignedp
+ ? int128_unsigned_type_node
+ : int128_integer_type_node);
+
+#if HOST_BITS_PER_WIDE_INT >= 64
+ if (TYPE_OK (intTI_type_node))
+ return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
+#endif
+ if (TYPE_OK (intDI_type_node))
+ return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
+ if (TYPE_OK (intSI_type_node))
+ return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
+ if (TYPE_OK (intHI_type_node))
+ return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
+ if (TYPE_OK (intQI_type_node))
+ return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
+
+#undef GIMPLE_FIXED_TYPES
+#undef GIMPLE_FIXED_MODE_TYPES
+#undef GIMPLE_FIXED_TYPES_SAT
+#undef GIMPLE_FIXED_MODE_TYPES_SAT
+#undef TYPE_OK
+
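+ /* None of the predefined type nodes matched; build a new integer type
+ with the requested signedness and the same precision. */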
+ return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
+}
+
+
+/* Return an unsigned type the same as TYPE in other respects. */
+
+tree
+gimple_unsigned_type (tree type)
+{
+ return gimple_signed_or_unsigned_type (true, type);
+}
+
+
+/* Return a signed type the same as TYPE in other respects. */
+
+tree
+gimple_signed_type (tree type)
+{
+ return gimple_signed_or_unsigned_type (false, type);
+}
+
+
+/* Return the type-based alias set for T, which may be an expression
+ or a type. Return -1 if we don't do anything special. */
+
+alias_set_type
+gimple_get_alias_set (tree t)
+{
+ tree u;
+
+ /* Permit type-punning when accessing a union, provided the access
+ is directly through the union. For example, this code does not
+ permit taking the address of a union member and then storing
+ through it. Even the type-punning allowed here is a GCC
+ extension, albeit a common and useful one; the C standard says
+ that such accesses have implementation-defined behavior. */
+ for (u = t;
+ TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
+ u = TREE_OPERAND (u, 0))
+ if (TREE_CODE (u) == COMPONENT_REF
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
+ return 0;
+
+ /* That's all the expressions we handle specially. */
+ if (!TYPE_P (t))
+ return -1;
+
+ /* For convenience, follow the C standard when dealing with
+ character types. Any object may be accessed via an lvalue that
+ has character type. */
+ if (t == char_type_node
+ || t == signed_char_type_node
+ || t == unsigned_char_type_node)
+ return 0;
+
+ /* Allow aliasing between signed and unsigned variants of the same
+ type. We treat the signed variant as canonical. */
+ if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
+ {
+ tree t1 = gimple_signed_type (t);
+
+ /* t1 == t can happen for boolean nodes which are always unsigned. */
+ if (t1 != t)
+ return get_alias_set (t1);
+ }
+
+ return -1;
+}
+
+
+/* Data structure used to count the number of dereferences to PTR
+ inside an expression. */
+struct count_ptr_d
+{
+ tree ptr;
+ unsigned num_stores;
+ unsigned num_loads;
+};
+
+/* Helper for count_uses_and_derefs. Called by walk_tree to look for
+ MEM_REF nodes dereferencing the pointer passed in DATA. */
+
+static tree
+count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
+{
+ struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
+ struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
+
+ /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
+ pointer 'ptr' is *not* dereferenced; it is simply used to compute
+ the address of 'fld' as 'ptr + offsetof(fld)'. */
+ if (TREE_CODE (*tp) == ADDR_EXPR)
+ {
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
+
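+ /* A MEM_REF whose base is the pointer we are looking for counts as a
+ store when it appears on the LHS and as a load otherwise. */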
+ if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
+ {
+ if (wi_p->is_lhs)
+ count_p->num_stores++;
+ else
+ count_p->num_loads++;
+ }
+
+ return NULL_TREE;
+}
+
+/* Count the number of direct and indirect uses for pointer PTR in
+ statement STMT. The number of direct uses is stored in
+ *NUM_USES_P. Indirect references are counted separately depending
+ on whether they are store or load operations. The counts are
+ stored in *NUM_STORES_P and *NUM_LOADS_P. */
+
+void
+count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
+ unsigned *num_loads_p, unsigned *num_stores_p)
+{
+ ssa_op_iter i;
+ tree use;
+
+ *num_uses_p = 0;
+ *num_loads_p = 0;
+ *num_stores_p = 0;
+
+ /* Find out the total number of uses of PTR in STMT. */
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
+ if (use == ptr)
+ (*num_uses_p)++;
+
+ /* Now count the number of indirect references to PTR. This is
+ truly awful, but we don't have much choice. There are no parent
+ pointers inside INDIRECT_REFs, so an expression like
+ '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
+ find all the indirect and direct uses of x_1 inside. The only
+ shortcut we can take is the fact that GIMPLE only allows
+ INDIRECT_REFs inside the expressions below. */
+ if (is_gimple_assign (stmt)
+ || gimple_code (stmt) == GIMPLE_RETURN
+ || gimple_code (stmt) == GIMPLE_ASM
+ || is_gimple_call (stmt))
+ {
+ struct walk_stmt_info wi;
+ struct count_ptr_d count;
+
+ count.ptr = ptr;
+ count.num_stores = 0;
+ count.num_loads = 0;
+
+ memset (&wi, 0, sizeof (wi));
+ wi.info = &count;
+ walk_gimple_op (stmt, count_ptr_derefs, &wi);
+
+ *num_stores_p = count.num_stores;
+ *num_loads_p = count.num_loads;
+ }
+
+ gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
+}
+
+/* From a tree operand OP return the base of a load or store operation
+ or NULL_TREE if OP is not a load or a store. */
+
+static tree
+get_base_loadstore (tree op)
+{
+ while (handled_component_p (op))
+ op = TREE_OPERAND (op, 0);
+ if (DECL_P (op)
+ || INDIRECT_REF_P (op)
+ || TREE_CODE (op) == MEM_REF
+ || TREE_CODE (op) == TARGET_MEM_REF)
+ return op;
+ return NULL_TREE;
+}
+
+/* For the statement STMT, call the callbacks VISIT_LOAD, VISIT_STORE and
+ VISIT_ADDR (if non-NULL) on load, store and address-taken operands,
+ passing STMT, the base of the operand and DATA to each. The base
+ will be either a decl, an indirect reference (including TARGET_MEM_REF)
+ or the argument of an address expression.
+ Returns the results of these callbacks or'ed. */
+
+bool
+walk_stmt_load_store_addr_ops (gimple stmt, void *data,
+ bool (*visit_load)(gimple, tree, void *),
+ bool (*visit_store)(gimple, tree, void *),
+ bool (*visit_addr)(gimple, tree, void *))
+{
+ bool ret = false;
+ unsigned i;
+ if (gimple_assign_single_p (stmt))
+ {
+ tree lhs, rhs;
+ if (visit_store)
+ {
+ lhs = get_base_loadstore (gimple_assign_lhs (stmt));
+ if (lhs)
+ ret |= visit_store (stmt, lhs, data);
+ }
+ rhs = gimple_assign_rhs1 (stmt);
+ while (handled_component_p (rhs))
+ rhs = TREE_OPERAND (rhs, 0);
+ if (visit_addr)
+ {
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
+ else if (TREE_CODE (rhs) == TARGET_MEM_REF
+ && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
+ else if (TREE_CODE (rhs) == OBJ_TYPE_REF
+ && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
+ 0), data);
+ lhs = gimple_assign_lhs (stmt);
+ if (TREE_CODE (lhs) == TARGET_MEM_REF
+ && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
+ }
+ if (visit_load)
+ {
+ rhs = get_base_loadstore (rhs);
+ if (rhs)
+ ret |= visit_load (stmt, rhs, data);
+ }
+ }
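+ /* For other assignments and for GIMPLE_COND we only scan the operands
+ for taken addresses. */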
+ else if (visit_addr
+ && (is_gimple_assign (stmt)
+ || gimple_code (stmt) == GIMPLE_COND))
+ {
+ for (i = 0; i < gimple_num_ops (stmt); ++i)
+ if (gimple_op (stmt, i)
+ && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
+ }
+ else if (is_gimple_call (stmt))
+ {
+ if (visit_store)
+ {
+ tree lhs = gimple_call_lhs (stmt);
+ if (lhs)
+ {
+ lhs = get_base_loadstore (lhs);
+ if (lhs)
+ ret |= visit_store (stmt, lhs, data);
+ }
+ }
+ if (visit_load || visit_addr)
+ for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ {
+ tree rhs = gimple_call_arg (stmt, i);
+ if (visit_addr
+ && TREE_CODE (rhs) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
+ else if (visit_load)
+ {
+ rhs = get_base_loadstore (rhs);
+ if (rhs)
+ ret |= visit_load (stmt, rhs, data);
+ }
+ }
+ if (visit_addr
+ && gimple_call_chain (stmt)
+ && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
+ data);
+ if (visit_addr
+ && gimple_call_return_slot_opt_p (stmt)
+ && gimple_call_lhs (stmt) != NULL_TREE
+ && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
+ ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
+ }
+ else if (gimple_code (stmt) == GIMPLE_ASM)
+ {
+ unsigned noutputs;
+ const char *constraint;
+ const char **oconstraints;
+ bool allows_mem, allows_reg, is_inout;
+ noutputs = gimple_asm_noutputs (stmt);
+ oconstraints = XALLOCAVEC (const char *, noutputs);
+ if (visit_store || visit_addr)
+ for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ {
+ tree link = gimple_asm_output_op (stmt, i);
+ tree op = get_base_loadstore (TREE_VALUE (link));
+ if (op && visit_store)
+ ret |= visit_store (stmt, op, data);
+ if (visit_addr)
+ {
+ constraint = TREE_STRING_POINTER
+ (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+ if (op && !allows_reg && allows_mem)
+ ret |= visit_addr (stmt, op, data);
+ }
+ }
+ if (visit_load || visit_addr)
+ for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ {
+ tree link = gimple_asm_input_op (stmt, i);
+ tree op = TREE_VALUE (link);
+ if (visit_addr
+ && TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ else if (visit_load || visit_addr)
+ {
+ op = get_base_loadstore (op);
+ if (op)
+ {
+ if (visit_load)
+ ret |= visit_load (stmt, op, data);
+ if (visit_addr)
+ {
+ constraint = TREE_STRING_POINTER
+ (TREE_VALUE (TREE_PURPOSE (link)));
+ parse_input_constraint (&constraint, 0, 0, noutputs,
+ 0, oconstraints,
+ &allows_mem, &allows_reg);
+ if (!allows_reg && allows_mem)
+ ret |= visit_addr (stmt, op, data);
+ }
+ }
+ }
+ }
+ }
+ else if (gimple_code (stmt) == GIMPLE_RETURN)
+ {
+ tree op = gimple_return_retval (stmt);
+ if (op)
+ {
+ if (visit_addr
+ && TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ else if (visit_load)
+ {
+ op = get_base_loadstore (op);
+ if (op)
+ ret |= visit_load (stmt, op, data);
+ }
+ }
+ }
+ else if (visit_addr
+ && gimple_code (stmt) == GIMPLE_PHI)
+ {
+ for (i = 0; i < gimple_phi_num_args (stmt); ++i)
+ {
+ tree op = PHI_ARG_DEF (stmt, i);
+ if (TREE_CODE (op) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+ }
+ }
+
+ return ret;
+}
+
+/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
+ should make a faster clone for this case. */
+
+bool
+walk_stmt_load_store_ops (gimple stmt, void *data,
+ bool (*visit_load)(gimple, tree, void *),
+ bool (*visit_store)(gimple, tree, void *))
+{
+ return walk_stmt_load_store_addr_ops (stmt, data,
+ visit_load, visit_store, NULL);
+}
+
+/* Helper for gimple_ior_addresses_taken_1. */
+
+static bool
+gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
+ tree addr, void *data)
+{
+ bitmap addresses_taken = (bitmap)data;
+ addr = get_base_address (addr);
+ if (addr
+ && DECL_P (addr))
+ {
+ bitmap_set_bit (addresses_taken, DECL_UID (addr));
+ return true;
+ }
+ return false;
+}
+
+/* Set the bits for the UIDs of all decls that have their address taken
+ in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
+ were any in this stmt. */
+
+bool
+gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
+{
+ return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
+ gimple_ior_addresses_taken_1);
+}
+
+
+/* Return a printable name for symbol DECL. */
+
+const char *
+gimple_decl_printable_name (tree decl, int verbosity)
+{
+ if (!DECL_NAME (decl))
+ return NULL;
+
+ if (DECL_ASSEMBLER_NAME_SET_P (decl))
+ {
+ const char *str, *mangled_str;
+ int dmgl_opts = DMGL_NO_OPTS;
+
+ if (verbosity >= 2)
+ {
+ dmgl_opts = DMGL_VERBOSE
+ | DMGL_ANSI
+ | DMGL_GNU_V3
+ | DMGL_RET_POSTFIX;
+ if (TREE_CODE (decl) == FUNCTION_DECL)
+ dmgl_opts |= DMGL_PARAMS;
+ }
+
+ mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
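+ /* Demangle the assembler name; fall back to the mangled name below if
+ demangling fails. */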
+ str = cplus_demangle_v3 (mangled_str, dmgl_opts);
+ return (str) ? str : mangled_str;
+ }
+
+ return IDENTIFIER_POINTER (DECL_NAME (decl));
+}
+
+/* Return true when STMT is a call to the builtin function CODE. */
+
+bool
+gimple_call_builtin_p (gimple stmt, enum built_in_function code)
+{
+ tree fndecl;
+ return (is_gimple_call (stmt)
+ && (fndecl = gimple_call_fndecl (stmt)) != NULL
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (fndecl) == code);
+}
+