#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
-#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
htab_t type_hash_cache;
+static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
+ htab_t canonical_type_hash_cache;
/* Global type comparison cache. This is by TYPE_UID for space efficiency
and thus cannot use and does not need GC. */
}
}
+
/* Return true if GS is a copy assignment. */
bool
gimple_assign_copy_p (gimple gs)
{
- return gimple_code (gs) == GIMPLE_ASSIGN
- && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
- == GIMPLE_SINGLE_RHS
- && is_gimple_val (gimple_op (gs, 1));
+ return (gimple_assign_single_p (gs)
+ && is_gimple_val (gimple_op (gs, 1)));
}
bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
- return (gimple_code (gs) == GIMPLE_ASSIGN
- && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
- == GIMPLE_SINGLE_RHS)
+ return (gimple_assign_single_p (gs)
&& TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
&& TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}
-/* Return true if GS is an assignment with a singleton RHS, i.e.,
- there is no operator associated with the assignment itself.
- Unlike gimple_assign_copy_p, this predicate returns true for
- any RHS operand, including those that perform an operation
- and do not have the semantics of a copy, such as COND_EXPR. */
-
-bool
-gimple_assign_single_p (gimple gs)
-{
- return (gimple_code (gs) == GIMPLE_ASSIGN
- && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
- == GIMPLE_SINGLE_RHS);
-}
-
/* Return true if GS is an assignment with a unary RHS, but the
operator has no effect on the assigned value. The logic is adapted
from STRIP_NOPS. This predicate is intended to be used in tuplifying
bool
gimple_assign_unary_nop_p (gimple gs)
{
- return (gimple_code (gs) == GIMPLE_ASSIGN
+ return (is_gimple_assign (gs)
&& (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
|| gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
&& gimple_assign_rhs1 (gs) != error_mark_node
return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}
-/* Return true if T is a typecast operation. */
-
-bool
-is_gimple_cast (tree t)
-{
- return (CONVERT_EXPR_P (t)
- || TREE_CODE (t) == FIX_TRUNC_EXPR);
-}
-
/* Return true if T is a valid function operand of a CALL_EXPR. */
bool
}
-static hashval_t gimple_type_hash (const void *);
+static hashval_t gimple_type_hash_1 (const void *, enum gtc_mode);
/* Structure used to maintain a cache of some type pairs compared by
gimple_types_compatible_p when comparing aggregate types. There are
/* If the hash values of t1 and t2 are different the types can't
possibly be the same. This helps keeping the type-pair hashtable
small, only tracking comparisons for hash collisions. */
- if (gimple_type_hash (t1) != gimple_type_hash (t2))
+ if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
return false;
/* Allocate a new cache entry for this comparison. */
if ((slot = pointer_map_contains (sccstate, p)) != NULL)
cstate = (struct sccs *)*slot;
+ /* Not yet visited. DFS recurse. */
if (!cstate)
{
- bool res;
- /* Not yet visited. DFS recurse. */
- res = gimple_types_compatible_p_1 (t1, t2, mode, p,
- sccstack, sccstate, sccstate_obstack);
- if (!cstate)
- cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
+ gimple_types_compatible_p_1 (t1, t2, mode, p,
+ sccstack, sccstate, sccstate_obstack);
+ cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
state->low = MIN (state->low, cstate->low);
- /* If the type is no longer on the SCC stack and thus is not part
- of the parents SCC, return its state. Otherwise we will
- ignore this pair and assume equality. */
- if (!cstate->on_sccstack)
- return res;
}
+ /* If the type is still on the SCC stack adjust the parent's low. */
if (cstate->dfsnum < state->dfsnum
&& cstate->on_sccstack)
state->low = MIN (cstate->dfsnum, state->low);
- /* We are part of our parents SCC, skip this entry and return true. */
- return true;
+ /* Return the current lattice value. We start with an equality
+ assumption so types part of a SCC will be optimistically
+ treated equal unless proven otherwise. */
+ return cstate->u.same_p;
}
/* Worker for gimple_types_compatible.
state->dfsnum = gtc_next_dfs_num++;
state->low = state->dfsnum;
state->on_sccstack = true;
+ /* Start with an equality assumption. As we DFS recurse into child
+ SCCs this assumption may get revisited. */
+ state->u.same_p = 1;
/* If their attributes are not the same they can't be the same type. */
if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
tree f1, f2;
/* The struct tags shall compare equal. */
- if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1),
- TYPE_MAIN_VARIANT (t2), false))
+ if (mode == GTC_MERGE
+ && !compare_type_names_p (TYPE_MAIN_VARIANT (t1),
+ TYPE_MAIN_VARIANT (t2), false))
goto different_types;
/* For aggregate types, all the fields must be the same. */
f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
{
/* The fields must have the same name, offset and type. */
- if (DECL_NAME (f1) != DECL_NAME (f2)
+ if ((mode == GTC_MERGE
+ && DECL_NAME (f1) != DECL_NAME (f2))
|| DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
|| !gimple_compare_field_offset (f1, f2)
|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
/* Common exit path for types that are compatible. */
same_types:
- state->u.same_p = 1;
- goto pop;
+ gcc_assert (state->u.same_p == 1);
pop:
if (state->low == state->dfsnum)
{
type_pair_t x;
- /* Pop off the SCC and set its cache values. */
+ /* Pop off the SCC and set its cache values to the final
+ comparison result. */
do
{
struct sccs *cstate;
x = VEC_pop (type_pair_t, *sccstack);
cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
cstate->on_sccstack = false;
- x->same_p[mode] = cstate->u.same_p;
+ x->same_p[mode] = state->u.same_p;
}
while (x != p);
}
/* If the hash values of t1 and t2 are different the types can't
possibly be the same. This helps keeping the type-pair hashtable
small, only tracking comparisons for hash collisions. */
- if (gimple_type_hash (t1) != gimple_type_hash (t2))
+ if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
return false;
/* If we've visited this type pair before (in the case of aggregates
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
- struct pointer_map_t *, struct obstack *);
+ struct pointer_map_t *, struct obstack *,
+ enum gtc_mode);
/* DFS visit the edge from the callers type with state *STATE to T.
Update the callers type hash V with the hash for T if it is not part
visit (tree t, struct sccs *state, hashval_t v,
VEC (tree, heap) **sccstack,
struct pointer_map_t *sccstate,
- struct obstack *sccstate_obstack)
+ struct obstack *sccstate_obstack, enum gtc_mode mode)
{
struct sccs *cstate = NULL;
struct tree_int_map m;
/* If there is a hash value recorded for this type then it can't
possibly be part of our parent SCC. Simply mix in its hash. */
m.base.from = t;
- if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
+ if ((slot = htab_find_slot (mode == GTC_MERGE
+ ? type_hash_cache : canonical_type_hash_cache,
+ &m, NO_INSERT))
&& *slot)
return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
hashval_t tem;
/* Not yet visited. DFS recurse. */
tem = iterative_hash_gimple_type (t, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack,
+ mode);
if (!cstate)
cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
state->low = MIN (state->low, cstate->low);
iterative_hash_gimple_type (tree type, hashval_t val,
VEC(tree, heap) **sccstack,
struct pointer_map_t *sccstate,
- struct obstack *sccstate_obstack)
+ struct obstack *sccstate_obstack,
+ enum gtc_mode mode)
{
hashval_t v;
void **slot;
{
v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
v = iterative_hash_name
- (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
+ (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
}
else
v = visit (TREE_TYPE (type), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
}
/* For integer types hash the types min/max values and the string flag. */
{
v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
v = visit (TYPE_DOMAIN (type), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
}
/* Recurse for aggregates with a single element type. */
|| TREE_CODE (type) == COMPLEX_TYPE
|| TREE_CODE (type) == VECTOR_TYPE)
v = visit (TREE_TYPE (type), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
/* Incorporate function return and argument types. */
if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
/* For method types also incorporate their parent class. */
if (TREE_CODE (type) == METHOD_TYPE)
v = visit (TYPE_METHOD_BASETYPE (type), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
/* For result types allow mismatch in completeness. */
if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
{
v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
v = iterative_hash_name
- (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
+ (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
}
else
v = visit (TREE_TYPE (type), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
{
{
v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
v = iterative_hash_name
- (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
+ (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
}
else
v = visit (TREE_VALUE (p), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
na++;
}
unsigned nf;
tree f;
- v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
+ if (mode == GTC_MERGE)
+ v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
{
- v = iterative_hash_name (DECL_NAME (f), v);
+ if (mode == GTC_MERGE)
+ v = iterative_hash_name (DECL_NAME (f), v);
v = visit (TREE_TYPE (f), state, v,
- sccstack, sccstate, sccstate_obstack);
+ sccstack, sccstate, sccstate_obstack, mode);
nf++;
}
cstate->on_sccstack = false;
m->base.from = x;
m->to = cstate->u.hash;
- slot = htab_find_slot (type_hash_cache, m, INSERT);
+ slot = htab_find_slot (mode == GTC_MERGE
+ ? type_hash_cache : canonical_type_hash_cache,
+ m, INSERT);
gcc_assert (!*slot);
*slot = (void *) m;
}
types according to gimple_types_compatible_p. */
static hashval_t
-gimple_type_hash (const void *p)
+gimple_type_hash_1 (const void *p, enum gtc_mode mode)
{
const_tree t = (const_tree) p;
VEC(tree, heap) *sccstack = NULL;
void **slot;
struct tree_int_map m;
- if (type_hash_cache == NULL)
+ if (mode == GTC_MERGE
+ && type_hash_cache == NULL)
type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
tree_int_map_eq, NULL);
+ else if (mode == GTC_DIAG
+ && canonical_type_hash_cache == NULL)
+ canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
+ tree_int_map_eq, NULL);
m.base.from = CONST_CAST_TREE (t);
- if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
+ if ((slot = htab_find_slot (mode == GTC_MERGE
+ ? type_hash_cache : canonical_type_hash_cache,
+ &m, NO_INSERT))
&& *slot)
return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
sccstate = pointer_map_create ();
gcc_obstack_init (&sccstate_obstack);
val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
- &sccstack, sccstate, &sccstate_obstack);
+ &sccstack, sccstate, &sccstate_obstack,
+ mode);
VEC_free (tree, heap, sccstack);
pointer_map_destroy (sccstate);
obstack_free (&sccstate_obstack, NULL);
return val;
}
+static hashval_t
+gimple_type_hash (const void *p)
+{
+ return gimple_type_hash_1 (p, GTC_MERGE);
+}
+
+static hashval_t
+gimple_canonical_type_hash (const void *p)
+{
+ return gimple_type_hash_1 (p, GTC_DIAG);
+}
+
/* Returns nonzero if P1 and P2 are equal. */
{
void **slot;
gimple_type_leader_entry *leader;
+ tree mv_leader = NULL_TREE;
gcc_assert (TYPE_P (t));
pick up the non-typedef variants as canonical, otherwise we'll end
up taking typedef ids for structure tags during comparison. */
if (TYPE_MAIN_VARIANT (t) != t)
- gimple_register_type (TYPE_MAIN_VARIANT (t));
+ mv_leader = gimple_register_type (TYPE_MAIN_VARIANT (t));
if (gimple_types == NULL)
gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
{
leader->type = t;
leader->leader = t;
+ /* We're the type leader. Make our TYPE_MAIN_VARIANT valid. */
+ if (TYPE_MAIN_VARIANT (t) != t
+ && TYPE_MAIN_VARIANT (t) != mv_leader)
+ {
+ /* Remove us from our main variant list as we are not the variant
+ leader and the variant leader will change. */
+ tree tem = TYPE_MAIN_VARIANT (t);
+ while (tem && TYPE_NEXT_VARIANT (tem) != t)
+ tem = TYPE_NEXT_VARIANT (tem);
+ if (tem)
+ TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
+ TYPE_NEXT_VARIANT (t) = NULL_TREE;
+ /* Adjust our main variant. Linking us into its variant list
+ will happen at fixup time. */
+ TYPE_MAIN_VARIANT (t) = mv_leader;
+ }
*slot = (void *) t;
}
gimple_register_canonical_type (tree t)
{
void **slot;
+ tree orig_t = t;
gcc_assert (TYPE_P (t));
if (TYPE_CANONICAL (t))
return TYPE_CANONICAL (t);
+ /* Always register the type itself first so that if it turns out
+ to be the canonical type it will be the one we merge to as well. */
+ t = gimple_register_type (t);
+
/* Always register the main variant first. This is important so we
pick up the non-typedef variants as canonical, otherwise we'll end
up taking typedef ids for structure tags during comparison. */
gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));
if (gimple_canonical_types == NULL)
- gimple_canonical_types = htab_create_ggc (16381, gimple_type_hash,
+ gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
gimple_canonical_type_eq, 0);
slot = htab_find_slot (gimple_canonical_types, t, INSERT);
*slot = (void *) t;
}
+ /* Also cache the canonical type in the non-leaders. */
+ TYPE_CANONICAL (orig_t) = t;
+
return t;
}
htab_collisions (gimple_types));
else
fprintf (stderr, "GIMPLE type table is empty\n");
+ if (type_hash_cache)
+ fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
+ "%ld searches, %ld collisions (ratio: %f)\n",
+ (long) htab_size (type_hash_cache),
+ (long) htab_elements (type_hash_cache),
+ (long) type_hash_cache->searches,
+ (long) type_hash_cache->collisions,
+ htab_collisions (type_hash_cache));
+ else
+ fprintf (stderr, "GIMPLE type hash table is empty\n");
if (gimple_canonical_types)
fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
"%ld searches, %ld collisions (ratio: %f)\n",
htab_collisions (gimple_canonical_types));
else
fprintf (stderr, "GIMPLE canonical type table is empty\n");
- if (type_hash_cache)
- fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
+ if (canonical_type_hash_cache)
+ fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
"%ld searches, %ld collisions (ratio: %f)\n",
- (long) htab_size (type_hash_cache),
- (long) htab_elements (type_hash_cache),
- (long) type_hash_cache->searches,
- (long) type_hash_cache->collisions,
- htab_collisions (type_hash_cache));
+ (long) htab_size (canonical_type_hash_cache),
+ (long) htab_elements (canonical_type_hash_cache),
+ (long) canonical_type_hash_cache->searches,
+ (long) canonical_type_hash_cache->collisions,
+ htab_collisions (canonical_type_hash_cache));
else
- fprintf (stderr, "GIMPLE type hash table is empty\n");
+ fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
if (gtc_visited)
fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
"elements, %ld searches, %ld collisions (ratio: %f)\n",
htab_delete (type_hash_cache);
type_hash_cache = NULL;
}
+ if (canonical_type_hash_cache)
+ {
+ htab_delete (canonical_type_hash_cache);
+ canonical_type_hash_cache = NULL;
+ }
if (gtc_visited)
{
htab_delete (gtc_visited);