/* Unique id for next type created. */
static GTY(()) int next_type_uid = 1;
+/* Mapping from unique DECL_UID to the decl tree node. */
+static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
+ htab_t decl_for_uid_map;
+
+static void insert_decl_to_uid_decl_map (tree);
+
/* Since we cannot rehash a type after it is in the table, we have to
keep the hash code. */
int_cst_node = make_node (INTEGER_CST);
+ decl_for_uid_map = htab_create_ggc (4093, uid_decl_map_hash,
+ uid_decl_map_eq, NULL);
+
tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON] = 1;
tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_NON_COMMON] = 1;
tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON] = 1;
}
DECL_SOURCE_LOCATION (t) = input_location;
DECL_UID (t) = next_decl_uid++;
+ insert_decl_to_uid_decl_map (t);
break;
SET_DECL_RESTRICT_BASE (t, DECL_GET_RESTRICT_BASE (node));
DECL_BASED_ON_RESTRICT_P (t) = 1;
}
+ insert_decl_to_uid_decl_map (t);
}
else if (TREE_CODE_CLASS (code) == tcc_type)
{
{
tree copy = NULL_TREE;
int i;
- int n = TREE_OPERAND_LENGTH (exp);
- for (i = 1; i < n; i++)
+
+ for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
{
tree op = TREE_OPERAND (exp, i);
tree newop = SUBSTITUTE_IN_EXPR (op, f, r);
else
return exp;
}
+ break;
default:
gcc_unreachable ();
return t;
}
\f
+/* Equality function for the DECL_UID -> decl hash table: two entries
+   match iff their trees carry the same DECL_UID.  */
+
+int
+uid_decl_map_eq (const void *va, const void *vb)
+{
+  return (((const_tree) va)->decl_minimal.uid
+	  == ((const_tree) vb)->decl_minimal.uid);
+}
+
+/* Hash function for the DECL_UID -> decl hash table: the DECL_UID
+   itself serves as the hash value.  */
+
+unsigned int
+uid_decl_map_hash (const void *item)
+{
+  const_tree t = (const_tree) item;
+  return t->decl_minimal.uid;
+}
+
+/* Insert the declaration NODE into the map mapping its unique uid
+   back to the tree.  Called whenever a fresh DECL_UID is assigned
+   (make_node, copy_node).  */
+
+static void
+insert_decl_to_uid_decl_map (tree node)
+{
+  void **slot;
+  struct tree_decl_minimal key;
+
+  /* Hashing and equality look only at decl_minimal.uid, so a
+     stack-allocated tree_decl_minimal is a sufficient lookup key.  */
+  key.uid = DECL_UID (node);
+  slot = htab_find_slot_with_hash (decl_for_uid_map,
+				   &key, DECL_UID (node), INSERT);
+
+  /* We should never try to re-insert a decl with the same uid.
+     ??? The C++ frontend breaks this invariant.  Hopefully in a
+     non-fatal way, so just overwrite the slot in this case.  */
+#if 0
+  gcc_assert (!*slot);
+#endif
+
+  *(tree *)slot = node;
+}
+
+/* Look up the decl tree node carrying DECL_UID UID.  Return that
+   node, or NULL if it has been garbage collected.  */
+
+tree
+lookup_decl_from_uid (int uid)
+{
+  struct tree_decl_minimal lookup;
+
+  lookup.uid = uid;
+  return (tree) htab_find_with_hash (decl_for_uid_map, &lookup, uid);
+}
+
+/* Remove the declaration tree DECL from the global UID to decl map.
+   Needed only when a decl tree is ggc_freed explicitly; otherwise
+   garbage collection prunes the stale entry automatically.  */
+
+void
+remove_decl_from_map (tree decl)
+{
+  struct tree_decl_minimal lookup;
+
+  lookup.uid = DECL_UID (decl);
+#if ENABLE_CHECKING
+  gcc_assert (decl == htab_find_with_hash (decl_for_uid_map, &lookup,
+					   lookup.uid));
+#endif
+  htab_remove_elt_with_hash (decl_for_uid_map, &lookup, lookup.uid);
+}
+
+/* Dump size, element count and collision statistics for the
+   decl_for_uid_map hash table to stderr.  */
+
+static void
+print_decl_for_uid_map_statistics (void)
+{
+  fprintf (stderr,
+	   "DECL_FOR_UID_MAP hash: size %ld, %ld elements, %f collisions\n",
+	   (long) htab_size (decl_for_uid_map),
+	   (long) htab_elements (decl_for_uid_map),
+	   htab_collisions (decl_for_uid_map));
+}
+
/* Create a DECL_... node of code CODE, name NAME and data type TYPE.
We do NOT enter this node in any sort of symbol table.
hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
break;
case ARRAY_TYPE:
- hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
- hashcode);
+ if (TYPE_DOMAIN (ntype))
+ hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
+ hashcode);
break;
case INTEGER_TYPE:
hashcode = iterative_hash_object
ttype = build_qualified_type (ntype, quals);
}
+ else if (TYPE_QUALS (ttype) != quals)
+ ttype = build_qualified_type (ttype, quals);
return ttype;
}
TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
}
-/* Returns true iff cand is equivalent to base with type_quals. */
+/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
bool
check_qualified_type (const_tree cand, const_tree base, int type_quals)
h->hash = hashcode;
h->type = type;
loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
- *loc = (void*)h;
+ *loc = (void *)h;
}
/* Given TYPE, and HASHCODE its hash code, return the canonical
|| (! pos && TREE_INT_CST_HIGH (t) == -1
&& (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
&& (!TYPE_UNSIGNED (TREE_TYPE (t))
- || TYPE_IS_SIZETYPE (TREE_TYPE (t))))
+ || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (TREE_TYPE (t)))))
|| (pos && TREE_INT_CST_HIGH (t) == 0)));
}
print_debug_expr_statistics ();
print_value_expr_statistics ();
print_restrict_base_statistics ();
+ print_decl_for_uid_map_statistics ();
lang_hooks.print_statistics ();
}
\f
inner,
TREE_CHAIN (TYPE_ARG_TYPES (type)));
}
+ else if (TREE_CODE (type) == OFFSET_TYPE)
+ {
+ inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
+ outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
+ }
else
return bottom;
return orig_field;
}
-/* Return value of a constant X.  */
+/* Return value of a constant X and sign-extend it.  */
 
 HOST_WIDE_INT
 int_cst_value (const_tree x)
 {
   unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
   unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
-  bool negative = ((val >> (bits - 1)) & 1) != 0;
-  gcc_assert (bits <= HOST_BITS_PER_WIDE_INT);
+  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
+  gcc_assert (TREE_INT_CST_HIGH (x) == 0
+	      || TREE_INT_CST_HIGH (x) == -1);
-  if (negative)
-    val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
-  else
-    val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
+  if (bits < HOST_BITS_PER_WIDE_INT)
+    {
+      /* Replicate the sign bit into the excess high bits for negative
+	 values, clear them for non-negative ones.  */
+      bool negative = ((val >> (bits - 1)) & 1) != 0;
+      if (negative)
+	val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
+      else
+	val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
+    }
   return val;
 }