/* Language-independent node constructors for parse phase of GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
- Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+ 2011 Free Software Foundation, Inc.
This file is part of GCC.
#include "tm.h"
#include "flags.h"
#include "tree.h"
-#include "real.h"
#include "tm_p.h"
#include "function.h"
#include "obstack.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
+#include "filenames.h"
#include "output.h"
#include "target.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "params.h"
#include "pointer-set.h"
-#include "fixed-value.h"
#include "tree-pass.h"
#include "langhooks-def.h"
#include "diagnostic.h"
+#include "tree-diagnostic.h"
+#include "tree-pretty-print.h"
#include "cgraph.h"
#include "timevar.h"
#include "except.h"
#include "debug.h"
+#include "intl.h"
/* Tree code classes. */
"exprs",
"constants",
"identifiers",
- "perm_tree_lists",
- "temp_tree_lists",
"vecs",
"binfos",
"ssa names",
static GTY(()) int next_decl_uid;
/* Unique id for next type created. */
static GTY(()) int next_type_uid = 1;
+/* Unique id for next debug decl created. Use negative numbers,
+ to catch erroneous uses. */
+static GTY(()) int next_debug_decl_uid;
/* Since we cannot rehash a type after it is in the table, we have to
keep the hash code. */
/* General tree->tree mapping structure for use in hash tables. */
-static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
htab_t debug_expr_for_decl;
-static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
htab_t value_expr_for_decl;
-static GTY ((if_marked ("tree_priority_map_marked_p"),
+static GTY ((if_marked ("tree_priority_map_marked_p"),
param_is (struct tree_priority_map)))
htab_t init_priority_for_decl;
tree_node_structure_for_code (enum tree_code code)
{
switch (TREE_CODE_CLASS (code))
- {
+ {
case tcc_declaration:
{
switch (code)
return TS_LABEL_DECL;
case RESULT_DECL:
return TS_RESULT_DECL;
+ case DEBUG_EXPR_DECL:
+ return TS_DECL_WRTL;
case CONST_DECL:
return TS_CONST_DECL;
case TYPE_DECL:
return TS_TYPE_DECL;
case FUNCTION_DECL:
return TS_FUNCTION_DECL;
+ case TRANSLATION_UNIT_DECL:
+ return TS_TRANSLATION_UNIT_DECL;
default:
return TS_DECL_NON_COMMON;
}
{
unsigned i;
-#define MARK_TS_BASE(C) \
- do { \
- tree_contains_struct[C][TS_BASE] = 1; \
- } while (0)
-
-#define MARK_TS_COMMON(C) \
- do { \
- MARK_TS_BASE (C); \
- tree_contains_struct[C][TS_COMMON] = 1; \
- } while (0)
-
-#define MARK_TS_DECL_MINIMAL(C) \
- do { \
- MARK_TS_COMMON (C); \
- tree_contains_struct[C][TS_DECL_MINIMAL] = 1; \
- } while (0)
-
-#define MARK_TS_DECL_COMMON(C) \
- do { \
- MARK_TS_DECL_MINIMAL (C); \
- tree_contains_struct[C][TS_DECL_COMMON] = 1; \
- } while (0)
-
-#define MARK_TS_DECL_WRTL(C) \
- do { \
- MARK_TS_DECL_COMMON (C); \
- tree_contains_struct[C][TS_DECL_WRTL] = 1; \
- } while (0)
-
-#define MARK_TS_DECL_WITH_VIS(C) \
- do { \
- MARK_TS_DECL_WRTL (C); \
- tree_contains_struct[C][TS_DECL_WITH_VIS] = 1; \
- } while (0)
-
-#define MARK_TS_DECL_NON_COMMON(C) \
- do { \
- MARK_TS_DECL_WITH_VIS (C); \
- tree_contains_struct[C][TS_DECL_NON_COMMON] = 1; \
- } while (0)
-
for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
{
enum tree_code code;
/* Mark all the structures that TS is derived from. */
switch (ts_code)
{
- case TS_COMMON:
+ case TS_TYPED:
MARK_TS_BASE (code);
break;
+ case TS_COMMON:
case TS_INT_CST:
case TS_REAL_CST:
case TS_FIXED_CST:
case TS_VECTOR:
case TS_STRING:
case TS_COMPLEX:
+ case TS_SSA_NAME:
+ case TS_CONSTRUCTOR:
+ MARK_TS_TYPED (code);
+ break;
+
case TS_IDENTIFIER:
case TS_DECL_MINIMAL:
case TS_TYPE:
case TS_LIST:
case TS_VEC:
case TS_EXP:
- case TS_SSA_NAME:
case TS_BLOCK:
case TS_BINFO:
case TS_STATEMENT_LIST:
- case TS_CONSTRUCTOR:
case TS_OMP_CLAUSE:
case TS_OPTIMIZATION:
case TS_TARGET_OPTION:
break;
case TS_DECL_WRTL:
+ case TS_CONST_DECL:
MARK_TS_DECL_COMMON (code);
break;
case TS_PARM_DECL:
case TS_LABEL_DECL:
case TS_RESULT_DECL:
- case TS_CONST_DECL:
MARK_TS_DECL_WRTL (code);
break;
MARK_TS_DECL_NON_COMMON (code);
break;
+ case TS_TRANSLATION_UNIT_DECL:
+ MARK_TS_DECL_COMMON (code);
+ break;
+
default:
gcc_unreachable ();
}
/* Basic consistency checks for attributes used in fold. */
gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
- gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_NON_COMMON]);
gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
- gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_WRTL]);
gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
- gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_WITH_VIS]);
gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
-
-#undef MARK_TS_BASE
-#undef MARK_TS_COMMON
-#undef MARK_TS_DECL_MINIMAL
-#undef MARK_TS_DECL_COMMON
-#undef MARK_TS_DECL_WRTL
-#undef MARK_TS_DECL_WITH_VIS
-#undef MARK_TS_DECL_NON_COMMON
}
type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
type_hash_eq, 0);
- debug_expr_for_decl = htab_create_ggc (512, tree_map_hash,
- tree_map_eq, 0);
+ debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
+ tree_decl_map_eq, 0);
- value_expr_for_decl = htab_create_ggc (512, tree_map_hash,
- tree_map_eq, 0);
+ value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
+ tree_decl_map_eq, 0);
init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
tree_priority_map_eq, 0);
int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
int_cst_hash_eq, NULL);
-
+
int_cst_node = make_node (INTEGER_CST);
cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
decl_str = IDENTIFIER_POINTER (decl_asmname);
asmname_str = IDENTIFIER_POINTER (asmname);
-
+
/* If the target assembler name was set by the user, things are trickier.
We have a leading '*' to begin with. After that, it's arguable what
return sizeof (struct tree_type_decl);
case FUNCTION_DECL:
return sizeof (struct tree_function_decl);
+ case DEBUG_EXPR_DECL:
+ return sizeof (struct tree_decl_with_rtl);
default:
return sizeof (struct tree_decl_non_common);
}
}
}
-/* Return a newly allocated node of code CODE. For decl and type
- nodes, some other fields are initialized. The rest of the node is
- initialized to zero. This function cannot be used for TREE_VEC or
- OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size.
-
- Achoo! I got a code in the node. */
+/* Record interesting allocation statistics for a tree node with CODE
+ and LENGTH. */
-tree
-make_node_stat (enum tree_code code MEM_STAT_DECL)
+static void
+record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
+ size_t length ATTRIBUTE_UNUSED)
{
- tree t;
- enum tree_code_class type = TREE_CODE_CLASS (code);
- size_t length = tree_code_size (code);
#ifdef GATHER_STATISTICS
+ enum tree_code_class type = TREE_CODE_CLASS (code);
tree_node_kind kind;
switch (type)
kind = constr_kind;
break;
+ case OMP_CLAUSE:
+ kind = omp_clause_kind;
+ break;
+
default:
kind = x_kind;
break;
}
break;
-
+
+ case tcc_vl_exp:
+ kind = e_kind;
+ break;
+
default:
gcc_unreachable ();
}
tree_node_counts[(int) kind]++;
tree_node_sizes[(int) kind] += length;
#endif
+}
- if (code == IDENTIFIER_NODE)
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_id_zone);
- else
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
+/* Return a newly allocated node of code CODE. For decl and type
+ nodes, some other fields are initialized. The rest of the node is
+ initialized to zero. This function cannot be used for TREE_VEC or
+ OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size.
+
+ Achoo! I got a code in the node. */
+
+tree
+make_node_stat (enum tree_code code MEM_STAT_DECL)
+{
+ tree t;
+ enum tree_code_class type = TREE_CODE_CLASS (code);
+ size_t length = tree_code_size (code);
- memset (t, 0, length);
+ record_node_allocation_statistics (code, length);
+ t = ggc_alloc_zone_cleared_tree_node_stat (
+ (code == IDENTIFIER_NODE) ? &tree_id_zone : &tree_zone,
+ length PASS_MEM_STAT);
TREE_SET_CODE (t, code);
switch (type)
DECL_ALIGN (t) = 1;
}
DECL_SOURCE_LOCATION (t) = input_location;
- DECL_UID (t) = next_decl_uid++;
+ if (TREE_CODE (t) == DEBUG_EXPR_DECL)
+ DECL_UID (t) = --next_debug_decl_uid;
+ else
+ {
+ DECL_UID (t) = next_decl_uid++;
+ SET_DECL_PT_UID (t, -1);
+ }
if (TREE_CODE (t) == LABEL_DECL)
LABEL_DECL_UID (t) = -1;
}
\f
/* Return a new node with the same contents as NODE except that its
- TREE_CHAIN is zero and it has a fresh uid. */
+ TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
tree
copy_node_stat (tree node MEM_STAT_DECL)
gcc_assert (code != STATEMENT_LIST);
length = tree_size (node);
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
+ record_node_allocation_statistics (code, length);
+ t = ggc_alloc_zone_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
memcpy (t, node, length);
- TREE_CHAIN (t) = 0;
+ if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
+ TREE_CHAIN (t) = 0;
TREE_ASM_WRITTEN (t) = 0;
TREE_VISITED (t) = 0;
- t->base.ann = 0;
+ if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
+ *DECL_VAR_ANN_PTR (t) = 0;
if (TREE_CODE_CLASS (code) == tcc_declaration)
{
- DECL_UID (t) = next_decl_uid++;
+ if (code == DEBUG_EXPR_DECL)
+ DECL_UID (t) = --next_debug_decl_uid;
+ else
+ {
+ DECL_UID (t) = next_decl_uid++;
+ if (DECL_PT_UID_SET_P (node))
+ SET_DECL_PT_UID (t, DECL_PT_UID (node));
+ }
if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
&& DECL_HAS_VALUE_EXPR_P (node))
{
but the optimizer should catch that. */
TYPE_SYMTAB_POINTER (t) = 0;
TYPE_SYMTAB_ADDRESS (t) = 0;
-
+
/* Do not copy the values cache. */
if (TYPE_CACHED_VALUES_P(t))
{
return build_int_cst_wide (type, low, low < 0 ? -1 : 0);
}
-/* Create an INT_CST node with a LOW value zero extended. */
-
-tree
-build_int_cstu (tree type, unsigned HOST_WIDE_INT low)
-{
- return build_int_cst_wide (type, low, 0);
-}
-
/* Create an INT_CST node with a LOW value in TYPE. The value is sign extended
if it is negative. This function is similar to build_int_cst, but
the extra bits outside of the type precision are cleared. Constants
tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
- unsigned HOST_WIDE_INT low1;
- HOST_WIDE_INT hi;
-
gcc_assert (type);
- fit_double_type (low, low < 0 ? -1 : 0, &low1, &hi, type);
+ return double_int_to_tree (type, shwi_to_double_int (low));
+}
+
+/* Constructs tree in type TYPE from with value given by CST. Signedness
+ of CST is assumed to be the same as the signedness of TYPE. */
+
+tree
+double_int_to_tree (tree type, double_int cst)
+{
+  /* Size types *are* sign extended.  */
+  bool sign_extended_type = (!TYPE_UNSIGNED (type)
+			     || (TREE_CODE (type) == INTEGER_TYPE
+				 && TYPE_IS_SIZETYPE (type)));
+
+  /* Force CST into TYPE's value range: extend from TYPE_PRECISION bits,
+     zero-extending only when the type is unsigned and not a sizetype.  */
+  cst = double_int_ext (cst, TYPE_PRECISION (type), !sign_extended_type);
+
+  /* Hand the two halves to the (possibly value-caching) constructor.  */
+  return build_int_cst_wide (type, cst.low, cst.high);
+}
+
+/* Returns true if CST fits into range of TYPE. Signedness of CST is assumed
+ to be the same as the signedness of TYPE. */
- return build_int_cst_wide (type, low1, hi);
+bool
+double_int_fits_to_tree_p (const_tree type, double_int cst)
+{
+  /* Size types *are* sign extended.  */
+  bool sign_extended_type = (!TYPE_UNSIGNED (type)
+			     || (TREE_CODE (type) == INTEGER_TYPE
+				 && TYPE_IS_SIZETYPE (type)));
+
+  double_int ext
+    = double_int_ext (cst, TYPE_PRECISION (type), !sign_extended_type);
+
+  /* CST fits exactly when extension to TYPE's precision is the identity.  */
+  return double_int_equal_p (cst, ext);
}
-/* Create an INT_CST node of TYPE and value HI:LOW. The value is truncated
- and sign extended according to the value range of TYPE. */
+/* We force the double_int CST to the range of the type TYPE by sign or
+ zero extending it. OVERFLOWABLE indicates if we are interested in
+ overflow of the value, when >0 we are only interested in signed
+ overflow, for <0 we are interested in any overflow. OVERFLOWED
+ indicates whether overflow has already occurred. CONST_OVERFLOWED
+ indicates whether constant overflow has already occurred. We force
+ T's value to be within range of T's type (by setting to 0 or 1 all
+ the bits outside the type's range). We set TREE_OVERFLOWED if,
+ OVERFLOWED is nonzero,
+ or OVERFLOWABLE is >0 and signed overflow occurs
+ or OVERFLOWABLE is <0 and any overflow occurs
+ We return a new tree node for the extended double_int. The node
+ is shared if no overflow flags are set. */
+
tree
-build_int_cst_wide_type (tree type,
- unsigned HOST_WIDE_INT low, HOST_WIDE_INT high)
+force_fit_type_double (tree type, double_int cst, int overflowable,
+ bool overflowed)
{
- fit_double_type (low, high, &low, &high, type);
- return build_int_cst_wide (type, low, high);
+ bool sign_extended_type;
+
+ /* Size types *are* sign extended. */
+ sign_extended_type = (!TYPE_UNSIGNED (type)
+ || (TREE_CODE (type) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (type)));
+
+ /* If we need to set overflow flags, return a new unshared node. */
+ if (overflowed || !double_int_fits_to_tree_p(type, cst))
+ {
+ if (overflowed
+ || overflowable < 0
+ || (overflowable > 0 && sign_extended_type))
+ {
+ tree t = make_node (INTEGER_CST);
+ TREE_INT_CST (t) = double_int_ext (cst, TYPE_PRECISION (type),
+ !sign_extended_type);
+ TREE_TYPE (t) = type;
+ TREE_OVERFLOW (t) = 1;
+ return t;
+ }
+ }
+
+ /* Else build a shared node. */
+ return double_int_to_tree (type, cst);
}
/* These are the hash table functions for the hash table of INTEGER_CST
switch (TREE_CODE (type))
{
+ case NULLPTR_TYPE:
+ gcc_assert (hi == 0 && low == 0);
+ /* Fallthru. */
+
case POINTER_TYPE:
case REFERENCE_TYPE:
/* Cache NULL pointer. */
TREE_INT_CST_LOW (t) = low;
TREE_INT_CST_HIGH (t) = hi;
TREE_TYPE (t) = type;
-
+
TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
}
}
tree
build_low_bits_mask (tree type, unsigned bits)
{
- unsigned HOST_WIDE_INT low;
- HOST_WIDE_INT high;
- unsigned HOST_WIDE_INT all_ones = ~(unsigned HOST_WIDE_INT) 0;
+ double_int mask;
gcc_assert (bits <= TYPE_PRECISION (type));
if (bits == TYPE_PRECISION (type)
&& !TYPE_UNSIGNED (type))
- {
- /* Sign extended all-ones mask. */
- low = all_ones;
- high = -1;
- }
- else if (bits <= HOST_BITS_PER_WIDE_INT)
- {
- low = all_ones >> (HOST_BITS_PER_WIDE_INT - bits);
- high = 0;
- }
+ /* Sign extended all-ones mask. */
+ mask = double_int_minus_one;
else
- {
- bits -= HOST_BITS_PER_WIDE_INT;
- low = all_ones;
- high = all_ones >> (HOST_BITS_PER_WIDE_INT - bits);
- }
+ mask = double_int_mask (bits);
- return build_int_cst_wide (type, low, high);
+ return build_int_cst_wide (type, mask.low, mask.high);
}
/* Checks that X is integer constant that can be expressed in (unsigned)
tree v = make_node (VECTOR_CST);
int over = 0;
tree link;
+ unsigned cnt = 0;
TREE_VECTOR_CST_ELTS (v) = vals;
TREE_TYPE (v) = type;
for (link = vals; link; link = TREE_CHAIN (link))
{
tree value = TREE_VALUE (link);
+ cnt++;
/* Don't crash if we get an address constant. */
if (!CONSTANT_CLASS_P (value))
over |= TREE_OVERFLOW (value);
}
+ gcc_assert (cnt == TYPE_VECTOR_SUBPARTS (type));
+
TREE_OVERFLOW (v) = over;
return v;
}
FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
list = tree_cons (NULL_TREE, value, list);
+ for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
+ list = tree_cons (NULL_TREE,
+ build_zero_cst (TREE_TYPE (type)), list);
return build_vector (type, nreverse (list));
}
+/* Build a vector of type VECTYPE where all the elements are SCs.  */
+tree
+build_vector_from_val (tree vectype, tree sc)
+{
+  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
+  VEC(constructor_elt, gc) *v = NULL;
+
+  /* Propagate errors rather than wrapping them in a vector.  */
+  if (sc == error_mark_node)
+    return sc;
+
+  /* Verify that the vector type is suitable for SC.  Note that there
+     is some inconsistency in the type-system with respect to restrict
+     qualifications of pointers.  Vector types always have a main-variant
+     element type and the qualification is applied to the vector-type.
+     So TREE_TYPE (vector-type) does not return a properly qualified
+     vector element-type.  */
+  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
+					   TREE_TYPE (vectype)));
+
+  /* Splat SC into all NUNITS element slots.  */
+  v = VEC_alloc (constructor_elt, gc, nunits);
+  for (i = 0; i < nunits; ++i)
+    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
+
+  /* A constant element yields a VECTOR_CST; otherwise return the
+     CONSTRUCTOR itself.  */
+  if (CONSTANT_CLASS_P (sc))
+    return build_vector_from_ctor (vectype, v);
+  else
+    return build_constructor (vectype, v);
+}
+
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
are in the VEC pointed to by VALS. */
tree
build_constructor (tree type, VEC(constructor_elt,gc) *vals)
{
tree c = make_node (CONSTRUCTOR);
+ unsigned int i;
+ constructor_elt *elt;
+ bool constant_p = true;
+
TREE_TYPE (c) = type;
CONSTRUCTOR_ELTS (c) = vals;
+
+ FOR_EACH_VEC_ELT (constructor_elt, vals, i, elt)
+ if (!TREE_CONSTANT (elt->value))
+ {
+ constant_p = false;
+ break;
+ }
+
+ TREE_CONSTANT (c) = constant_p;
+
return c;
}
{
VEC(constructor_elt,gc) *v;
constructor_elt *elt;
- tree t;
v = VEC_alloc (constructor_elt, gc, 1);
elt = VEC_quick_push (constructor_elt, v, NULL);
elt->index = index;
elt->value = value;
- t = build_constructor (type, v);
- TREE_CONSTANT (t) = TREE_CONSTANT (value);
- return t;
+ return build_constructor (type, v);
}
tree
build_constructor_from_list (tree type, tree vals)
{
- tree t, val;
+ tree t;
VEC(constructor_elt,gc) *v = NULL;
- bool constant_p = true;
if (vals)
{
v = VEC_alloc (constructor_elt, gc, list_length (vals));
for (t = vals; t; t = TREE_CHAIN (t))
- {
- constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
- val = TREE_VALUE (t);
- elt->index = TREE_PURPOSE (t);
- elt->value = val;
- if (!TREE_CONSTANT (val))
- constant_p = false;
- }
+ CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
}
- t = build_constructor (type, v);
- TREE_CONSTANT (t) = constant_p;
- return t;
+ return build_constructor (type, v);
}
/* Return a new FIXED_CST node whose type is TYPE and value is F. */
FIXED_VALUE_TYPE *fp;
v = make_node (FIXED_CST);
- fp = GGC_NEW (FIXED_VALUE_TYPE);
+ fp = ggc_alloc_fixed_value ();
memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
TREE_TYPE (v) = type;
Consider doing it via real_convert now. */
v = make_node (REAL_CST);
- dp = GGC_NEW (REAL_VALUE_TYPE);
+ dp = ggc_alloc_real_value ();
memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
TREE_TYPE (v) = type;
/* Do not waste bytes provided by padding of struct tree_string. */
length = len + offsetof (struct tree_string, str) + 1;
-#ifdef GATHER_STATISTICS
- tree_node_counts[(int) c_kind]++;
- tree_node_sizes[(int) c_kind] += length;
-#endif
+ record_node_allocation_statistics (STRING_CST, length);
- s = ggc_alloc_tree (length);
+ s = ggc_alloc_tree_node (length);
- memset (s, 0, sizeof (struct tree_common));
+ memset (s, 0, sizeof (struct tree_typed));
TREE_SET_CODE (s, STRING_CST);
TREE_CONSTANT (s) = 1;
TREE_STRING_LENGTH (s) = len;
case VECTOR_TYPE:
{
- tree scalar, cst;
- int i;
-
- scalar = build_one_cst (TREE_TYPE (type));
-
- /* Create 'vect_cst_ = {cst,cst,...,cst}' */
- cst = NULL_TREE;
- for (i = TYPE_VECTOR_SUBPARTS (type); --i >= 0; )
- cst = tree_cons (NULL_TREE, scalar, cst);
+ tree scalar = build_one_cst (TREE_TYPE (type));
- return build_vector (type, cst);
+ return build_vector_from_val (type, scalar);
}
case COMPLEX_TYPE:
return build_complex (type,
build_one_cst (TREE_TYPE (type)),
- fold_convert (TREE_TYPE (type), integer_zero_node));
+ build_zero_cst (TREE_TYPE (type)));
default:
gcc_unreachable ();
}
}
+/* Build 0 constant of type TYPE.  This is used by constructor folding
+   and thus the constant should be represented in memory by
+   zero(es).  */
+
+tree
+build_zero_cst (tree type)
+{
+  switch (TREE_CODE (type))
+    {
+    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+    case POINTER_TYPE: case REFERENCE_TYPE:
+    case OFFSET_TYPE:
+      return build_int_cst (type, 0);
+
+    case REAL_TYPE:
+      return build_real (type, dconst0);
+
+    case FIXED_POINT_TYPE:
+      return build_fixed (type, FCONST0 (TYPE_MODE (type)));
+
+    case VECTOR_TYPE:
+      {
+	/* Recurse on the element type and splat it across the vector.  */
+	tree scalar = build_zero_cst (TREE_TYPE (type));
+
+	return build_vector_from_val (type, scalar);
+      }
+
+    case COMPLEX_TYPE:
+      {
+	tree zero = build_zero_cst (TREE_TYPE (type));
+
+	return build_complex (type, zero, zero);
+      }
+
+    default:
+      /* Aggregates get an empty CONSTRUCTOR; any other scalar-like type
+	 is converted from integer zero.  */
+      if (!AGGREGATE_TYPE_P (type))
+	return fold_convert (type, integer_zero_node);
+      return build_constructor (type, NULL);
+    }
+}
+
+
/* Build a BINFO with LEN language slots. */
tree
size_t length = (offsetof (struct tree_binfo, base_binfos)
+ VEC_embedded_size (tree, base_binfos));
-#ifdef GATHER_STATISTICS
- tree_node_counts[(int) binfo_kind]++;
- tree_node_sizes[(int) binfo_kind] += length;
-#endif
+ record_node_allocation_statistics (TREE_BINFO, length);
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
+ t = ggc_alloc_zone_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
memset (t, 0, offsetof (struct tree_binfo, base_binfos));
tree t;
int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
-#ifdef GATHER_STATISTICS
- tree_node_counts[(int) vec_kind]++;
- tree_node_sizes[(int) vec_kind] += length;
-#endif
-
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
+ record_node_allocation_statistics (TREE_VEC, length);
- memset (t, 0, length);
+ t = ggc_alloc_zone_cleared_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
TREE_SET_CODE (t, TREE_VEC);
TREE_VEC_LENGTH (t) = len;
if (TREE_CODE (expr) != INTEGER_CST)
return 0;
- prec = (POINTER_TYPE_P (TREE_TYPE (expr))
- ? POINTER_SIZE : TYPE_PRECISION (TREE_TYPE (expr)));
+ prec = TYPE_PRECISION (TREE_TYPE (expr));
high = TREE_INT_CST_HIGH (expr);
low = TREE_INT_CST_LOW (expr);
if (TREE_CODE (expr) == COMPLEX_CST)
return tree_log2 (TREE_REALPART (expr));
- prec = (POINTER_TYPE_P (TREE_TYPE (expr))
- ? POINTER_SIZE : TYPE_PRECISION (TREE_TYPE (expr)));
-
+ prec = TYPE_PRECISION (TREE_TYPE (expr));
high = TREE_INT_CST_HIGH (expr);
low = TREE_INT_CST_LOW (expr);
if (TREE_CODE (expr) == COMPLEX_CST)
return tree_log2 (TREE_REALPART (expr));
- prec = (POINTER_TYPE_P (TREE_TYPE (expr))
- ? POINTER_SIZE : TYPE_PRECISION (TREE_TYPE (expr)));
-
+ prec = TYPE_PRECISION (TREE_TYPE (expr));
high = TREE_INT_CST_HIGH (expr);
low = TREE_INT_CST_LOW (expr);
return NULL_TREE;
}
+/* Return true if ELEM is in V.  */
+
+bool
+vec_member (const_tree elem, VEC(tree,gc) *v)
+{
+  unsigned ix;
+  tree t;
+  /* Linear scan; membership is pointer identity, not structural equality.  */
+  FOR_EACH_VEC_ELT (tree, v, ix, t)
+    if (elem == t)
+      return true;
+  return false;
+}
+
/* Returns element number IDX (zero-origin) of chain CHAIN, or
NULL_TREE. */
{
if (elem == chain)
return 1;
- chain = TREE_CHAIN (chain);
+ chain = DECL_CHAIN (chain);
}
return 0;
tree t = TYPE_FIELDS (type);
int count = 0;
- for (; t; t = TREE_CHAIN (t))
+ for (; t; t = DECL_CHAIN (t))
if (TREE_CODE (t) == FIELD_DECL)
++count;
return count;
}
+/* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
+   UNION_TYPE TYPE, or NULL_TREE if none.  */
+
+tree
+first_field (const_tree type)
+{
+  tree t = TYPE_FIELDS (type);
+  /* Skip anything on the TYPE_FIELDS chain that is not a FIELD_DECL.  */
+  while (t && TREE_CODE (t) != FIELD_DECL)
+    t = TREE_CHAIN (t);
+  return t;
+}
+
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
by modifying the last node in chain 1 to point to chain 2.
This is the Lisp primitive `nconc'. */
tree prev = 0, decl, next;
for (decl = t; decl; decl = next)
{
+ /* We shouldn't be using this function to reverse BLOCK chains; we
+ have blocks_nreverse for that. */
+ gcc_checking_assert (TREE_CODE (decl) != BLOCK);
next = TREE_CHAIN (decl);
TREE_CHAIN (decl) = prev;
prev = decl;
tree *pp = &ret;
unsigned int i;
tree t;
- for (i = 0; VEC_iterate (tree, vec, i, t); ++i)
+ FOR_EACH_VEC_ELT (tree, vec, i, t)
{
*pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
pp = &TREE_CHAIN (*pp);
purpose and value fields are PURPOSE and VALUE
and whose TREE_CHAIN is CHAIN. */
-tree
+tree
tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
tree node;
- node = (tree) ggc_alloc_zone_pass_stat (sizeof (struct tree_list), &tree_zone);
-
+ node = ggc_alloc_zone_tree_node_stat (&tree_zone, sizeof (struct tree_list)
+ PASS_MEM_STAT);
memset (node, 0, sizeof (struct tree_common));
-#ifdef GATHER_STATISTICS
- tree_node_counts[(int) x_kind]++;
- tree_node_sizes[(int) x_kind] += sizeof (struct tree_list);
-#endif
+ record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
TREE_SET_CODE (node, TREE_LIST);
TREE_CHAIN (node) = chain;
return node;
}
-/* Return the elements of a CONSTRUCTOR as a TREE_LIST. */
-
-tree
-ctor_to_list (tree ctor)
-{
- tree list = NULL_TREE;
- tree *p = &list;
- unsigned ix;
- tree purpose, val;
-
- FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), ix, purpose, val)
- {
- *p = build_tree_list (purpose, val);
- p = &TREE_CHAIN (*p);
- }
-
- return list;
-}
-
/* Return the values of the elements of a CONSTRUCTOR as a vector of
trees. */
min = TYPE_MIN_VALUE (index_type);
max = TYPE_MAX_VALUE (index_type);
+ /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
+ if (!max)
+ return error_mark_node;
+
return (integer_zerop (min)
? max
: fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
case BIT_FIELD_REF:
return NULL;
- case MISALIGNED_INDIRECT_REF:
- case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
return true;
case VAR_DECL:
- if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
- && !DECL_DLLIMPORT_P (op))
+ if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
|| DECL_THREAD_LOCAL_P (op)
|| DECL_CONTEXT (op) == current_function_decl
|| decl_function_context (op) == current_function_decl)
TREE_READONLY (t) = read_only;
}
\f
-/* Return 1 if EXP contains a PLACEHOLDER_EXPR; i.e., if it represents a size
- or offset that depends on a field within a record. */
+/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
+ size or offset that depends on a field within a record. */
bool
contains_placeholder_p (const_tree exp)
return 0;
}
-/* Return true if any part of the computation of TYPE involves a
- PLACEHOLDER_EXPR. This includes size, bounds, qualifiers
- (for QUAL_UNION_TYPE) and field positions. */
+/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
+ directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
+ field positions. */
static bool
type_contains_placeholder_1 (const_tree type)
the case of arrays) type involves a placeholder, this type does. */
if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
|| CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
- || (TREE_TYPE (type) != 0
+ || (!POINTER_TYPE_P (type)
+ && TREE_TYPE (type)
&& type_contains_placeholder_p (TREE_TYPE (type))))
return true;
|| CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
case ARRAY_TYPE:
- /* We're already checked the component type (TREE_TYPE), so just check
- the index type. */
+ /* We have already checked the component type above, so just check the
+ domain type. */
return type_contains_placeholder_p (TYPE_DOMAIN (type));
case RECORD_TYPE:
{
tree field;
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
if (TREE_CODE (field) == FIELD_DECL
&& (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
|| (TREE_CODE (type) == QUAL_UNION_TYPE
}
}
+/* Wrapper around above function used to cache its result. */
+
bool
type_contains_placeholder_p (tree type)
{
unsigned int i;
tree iter;
- for (i = 0; VEC_iterate (tree, *queue, i, iter); i++)
+ FOR_EACH_VEC_ELT (tree, *queue, i, iter)
if (simple_cst_equal (iter, exp) == 1)
break;
}
TREE_READONLY (new_tree) |= TREE_READONLY (exp);
+
+ if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
+ TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
+
return new_tree;
}
}
TREE_READONLY (new_tree) |= TREE_READONLY (exp);
+
+ if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
+ TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
+
return new_tree;
}
\f
address is constant too. If it's a decl, its address is constant if the
decl is static. Everything else is not constant and, furthermore,
taking the address of a volatile variable is not volatile. */
- if (TREE_CODE (node) == INDIRECT_REF)
+ if (TREE_CODE (node) == INDIRECT_REF
+ || TREE_CODE (node) == MEM_REF)
UPDATE_FLAGS (TREE_OPERAND (node, 0));
else if (CONSTANT_CLASS_P (node))
;
build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
int length = sizeof (struct tree_exp);
-#ifdef GATHER_STATISTICS
- tree_node_kind kind;
-#endif
tree t;
-#ifdef GATHER_STATISTICS
- switch (TREE_CODE_CLASS (code))
- {
- case tcc_statement: /* an expression with side effects */
- kind = s_kind;
- break;
- case tcc_reference: /* a reference */
- kind = r_kind;
- break;
- default:
- kind = e_kind;
- break;
- }
-
- tree_node_counts[(int) kind]++;
- tree_node_sizes[(int) kind] += length;
-#endif
+ record_node_allocation_statistics (code, length);
gcc_assert (TREE_CODE_LENGTH (code) == 1);
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
+ t = ggc_alloc_zone_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
memset (t, 0, sizeof (struct tree_common));
TREE_READONLY (t) = 0;
break;
- case MISALIGNED_INDIRECT_REF:
- case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
/* Whether a dereference is readonly has nothing to do with whether
its operand is readonly. */
side_effects = 1; \
if (!TREE_READONLY (arg##N) \
&& !CONSTANT_CLASS_P (arg##N)) \
- read_only = 0; \
+ (void) (read_only = 0); \
if (!TREE_CONSTANT (arg##N)) \
- constant = 0; \
+ (void) (constant = 0); \
} \
} while (0)
PROCESS_ARG(2);
PROCESS_ARG(3);
PROCESS_ARG(4);
+ if (code == TARGET_MEM_REF)
+ side_effects = 0;
PROCESS_ARG(5);
TREE_SIDE_EFFECTS (t) = side_effects;
- TREE_THIS_VOLATILE (t) = 0;
+ TREE_THIS_VOLATILE (t)
+ = (code == TARGET_MEM_REF
+ && arg5 && TREE_THIS_VOLATILE (arg5));
return t;
}
-/* Similar except don't specify the TREE_TYPE
- and leave the TREE_SIDE_EFFECTS as 0.
- It is permissible for arguments to be null,
- or even garbage if their values do not matter. */
+/* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
+ on the pointer PTR. */
tree
-build_nt (enum tree_code code, ...)
+build_simple_mem_ref_loc (location_t loc, tree ptr)
{
- tree t;
- int length;
- int i;
- va_list p;
+ HOST_WIDE_INT offset = 0;
+ tree ptype = TREE_TYPE (ptr);
+ tree tem;
+ /* For convenience allow addresses that collapse to a simple base
+ and offset. */
+ if (TREE_CODE (ptr) == ADDR_EXPR
+ && (handled_component_p (TREE_OPERAND (ptr, 0))
+ || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
+ {
+ ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
+ gcc_assert (ptr);
+ ptr = build_fold_addr_expr (ptr);
+ gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
+ }
+ tem = build2 (MEM_REF, TREE_TYPE (ptype),
+ ptr, build_int_cst (ptype, offset));
+ SET_EXPR_LOCATION (tem, loc);
+ return tem;
+}
- gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
+/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */
- va_start (p, code);
+double_int
+mem_ref_offset (const_tree t)
+{
+  tree toff = TREE_OPERAND (t, 1);
+  /* The offset is operand 1; sign-extend it from the precision of its
+     (pointer) type.  */
+  return double_int_sext (tree_to_double_int (toff),
+			  TYPE_PRECISION (TREE_TYPE (toff)));
+}
- t = make_node (code);
- length = TREE_CODE_LENGTH (code);
+/* Return the pointer-type relevant for TBAA purposes from the
+   gimple memory reference tree T.  This is the type to be used for
+   the offset operand of MEM_REF or TARGET_MEM_REF replacements of T.  */
- for (i = 0; i < length; i++)
- TREE_OPERAND (t, i) = va_arg (p, tree);
+tree
+reference_alias_ptr_type (const_tree t)
+{
+  const_tree base = t;
+  /* Strip component references (handled_component_p) down to the base.  */
+  while (handled_component_p (base))
+    base = TREE_OPERAND (base, 0);
+  /* For (TARGET_)MEM_REF the alias pointer type is that of the offset
+     operand; otherwise fall back to a pointer to the base's main variant.  */
+  if (TREE_CODE (base) == MEM_REF)
+    return TREE_TYPE (TREE_OPERAND (base, 1));
+  else if (TREE_CODE (base) == TARGET_MEM_REF)
+    return TREE_TYPE (TMR_OFFSET (base));
+  else
+    return build_pointer_type (TYPE_MAIN_VARIANT (TREE_TYPE (base)));
+}
- va_end (p);
- return t;
+/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
+   offsetted by OFFSET units.  */
+
+tree
+build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
+{
+  /* Express *(&BASE + OFFSET) as a MEM_REF with a constant offset.  */
+  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
+			  build_fold_addr_expr (base),
+			  build_int_cst (ptr_type_node, offset));
+  tree addr = build1 (ADDR_EXPR, type, ref);
+  /* Recompute the invariant/constant flags for the fresh ADDR_EXPR.  */
+  recompute_tree_invariant_for_addr_expr (addr);
+  return addr;
}
-/* Similar to build_nt, but for creating a CALL_EXPR object with
- ARGLIST passed as a list. */
+/* Similar except don't specify the TREE_TYPE
+ and leave the TREE_SIDE_EFFECTS as 0.
+ It is permissible for arguments to be null,
+ or even garbage if their values do not matter. */
tree
-build_nt_call_list (tree fn, tree arglist)
+build_nt (enum tree_code code, ...)
{
tree t;
+ int length;
int i;
+ va_list p;
- t = build_vl_exp (CALL_EXPR, list_length (arglist) + 3);
- CALL_EXPR_FN (t) = fn;
- CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
- for (i = 0; arglist; arglist = TREE_CHAIN (arglist), i++)
- CALL_EXPR_ARG (t, i) = TREE_VALUE (arglist);
+ gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
+
+ va_start (p, code);
+
+ t = make_node (code);
+ length = TREE_CODE_LENGTH (code);
+
+ for (i = 0; i < length; i++)
+ TREE_OPERAND (t, i) = va_arg (p, tree);
+
+ va_end (p);
return t;
}
ret = build_vl_exp (CALL_EXPR, VEC_length (tree, args) + 3);
CALL_EXPR_FN (ret) = fn;
CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
- for (ix = 0; VEC_iterate (tree, args, ix, t); ++ix)
+ FOR_EACH_VEC_ELT (tree, args, ix, t)
CALL_EXPR_ARG (ret, ix) = t;
return ret;
}
return decl;
}
+/* Vector of all TRANSLATION_UNIT_DECLs created by
+   build_translation_unit_decl, in creation order.  */
+VEC(tree,gc) *all_translation_units;
+
+/* Builds a new translation-unit decl with name NAME, queues it in the
+   global list of translation-unit decls and returns it.  */
+
+tree
+build_translation_unit_decl (tree name)
+{
+  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
+			name, NULL_TREE);
+  /* Record which frontend created this unit.  */
+  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
+  VEC_safe_push (tree, gc, all_translation_units, tu);
+  return tu;
+}
+
\f
/* BLOCK nodes are used to represent the structure of binding contours
and declarations, once those contours have been exited and their contents
return block;
}
-expanded_location
-expand_location (source_location loc)
-{
- expanded_location xloc;
- if (loc == 0)
- {
- xloc.file = NULL;
- xloc.line = 0;
- xloc.column = 0;
- xloc.sysp = 0;
- }
- else
- {
- const struct line_map *map = linemap_lookup (line_table, loc);
- xloc.file = map->to_file;
- xloc.line = SOURCE_LINE (map, loc);
- xloc.column = SOURCE_COLUMN (map, loc);
- xloc.sysp = map->sysp != 0;
- };
- return xloc;
-}
-
\f
/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
its canonical type, we will need to use structural equality
checks for this type. */
if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
- || !targetm.comp_type_attributes (ntype, ttype))
+ || !comp_type_attributes (ntype, ttype))
SET_TYPE_STRUCTURAL_EQUALITY (ntype);
else if (TYPE_CANONICAL (ntype) == ntype)
TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
return ttype;
}
+/* Compare two attributes ATTR1 and ATTR2 for their value identity.
+   Return true if the attribute values are known to be equal;
+   otherwise return false.  */
+
+static bool
+attribute_value_equal (const_tree attr1, const_tree attr2)
+{
+  if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
+    return true;
+
+  /* If both values are lists, compare them element-wise; otherwise
+     fall back to a simple constant-expression comparison.  */
+  if (TREE_VALUE (attr1) != NULL_TREE
+      && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
+      && TREE_VALUE (attr2) != NULL
+      && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
+    return (simple_cst_list_equal (TREE_VALUE (attr1),
+				   TREE_VALUE (attr2)) == 1);
+
+  return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
+}
+
+/* Return 0 if the attributes for two types are incompatible, 1 if they
+   are compatible, and 2 if they are nearly compatible (which causes a
+   warning to be generated).  */
+int
+comp_type_attributes (const_tree type1, const_tree type2)
+{
+  const_tree a1 = TYPE_ATTRIBUTES (type1);
+  const_tree a2 = TYPE_ATTRIBUTES (type2);
+  const_tree a;
+
+  if (a1 == a2)
+    return 1;
+  /* Verify that every identity-affecting attribute of TYPE1 is present
+     on TYPE2 with an equal value.  */
+  for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
+    {
+      const struct attribute_spec *as;
+      const_tree attr;
+
+      as = lookup_attribute_spec (TREE_PURPOSE (a));
+      if (!as || as->affects_type_identity == false)
+	continue;
+
+      attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
+      if (!attr || !attribute_value_equal (a, attr))
+	break;
+    }
+  if (!a)
+    {
+      /* Conversely, verify that TYPE2 carries no identity-affecting
+	 attribute that TYPE1 lacks.  */
+      for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
+	{
+	  const struct attribute_spec *as;
+
+	  as = lookup_attribute_spec (TREE_PURPOSE (a));
+	  if (!as || as->affects_type_identity == false)
+	    continue;
+
+	  if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
+	    break;
+	  /* We don't need to compare trees again, as we did this
+	     already in first loop.  */
+	}
+      /* All attributes affecting type identity are equal, so there
+	 is no need to consult the target hook for the comparison.  */
+      if (!a)
+	return 1;
+    }
+  /* As some type combinations - like default calling-convention - might
+     be compatible, we have to call the target hook to get the final result.  */
+  return targetm.comp_type_attributes (type1, type2);
+}
/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
is ATTRIBUTE.
TYPE_QUALS (ttype));
}
+
+/* Reset the expression *EXPR_P, a size or position.
+
+   ??? We could reset all non-constant sizes or positions.  But it's cheap
+   enough to not do so and refrain from adding workarounds to dwarf2out.c.
+
+   We need to reset self-referential sizes or positions because they cannot
+   be gimplified and thus can contain a CALL_EXPR after the gimplification
+   is finished, which will run afoul of LTO streaming.  And they need to be
+   reset to something essentially dummy but not constant, so as to preserve
+   the properties of the object they are attached to.  */
+
+static inline void
+free_lang_data_in_one_sizepos (tree *expr_p)
+{
+  tree expr = *expr_p;
+  /* Replace a self-referential expression by a non-constant dummy of
+     the same type.  */
+  if (CONTAINS_PLACEHOLDER_P (expr))
+    *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
+}
+
+
+/* Reset all the fields in a binfo node BINFO.  We only keep
+   BINFO_VIRTUALS, which is used by gimple_fold_obj_type_ref.  */
+
+static void
+free_lang_data_in_binfo (tree binfo)
+{
+  unsigned i;
+  tree t;
+
+  gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
+
+  BINFO_VTABLE (binfo) = NULL_TREE;
+  BINFO_BASE_ACCESSES (binfo) = NULL;
+  BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
+  BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
+
+  /* Recurse so the same fields are cleared in all base binfos too.  */
+  FOR_EACH_VEC_ELT (tree, BINFO_BASE_BINFOS (binfo), i, t)
+    free_lang_data_in_binfo (t);
+}
+
+
/* Reset all language specific information still present in TYPE. */
static void
{
gcc_assert (TYPE_P (type));
- /* Fill in the alias-set. We need to at least track zeroness here
- for correctness. */
- if (lang_hooks.get_alias_set (type) == 0)
- TYPE_ALIAS_SET (type) = 0;
-
/* Give the FE a chance to remove its own data first. */
lang_hooks.free_lang_data (type);
}
}
}
-
+
/* Remove members that are not actually FIELD_DECLs from the field
list of an aggregate. These occur in C++. */
- if (TREE_CODE (type) == RECORD_TYPE
- || TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
+ if (RECORD_OR_UNION_TYPE_P (type))
{
tree prev, member;
to be removed, we cannot set its TREE_CHAIN to NULL.
Otherwise, we would not be able to find all the other fields
in the other instances of this TREE_TYPE.
-
+
This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
prev = NULL_TREE;
member = TYPE_FIELDS (type);
TYPE_METHODS (type) = NULL_TREE;
if (TYPE_BINFO (type))
- {
- tree binfo = TYPE_BINFO (type);
-
- if (BINFO_VIRTUALS (binfo))
- {
- /* If the virtual function table for BINFO contains
- entries, these may be useful for folding OBJ_TYPE_REF
- expressions (see gimple_fold_obj_type_ref). In that
- case, we only clear the unused fields in the BINFO
- structure. */
- BINFO_OFFSET (binfo) = NULL_TREE;
- BINFO_VTABLE (binfo) = NULL_TREE;
- BINFO_VPTR_FIELD (binfo) = NULL_TREE;
- BINFO_BASE_ACCESSES (binfo) = NULL;
- BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
- BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
- BINFO_VPTR_FIELD (binfo) = NULL_TREE;
- }
- else
- {
- /* Otherwise, get rid of the whole binfo data. */
- TYPE_BINFO (type) = NULL_TREE;
- }
- }
+ free_lang_data_in_binfo (TYPE_BINFO (type));
}
else
{
/* For non-aggregate types, clear out the language slot (which
overloads TYPE_BINFO). */
TYPE_LANG_SLOT_1 (type) = NULL_TREE;
+
+ if (INTEGRAL_TYPE_P (type)
+ || SCALAR_FLOAT_TYPE_P (type)
+ || FIXED_POINT_TYPE_P (type))
+ {
+ free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
+ free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
+ }
}
- TYPE_CONTEXT (type) = NULL_TREE;
- TYPE_STUB_DECL (type) = NULL_TREE;
+ free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
+ free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
+
+ if (debug_info_level < DINFO_LEVEL_TERSE
+ || (TYPE_CONTEXT (type)
+ && TREE_CODE (TYPE_CONTEXT (type)) != FUNCTION_DECL
+ && TREE_CODE (TYPE_CONTEXT (type)) != NAMESPACE_DECL))
+ TYPE_CONTEXT (type) = NULL_TREE;
+
+ if (debug_info_level < DINFO_LEVEL_TERSE)
+ TYPE_STUB_DECL (type) = NULL_TREE;
}
|| DECL_ASSEMBLER_NAME_SET_P (decl))
return false;
+ /* Abstract decls do not need an assembler name. */
+ if (DECL_ABSTRACT (decl))
+ return false;
+
/* For VAR_DECLs, only static, public and external symbols need an
assembler name. */
if (TREE_CODE (decl) == VAR_DECL
&& !DECL_EXTERNAL (decl))
return false;
- /* Do not set assembler name on builtins. Allow RTL expansion to
- decide whether to expand inline or via a regular call. */
- if (TREE_CODE (decl) == FUNCTION_DECL
- && DECL_BUILT_IN (decl)
- && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
- return false;
-
- /* For FUNCTION_DECLs, only used functions and functions
- represented in the callgraph need an assembler name. */
- if (TREE_CODE (decl) == FUNCTION_DECL
- && cgraph_node_for_decl (decl) == NULL
- && !TREE_USED (decl))
- return false;
-
- return true;
-}
-
-
-/* Remove all the non-variable decls from BLOCK. LOCALS is the set of
- variables in DECL_STRUCT_FUNCTION (FN)->local_decls. Every decl
- in BLOCK that is not in LOCALS is removed. */
+ if (TREE_CODE (decl) == FUNCTION_DECL)
+ {
+ /* Do not set assembler name on builtins. Allow RTL expansion to
+ decide whether to expand inline or via a regular call. */
+ if (DECL_BUILT_IN (decl)
+ && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
+ return false;
-static void
-free_lang_data_in_block (tree fn, tree block, struct pointer_set_t *locals)
-{
- tree *tp, t;
+ /* Functions represented in the callgraph need an assembler name. */
+ if (cgraph_get_node (decl) != NULL)
+ return true;
- tp = &BLOCK_VARS (block);
- while (*tp)
- {
- if (!pointer_set_contains (locals, *tp))
- *tp = TREE_CHAIN (*tp);
- else
- tp = &TREE_CHAIN (*tp);
+ /* Unused and not public functions don't need an assembler name. */
+ if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
+ return false;
}
- for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
- free_lang_data_in_block (fn, t, locals);
+ return true;
}
if (DECL_NAME (decl))
TREE_TYPE (DECL_NAME (decl)) = NULL_TREE;
- if (TREE_CODE (decl) == CONST_DECL)
- DECL_CONTEXT (decl) = NULL_TREE;
-
- /* Ignore any intervening types, because we are going to clear their
- TYPE_CONTEXT fields. */
- if (TREE_CODE (decl) != FIELD_DECL)
- DECL_CONTEXT (decl) = decl_function_context (decl);
-
- if (DECL_CONTEXT (decl)
- && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
- DECL_CONTEXT (decl) = NULL_TREE;
-
- if (TREE_CODE (decl) == VAR_DECL)
- {
- tree context = DECL_CONTEXT (decl);
-
- if (context)
- {
- enum tree_code code = TREE_CODE (context);
- if (code == FUNCTION_DECL && DECL_ABSTRACT (context))
- {
- /* Do not clear the decl context here, that will promote
- all vars to global ones. */
- DECL_INITIAL (decl) = NULL_TREE;
- }
-
- if (TREE_STATIC (decl))
- DECL_CONTEXT (decl) = NULL_TREE;
- }
- }
+ free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
+ free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
+ if (TREE_CODE (decl) == FIELD_DECL)
+ free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
- if (TREE_CODE (decl) == PARM_DECL
- || TREE_CODE (decl) == FIELD_DECL
- || TREE_CODE (decl) == RESULT_DECL)
- {
- tree unit_size = DECL_SIZE_UNIT (decl);
- tree size = DECL_SIZE (decl);
- if ((unit_size && TREE_CODE (unit_size) != INTEGER_CST)
- || (size && TREE_CODE (size) != INTEGER_CST))
- {
- DECL_SIZE_UNIT (decl) = NULL_TREE;
- DECL_SIZE (decl) = NULL_TREE;
- }
+ /* DECL_FCONTEXT is only used for debug info generation. */
+ if (TREE_CODE (decl) == FIELD_DECL
+ && debug_info_level < DINFO_LEVEL_TERSE)
+ DECL_FCONTEXT (decl) = NULL_TREE;
- if (TREE_CODE (decl) == FIELD_DECL
- && DECL_FIELD_OFFSET (decl)
- && TREE_CODE (DECL_FIELD_OFFSET (decl)) != INTEGER_CST)
- DECL_FIELD_OFFSET (decl) = NULL_TREE;
- }
- else if (TREE_CODE (decl) == FUNCTION_DECL)
+ if (TREE_CODE (decl) == FUNCTION_DECL)
{
if (gimple_has_body_p (decl))
{
tree t;
- struct pointer_set_t *locals;
/* If DECL has a gimple body, then the context for its
arguments must be DECL. Otherwise, it doesn't really
the PARM_DECL will be used in the function's body). */
for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
DECL_CONTEXT (t) = decl;
-
- /* Collect all the symbols declared in DECL. */
- locals = pointer_set_create ();
- t = DECL_STRUCT_FUNCTION (decl)->local_decls;
- for (; t; t = TREE_CHAIN (t))
- {
- pointer_set_insert (locals, TREE_VALUE (t));
-
- /* All the local symbols should have DECL as their
- context. */
- DECL_CONTEXT (TREE_VALUE (t)) = decl;
- }
-
- /* Get rid of any decl not in local_decls. */
- free_lang_data_in_block (decl, DECL_INITIAL (decl), locals);
-
- pointer_set_destroy (locals);
}
/* DECL_SAVED_TREE holds the GENERIC representation for DECL.
At this point, it is not needed anymore. */
DECL_SAVED_TREE (decl) = NULL_TREE;
+
+ /* Clear the abstract origin if it refers to a method. Otherwise
+ dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
+ origin will not be output correctly. */
+ if (DECL_ABSTRACT_ORIGIN (decl)
+ && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
+ && RECORD_OR_UNION_TYPE_P
+ (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
+ DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
+
+ /* Sometimes the C++ frontend doesn't manage to transform a temporary
+ DECL_VINDEX referring to itself into a vtable slot number as it
+ should. Happens with functions that are copied and then forgotten
+ about. Just clear it, it won't matter anymore. */
+ if (DECL_VINDEX (decl) && !host_integerp (DECL_VINDEX (decl), 0))
+ DECL_VINDEX (decl) = NULL_TREE;
}
else if (TREE_CODE (decl) == VAR_DECL)
{
- tree expr = DECL_DEBUG_EXPR (decl);
- if (expr
- && TREE_CODE (expr) == VAR_DECL
- && !TREE_STATIC (expr) && !DECL_EXTERNAL (expr))
- SET_DECL_DEBUG_EXPR (decl, NULL_TREE);
-
- if (DECL_EXTERNAL (decl))
+ if ((DECL_EXTERNAL (decl)
+ && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
+ || (decl_function_context (decl) && !TREE_STATIC (decl)))
DECL_INITIAL (decl) = NULL_TREE;
}
else if (TREE_CODE (decl) == TYPE_DECL)
- {
- DECL_INITIAL (decl) = NULL_TREE;
-
- /* DECL_CONTEXT is overloaded as DECL_FIELD_CONTEXT for
- FIELD_DECLs, which should be preserved. Otherwise,
- we shouldn't be concerned with source-level lexical
- nesting beyond this point. */
- DECL_CONTEXT (decl) = NULL_TREE;
+ DECL_INITIAL (decl) = NULL_TREE;
+ else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
+ && DECL_INITIAL (decl)
+ && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
+ {
+ /* Strip builtins from the translation-unit BLOCK. We still have
+ targets without builtin_decl support and also builtins are
+ shared nodes and thus we can't use TREE_CHAIN in multiple
+ lists. */
+ tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
+ while (*nextp)
+ {
+ tree var = *nextp;
+ if (TREE_CODE (var) == FUNCTION_DECL
+ && DECL_BUILT_IN (var))
+ *nextp = TREE_CHAIN (var);
+ else
+ nextp = &TREE_CHAIN (var);
+ }
}
}
gcc_unreachable ();
}
-#define PUSH(t) \
- if (t && !pointer_set_contains (fld->pset, t)) \
- VEC_safe_push (tree, heap, fld->worklist, (t))
+/* Push tree node T into FLD->WORKLIST.  Language-specific nodes are
+   never pushed (they are going to be removed by the free-lang-data
+   machinery), nor are nodes already recorded in FLD->PSET.  */
+
+static inline void
+fld_worklist_push (tree t, struct free_lang_data_d *fld)
+{
+  if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
+    VEC_safe_push (tree, heap, fld->worklist, (t));
+}
+
/* Operand callback helper for free_lang_data_in_node. *TP is the
subtree operand being considered. */
static tree
-find_decls_types_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
+find_decls_types_r (tree *tp, int *ws, void *data)
{
tree t = *tp;
struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
if (TREE_CODE (t) == TREE_LIST)
return NULL_TREE;
+ /* Language specific nodes will be removed, so there is no need
+ to gather anything under them. */
+ if (is_lang_specific (t))
+ {
+ *ws = 0;
+ return NULL_TREE;
+ }
+
if (DECL_P (t))
{
/* Note that walk_tree does not traverse every possible field in
decls, so we have to do our own traversals here. */
add_tree_to_fld_list (t, fld);
- PUSH (DECL_NAME (t));
- PUSH (DECL_CONTEXT (t));
- PUSH (DECL_SIZE (t));
- PUSH (DECL_SIZE_UNIT (t));
- PUSH (DECL_INITIAL(t));
- PUSH (DECL_ATTRIBUTES (t));
- PUSH (DECL_ABSTRACT_ORIGIN (t));
+ fld_worklist_push (DECL_NAME (t), fld);
+ fld_worklist_push (DECL_CONTEXT (t), fld);
+ fld_worklist_push (DECL_SIZE (t), fld);
+ fld_worklist_push (DECL_SIZE_UNIT (t), fld);
+
+ /* We are going to remove everything under DECL_INITIAL for
+ TYPE_DECLs. No point walking them. */
+ if (TREE_CODE (t) != TYPE_DECL)
+ fld_worklist_push (DECL_INITIAL (t), fld);
+
+ fld_worklist_push (DECL_ATTRIBUTES (t), fld);
+ fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
if (TREE_CODE (t) == FUNCTION_DECL)
{
- PUSH (DECL_ARGUMENTS (t));
- PUSH (DECL_RESULT (t));
+ fld_worklist_push (DECL_ARGUMENTS (t), fld);
+ fld_worklist_push (DECL_RESULT (t), fld);
}
else if (TREE_CODE (t) == TYPE_DECL)
{
- PUSH (DECL_ARGUMENT_FLD (t));
- PUSH (DECL_VINDEX (t));
+ fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
+ fld_worklist_push (DECL_VINDEX (t), fld);
}
else if (TREE_CODE (t) == FIELD_DECL)
{
- PUSH (DECL_FIELD_OFFSET (t));
- PUSH (DECL_BIT_FIELD_TYPE (t));
- PUSH (DECL_QUALIFIER (t));
- PUSH (DECL_FIELD_BIT_OFFSET (t));
- PUSH (DECL_FCONTEXT (t));
+ fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
+ fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
+ fld_worklist_push (DECL_QUALIFIER (t), fld);
+ fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
+ fld_worklist_push (DECL_FCONTEXT (t), fld);
}
else if (TREE_CODE (t) == VAR_DECL)
{
- PUSH (DECL_SECTION_NAME (t));
- PUSH (DECL_COMDAT_GROUP (t));
+ fld_worklist_push (DECL_SECTION_NAME (t), fld);
+ fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
}
- PUSH (TREE_CHAIN (t));
+ if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
+ && DECL_HAS_VALUE_EXPR_P (t))
+ fld_worklist_push (DECL_VALUE_EXPR (t), fld);
+
+ if (TREE_CODE (t) != FIELD_DECL
+ && TREE_CODE (t) != TYPE_DECL)
+ fld_worklist_push (TREE_CHAIN (t), fld);
*ws = 0;
}
else if (TYPE_P (t))
types, so we have to do our own traversals here. */
add_tree_to_fld_list (t, fld);
- PUSH (TYPE_CACHED_VALUES (t));
- PUSH (TYPE_SIZE (t));
- PUSH (TYPE_SIZE_UNIT (t));
- PUSH (TYPE_ATTRIBUTES (t));
- PUSH (TYPE_POINTER_TO (t));
- PUSH (TYPE_REFERENCE_TO (t));
- PUSH (TYPE_NAME (t));
- PUSH (TYPE_MINVAL (t));
- PUSH (TYPE_MAXVAL (t));
- PUSH (TYPE_MAIN_VARIANT (t));
- PUSH (TYPE_NEXT_VARIANT (t));
- PUSH (TYPE_CONTEXT (t));
- PUSH (TYPE_CANONICAL (t));
-
- if (RECORD_OR_UNION_TYPE_P (t)
- && TYPE_BINFO (t))
+ if (!RECORD_OR_UNION_TYPE_P (t))
+ fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
+ fld_worklist_push (TYPE_SIZE (t), fld);
+ fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
+ fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
+ fld_worklist_push (TYPE_POINTER_TO (t), fld);
+ fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
+ fld_worklist_push (TYPE_NAME (t), fld);
+      /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not
+	 stream them, and thus do not want to reach unused pointer
+	 types this way.  */
+ if (!POINTER_TYPE_P (t))
+ fld_worklist_push (TYPE_MINVAL (t), fld);
+ if (!RECORD_OR_UNION_TYPE_P (t))
+ fld_worklist_push (TYPE_MAXVAL (t), fld);
+ fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
+      /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
+	 do not want to reach unused variants this way.  */
+ fld_worklist_push (TYPE_CONTEXT (t), fld);
+      /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do
+	 not want to reach unused types this way.  */
+
+ if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
{
unsigned i;
tree tem;
for (i = 0; VEC_iterate (tree, BINFO_BASE_BINFOS (TYPE_BINFO (t)),
i, tem); ++i)
- PUSH (TREE_TYPE (tem));
+ fld_worklist_push (TREE_TYPE (tem), fld);
+ tem = BINFO_VIRTUALS (TYPE_BINFO (t));
+ if (tem
+ /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
+ && TREE_CODE (tem) == TREE_LIST)
+ do
+ {
+ fld_worklist_push (TREE_VALUE (tem), fld);
+ tem = TREE_CHAIN (tem);
+ }
+ while (tem);
+ }
+ if (RECORD_OR_UNION_TYPE_P (t))
+ {
+ tree tem;
+      /* Push all TYPE_FIELDS - fields of interest can be interleaved
+	 with uninteresting entries.  */
+ tem = TYPE_FIELDS (t);
+ while (tem)
+ {
+ if (TREE_CODE (tem) == FIELD_DECL)
+ fld_worklist_push (tem, fld);
+ tem = TREE_CHAIN (tem);
+ }
}
- PUSH (TREE_CHAIN (t));
+ fld_worklist_push (TREE_CHAIN (t), fld);
*ws = 0;
}
+ else if (TREE_CODE (t) == BLOCK)
+ {
+ tree tem;
+ for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
+ fld_worklist_push (tem, fld);
+ for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
+ fld_worklist_push (tem, fld);
+ fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
+ }
- PUSH (TREE_TYPE (t));
+ if (TREE_CODE (t) != IDENTIFIER_NODE)
+ fld_worklist_push (TREE_TYPE (t), fld);
return NULL_TREE;
}
-#undef PUSH
/* Find decls and types in T. */
{
basic_block bb;
struct function *fn;
+ unsigned ix;
tree t;
find_decls_types (n->decl, fld);
fn = DECL_STRUCT_FUNCTION (n->decl);
/* Traverse locals. */
- for (t = fn->local_decls; t; t = TREE_CHAIN (t))
- find_decls_types (TREE_VALUE (t), fld);
+ FOR_EACH_LOCAL_DECL (fn, ix, t)
+ find_decls_types (t, fld);
/* Traverse EH regions in FN. */
{
find_decls_types (v->decl, fld);
}
+/* If T needs an assembler name, have one created for it.
+
+   NOTE(review): the function name contains a typo ("neeeded");
+   renaming it would touch every caller, so it is left as-is here.  */
+
+void
+assign_assembler_name_if_neeeded (tree t)
+{
+  if (need_assembler_name_p (t))
+    {
+      /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
+	 diagnostics that use input_location to show locus
+	 information.  The problem here is that, at this point,
+	 input_location is generally anchored to the end of the file
+	 (since the parser is long gone), so we don't have a good
+	 position to pin it to.
+
+	 To alleviate this problem, this uses the location of T's
+	 declaration.  Examples of this are
+	 testsuite/g++.dg/template/cond2.C and
+	 testsuite/g++.dg/template/pr35240.C.  */
+      location_t saved_location = input_location;
+      input_location = DECL_SOURCE_LOCATION (t);
+
+      decl_assembler_name (t);
+
+      input_location = saved_location;
+    }
+}
+
/* Free language specific information for every operand and expression
in every node of the call graph. This process operates in three stages:
for (n = cgraph_nodes; n; n = n->next)
find_decls_types_in_node (n, &fld);
- for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p); i++)
+ FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
find_decls_types (p->decl, &fld);
/* Find decls and types in every varpool symbol. */
- for (v = varpool_nodes_queue; v; v = v->next_needed)
+ for (v = varpool_nodes; v; v = v->next)
find_decls_types_in_var (v, &fld);
/* Set the assembler name on every decl found. We need to do this
now because free_lang_data_in_decl will invalidate data needed
for mangling. This breaks mangling on interdependent decls. */
- for (i = 0; VEC_iterate (tree, fld.decls, i, t); i++)
- if (need_assembler_name_p (t))
- {
- /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
- diagnostics that use input_location to show locus
- information. The problem here is that, at this point,
- input_location is generally anchored to the end of the file
- (since the parser is long gone), so we don't have a good
- position to pin it to.
-
- To alleviate this problem, this uses the location of T's
- declaration. Examples of this are
- testsuite/g++.dg/template/cond2.C and
- testsuite/g++.dg/template/pr35240.C. */
- location_t saved_location = input_location;
- input_location = DECL_SOURCE_LOCATION (t);
-
- decl_assembler_name (t);
-
- input_location = saved_location;
- }
+ FOR_EACH_VEC_ELT (tree, fld.decls, i, t)
+ assign_assembler_name_if_neeeded (t);
/* Traverse every decl found freeing its language data. */
- for (i = 0; VEC_iterate (tree, fld.decls, i, t); i++)
+ FOR_EACH_VEC_ELT (tree, fld.decls, i, t)
free_lang_data_in_decl (t);
/* Traverse every type found freeing its language data. */
- for (i = 0; VEC_iterate (tree, fld.types, i, t); i++)
+ FOR_EACH_VEC_ELT (tree, fld.types, i, t)
free_lang_data_in_type (t);
pointer_set_destroy (fld.pset);
static unsigned
free_lang_data (void)
{
+ unsigned i;
+
+ /* If we are the LTO frontend we have freed lang-specific data already. */
+ if (in_lto_p
+ || !flag_generate_lto)
+ return 0;
+
+ /* Allocate and assign alias sets to the standard integer types
+ while the slots are still in the way the frontends generated them. */
+ for (i = 0; i < itk_none; ++i)
+ if (integer_types[i])
+ TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
+
/* Traverse the IL resetting language specific information for
operands, expressions, etc. */
free_lang_data_in_cgraph ();
else
signed_char_type_node = char_type_node;
- /* Reset some langhooks. */
+ /* Reset some langhooks. Do not reset types_compatible_p, it may
+ still be used indirectly via the get_alias_set langhook. */
lang_hooks.callgraph.analyze_expr = NULL;
- lang_hooks.types_compatible_p = NULL;
lang_hooks.dwarf_name = lhd_dwarf_name;
lang_hooks.decl_printable_name = gimple_decl_printable_name;
- lang_hooks.set_decl_assembler_name = lhd_set_decl_assembler_name;
- lang_hooks.fold_obj_type_ref = gimple_fold_obj_type_ref;
+ /* We do not want the default decl_assembler_name implementation,
+ rather if we have fixed everything we want a wrapper around it
+ asserting that all non-local symbols already got their assembler
+ name and only produce assembler names for local symbols. Or rather
+ make sure we never call decl_assembler_name on local symbols and
+ devise a separate, middle-end private scheme for it. */
/* Reset diagnostic machinery. */
- diagnostic_starter (global_dc) = default_diagnostic_starter;
+ diagnostic_starter (global_dc) = default_tree_diagnostic_starter;
diagnostic_finalizer (global_dc) = default_diagnostic_finalizer;
diagnostic_format_decoder (global_dc) = default_tree_printer;
}
-/* Gate function for free_lang_data. */
-
-static bool
-gate_free_lang_data (void)
-{
- /* FIXME. Remove after save_debug_info is working. */
- return !flag_gtoggle && debug_info_level <= DINFO_LEVEL_TERSE;
-}
-
-
-struct simple_ipa_opt_pass pass_ipa_free_lang_data =
+struct simple_ipa_opt_pass pass_ipa_free_lang_data =
{
{
SIMPLE_IPA_PASS,
- NULL, /* name */
- gate_free_lang_data, /* gate */
+ "*free_lang_data", /* name */
+ NULL, /* gate */
free_lang_data, /* execute */
NULL, /* sub */
NULL, /* next */
if (TREE_CODE (ident) != IDENTIFIER_NODE)
return 0;
-
+
p = IDENTIFIER_POINTER (ident);
ident_len = IDENTIFIER_LENGTH (ident);
-
+
if (ident_len == attr_len
&& strcmp (attr, p) == 0)
return 1;
tree a;
for (a = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (a2)),
attributes);
- a != NULL_TREE;
+ a != NULL_TREE && !attribute_value_equal (a, a2);
a = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (a2)),
TREE_CHAIN (a)))
- {
- if (TREE_VALUE (a) != NULL
- && TREE_CODE (TREE_VALUE (a)) == TREE_LIST
- && TREE_VALUE (a2) != NULL
- && TREE_CODE (TREE_VALUE (a2)) == TREE_LIST)
- {
- if (simple_cst_list_equal (TREE_VALUE (a),
- TREE_VALUE (a2)) == 1)
- break;
- }
- else if (simple_cst_equal (TREE_VALUE (a),
- TREE_VALUE (a2)) == 1)
- break;
- }
+ ;
if (a == NULL_TREE)
{
a1 = copy_node (a2);
marked dllimport and a definition appears later, then the object
is not dllimport'd. We also remove a `new' dllimport if the old list
contains dllexport: dllexport always overrides dllimport, regardless
- of the order of declaration. */
+ of the order of declaration. */
if (!VAR_OR_FUNCTION_DECL_P (new_tree))
delete_dllimport_p = 0;
else if (DECL_DLLIMPORT_P (new_tree)
&& lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
- {
+ {
DECL_DLLIMPORT_P (new_tree) = 0;
warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
"dllimport ignored", new_tree);
}
/* Let an inline definition silently override the external reference,
- but otherwise warn about attribute inconsistency. */
+ but otherwise warn about attribute inconsistency. */
else if (TREE_CODE (new_tree) == VAR_DECL
|| !DECL_DECLARED_INLINE_P (new_tree))
warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
- if (delete_dllimport_p)
+ if (delete_dllimport_p)
{
tree prev, t;
- const size_t attr_len = strlen ("dllimport");
-
+ const size_t attr_len = strlen ("dllimport");
+
/* Scan the list for dllimport and delete it. */
for (prev = NULL_TREE, t = a; t; prev = t, t = TREE_CHAIN (t))
{
any damage. */
if (is_dllimport)
{
- /* Honor any target-specific overrides. */
+ /* Honor any target-specific overrides. */
if (!targetm.valid_dllimport_attribute_p (node))
*no_add_attrs = true;
&& DECL_DECLARED_INLINE_P (node))
{
warning (OPT_Wattributes, "inline function %q+D declared as "
- " dllimport: attribute ignored", node);
+ " dllimport: attribute ignored", node);
*no_add_attrs = true;
}
/* Like MS, treat definition of dllimported variables and
DECL_DLLIMPORT_P (node) = 1;
}
else if (TREE_CODE (node) == FUNCTION_DECL
- && DECL_DECLARED_INLINE_P (node))
+ && DECL_DECLARED_INLINE_P (node)
+ && flag_keep_inline_dllexport)
/* An exported function, even if inline, must be emitted. */
DECL_EXTERNAL (node) = 0;
if (DECL_VISIBILITY_SPECIFIED (node)
&& DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
error ("%qE implies default visibility, but %qD has already "
- "been declared with a different visibility",
+ "been declared with a different visibility",
name, node);
DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
DECL_VISIBILITY_SPECIFIED (node) = 1;
TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
+ TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}
/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
&& TYPE_NAME (cand) == TYPE_NAME (base)
/* Apparently this is needed for Objective-C. */
&& TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
+ /* Check alignment. */
+ && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
+ && attribute_list_equal (TYPE_ATTRIBUTES (cand),
+ TYPE_ATTRIBUTES (base)));
+}
+
+/* Returns true iff CAND is equivalent to BASE except for having
+   alignment ALIGN.  */
+
+static bool
+check_aligned_type (const_tree cand, const_tree base, unsigned int align)
+{
+  return (TYPE_QUALS (cand) == TYPE_QUALS (base)
+	  && TYPE_NAME (cand) == TYPE_NAME (base)
+	  /* Apparently this is needed for Objective-C.  */
+	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
+	  /* CAND must have exactly the requested alignment.  */
+	  && TYPE_ALIGN (cand) == align
&& attribute_list_equal (TYPE_ATTRIBUTES (cand),
TYPE_ATTRIBUTES (base)));
}
else
/* T is its own canonical type. */
TYPE_CANONICAL (t) = t;
-
+
}
return t;
}
+/* Create a variant of type TYPE with alignment ALIGN.  */
+
+tree
+build_aligned_type (tree type, unsigned int align)
+{
+  tree t;
+
+  /* Packed types and types that already have the requested alignment
+     are returned unchanged.  */
+  if (TYPE_PACKED (type)
+      || TYPE_ALIGN (type) == align)
+    return type;
+
+  /* Reuse an existing variant with the requested alignment, if any.  */
+  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
+    if (check_aligned_type (t, type, align))
+      return t;
+
+  /* Otherwise build a fresh variant and set its alignment.  */
+  t = build_variant_type_copy (type);
+  TYPE_ALIGN (t) = align;
+
+  return t;
+}
+
/* Create a new distinct copy of TYPE. The new type is made its own
MAIN_VARIANT. If TYPE requires structural equality checks, the
resulting type requires structural equality checks; otherwise, its
build_distinct_type_copy (tree type)
{
tree t = copy_node (type);
-
+
TYPE_POINTER_TO (t) = 0;
TYPE_REFERENCE_TO (t) = 0;
/* Since we're building a variant, assume that it is a non-semantic
variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
-
+
/* Add the new type to the chain of variants of TYPE. */
TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
TYPE_NEXT_VARIANT (m) = t;
return (a->from == b->from);
}
-/* Hash a from tree in a tree_map. */
+/* Hash a from tree in a tree_base_map. */
unsigned int
tree_map_base_hash (const void *item)
return ggc_marked_p (((const struct tree_map_base *) p)->from);
}
+/* Hash a from tree in a tree_map. */
+
unsigned int
tree_map_hash (const void *item)
{
return (((const struct tree_map *) item)->hash);
}
+/* Hash a from tree in a tree_decl_map: the hash value is the
+ DECL_UID of the FROM decl rather than its pointer value. */
+
+unsigned int
+tree_decl_map_hash (const void *item)
+{
+ return DECL_UID (((const struct tree_decl_map *) item)->base.from);
+}
+
/* Return the initialization priority for DECL. */
priority_type
h = (struct tree_priority_map *) *loc;
if (!h)
{
- h = GGC_CNEW (struct tree_priority_map);
+ h = ggc_alloc_cleared_tree_priority_map ();
*loc = h;
h->base.from = decl;
h->init = DEFAULT_INIT_PRIORITY;
struct tree_priority_map *h;
gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
+ if (priority == DEFAULT_INIT_PRIORITY)
+ return;
h = decl_priority_info (decl);
h->init = priority;
-}
+}
/* Set the finalization priority for DECL to PRIORITY. */
struct tree_priority_map *h;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ if (priority == DEFAULT_INIT_PRIORITY)
+ return;
h = decl_priority_info (decl);
h->fini = priority;
-}
+}
/* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
/* Lookup a debug expression for FROM, and return it if we find one. */
-tree
+tree
decl_debug_expr_lookup (tree from)
{
- struct tree_map *h, in;
+ struct tree_decl_map *h, in;
in.base.from = from;
- h = (struct tree_map *) htab_find_with_hash (debug_expr_for_decl, &in,
- htab_hash_pointer (from));
+ h = (struct tree_decl_map *)
+ htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
if (h)
return h->to;
return NULL_TREE;
void
decl_debug_expr_insert (tree from, tree to)
{
- struct tree_map *h;
+ struct tree_decl_map *h;
void **loc;
- h = GGC_NEW (struct tree_map);
- h->hash = htab_hash_pointer (from);
+ h = ggc_alloc_tree_decl_map ();
h->base.from = from;
h->to = to;
- loc = htab_find_slot_with_hash (debug_expr_for_decl, h, h->hash, INSERT);
- *(struct tree_map **) loc = h;
-}
+ loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
+ INSERT);
+ *(struct tree_decl_map **) loc = h;
+}
/* Lookup a value expression for FROM, and return it if we find one. */
-tree
+tree
decl_value_expr_lookup (tree from)
{
- struct tree_map *h, in;
+ struct tree_decl_map *h, in;
in.base.from = from;
- h = (struct tree_map *) htab_find_with_hash (value_expr_for_decl, &in,
- htab_hash_pointer (from));
+ h = (struct tree_decl_map *)
+ htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
if (h)
return h->to;
return NULL_TREE;
void
decl_value_expr_insert (tree from, tree to)
{
- struct tree_map *h;
+ struct tree_decl_map *h;
void **loc;
- h = GGC_NEW (struct tree_map);
- h->hash = htab_hash_pointer (from);
+ h = ggc_alloc_tree_decl_map ();
h->base.from = from;
h->to = to;
- loc = htab_find_slot_with_hash (value_expr_for_decl, h, h->hash, INSERT);
- *(struct tree_map **) loc = h;
+ loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
+ INSERT);
+ *(struct tree_decl_map **) loc = h;
}
/* Hashing of types so that we don't make duplicates.
|| TREE_TYPE (a->type) != TREE_TYPE (b->type)
|| !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
TYPE_ATTRIBUTES (b->type))
- || TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
- || TYPE_MODE (a->type) != TYPE_MODE (b->type)
- || (TREE_CODE (a->type) != COMPLEX_TYPE
+ || (TREE_CODE (a->type) != COMPLEX_TYPE
&& TYPE_NAME (a->type) != TYPE_NAME (b->type)))
return 0;
+ /* Be careful about comparing arrays before and after the element type
+ has been completed; don't compare TYPE_ALIGN unless both types are
+ complete. */
+ if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
+ && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
+ || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
+ return 0;
+
switch (TREE_CODE (a->type))
{
case VOID_TYPE:
return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
case METHOD_TYPE:
- return (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
- && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
- || (TYPE_ARG_TYPES (a->type)
- && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
- && TYPE_ARG_TYPES (b->type)
- && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
- && type_list_equal (TYPE_ARG_TYPES (a->type),
- TYPE_ARG_TYPES (b->type)))));
-
+ if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
+ && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
+ || (TYPE_ARG_TYPES (a->type)
+ && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
+ && TYPE_ARG_TYPES (b->type)
+ && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
+ && type_list_equal (TYPE_ARG_TYPES (a->type),
+ TYPE_ARG_TYPES (b->type)))))
+ break;
+ return 0;
case ARRAY_TYPE:
return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
struct type_hash *h;
void **loc;
- h = GGC_NEW (struct type_hash);
+ h = ggc_alloc_type_hash ();
h->hash = hashcode;
h->type = type;
loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
being passed. */
gcc_assert (TYPE_MAIN_VARIANT (type) == type);
- if (!lang_hooks.types.hash_types)
- return type;
-
/* See if the type is in the hash table already. If so, return it.
Otherwise, add the type. */
t1 = type_hash_lookup (hashcode, type);
/* See if the data pointed to by the type hash table is marked. We consider
it marked if the type is marked or if a debug type number or symbol
- table entry has been made for the type. This reduces the amount of
- debugging output and eliminates that dependency of the debug output on
- the number of garbage collections. */
+ table entry has been made for the type. */
static int
type_hash_marked_p (const void *p)
{
const_tree const type = ((const struct type_hash *) p)->type;
- return ggc_marked_p (type) || TYPE_SYMTAB_POINTER (type);
+ return ggc_marked_p (type);
}
static void
const_tree. */
for (attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)),
CONST_CAST_TREE(l1));
- attr != NULL_TREE;
+ attr != NULL_TREE && !attribute_value_equal (t2, attr);
attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)),
TREE_CHAIN (attr)))
- {
- if (TREE_VALUE (t2) != NULL
- && TREE_CODE (TREE_VALUE (t2)) == TREE_LIST
- && TREE_VALUE (attr) != NULL
- && TREE_CODE (TREE_VALUE (attr)) == TREE_LIST)
- {
- if (simple_cst_list_equal (TREE_VALUE (t2),
- TREE_VALUE (attr)) == 1)
- break;
- }
- else if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) == 1)
- break;
- }
+ ;
- if (attr == 0)
+ if (attr == NULL_TREE)
return 0;
}
return false;
}
+/* Return true if CODE represents a ternary tree code for which the
+ first two operands are commutative. Otherwise return false. */
+bool
+commutative_ternary_tree_code (enum tree_code code)
+{
+ switch (code)
+ {
+ /* Widening multiply-accumulate: the two multiplied operands
+ may be exchanged without changing the result. */
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ return true;
+
+ default:
+ break;
+ }
+ return false;
+}
+
/* Generate a hash value for an expression. This can be used iteratively
by passing a previous result as the VAL argument.
return iterative_hash_expr (TREE_IMAGPART (t), val);
case VECTOR_CST:
return iterative_hash_expr (TREE_VECTOR_CST_ELTS (t), val);
-
case SSA_NAME:
- /* we can just compare by pointer. */
+ /* We can just compare by pointer. */
return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
-
+ case PLACEHOLDER_EXPR:
+ /* The node itself doesn't matter. */
+ return val;
case TREE_LIST:
/* A list of expressions, for a CALL_EXPR or as the elements of a
VECTOR_CST. */
}
return val;
}
+ case MEM_REF:
+ {
+ /* The type of the second operand is relevant, except for
+ its top-level qualifiers. */
+ tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (t, 1)));
+
+ val = iterative_hash_object (TYPE_HASH (type), val);
+
+ /* We could use the standard hash computation from this point
+ on. */
+ val = iterative_hash_object (code, val);
+ val = iterative_hash_expr (TREE_OPERAND (t, 1), val);
+ val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
+ return val;
+ }
case FUNCTION_DECL:
/* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
Otherwise nodes that compare equal according to operand_equal_p might
else
{
gcc_assert (IS_EXPR_CODE_CLASS (tclass));
-
+
val = iterative_hash_object (code, val);
/* Don't hash the type, that can lead to having nodes which
tree
build_pointer_type (tree to_type)
{
- return build_pointer_type_for_mode (to_type, ptr_mode, false);
+ addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
+ : TYPE_ADDR_SPACE (to_type);
+ enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+ return build_pointer_type_for_mode (to_type, pointer_mode, false);
}
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
SET_TYPE_STRUCTURAL_EQUALITY (t);
else if (TYPE_CANONICAL (to_type) != to_type)
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= build_reference_type_for_mode (TYPE_CANONICAL (to_type),
mode, can_alias_all);
tree
build_reference_type (tree to_type)
{
- return build_reference_type_for_mode (to_type, ptr_mode, false);
+ addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
+ : TYPE_ADDR_SPACE (to_type);
+ enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+ return build_reference_type_for_mode (to_type, pointer_mode, false);
}
/* Build a type that is compatible with t but has no cv quals anywhere
}
}
-/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
- MAXVAL should be the maximum value in the domain
- (one less than the length of the array).
-
- The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
- We don't enforce this limit, that is up to caller (e.g. language front end).
- The limit exists because the result is a signed type and we don't handle
- sizes that use more than one HOST_WIDE_INT. */
-
-tree
-build_index_type (tree maxval)
-{
- tree itype = make_node (INTEGER_TYPE);
-
- TREE_TYPE (itype) = sizetype;
- TYPE_PRECISION (itype) = TYPE_PRECISION (sizetype);
- TYPE_MIN_VALUE (itype) = size_zero_node;
- TYPE_MAX_VALUE (itype) = fold_convert (sizetype, maxval);
- SET_TYPE_MODE (itype, TYPE_MODE (sizetype));
- TYPE_SIZE (itype) = TYPE_SIZE (sizetype);
- TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (sizetype);
- TYPE_ALIGN (itype) = TYPE_ALIGN (sizetype);
- TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (sizetype);
-
- if (host_integerp (maxval, 1))
- return type_hash_canon (tree_low_cst (maxval, 1), itype);
- else
- {
- /* Since we cannot hash this type, we need to compare it using
- structural equality checks. */
- SET_TYPE_STRUCTURAL_EQUALITY (itype);
- return itype;
- }
-}
+#define MAX_INT_CACHED_PREC \
+ (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
+static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
/* Builds a signed or unsigned integer type of precision PRECISION.
Used for C bitfields whose precision does not match that of
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
int unsignedp)
{
- tree itype = make_node (INTEGER_TYPE);
+ tree itype, ret;
+ if (unsignedp)
+ unsignedp = MAX_INT_CACHED_PREC + 1;
+
+ if (precision <= MAX_INT_CACHED_PREC)
+ {
+ itype = nonstandard_integer_type_cache[precision + unsignedp];
+ if (itype)
+ return itype;
+ }
+
+ itype = make_node (INTEGER_TYPE);
TYPE_PRECISION (itype) = precision;
if (unsignedp)
else
fixup_signed_type (itype);
+ ret = itype;
if (host_integerp (TYPE_MAX_VALUE (itype), 1))
- return type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
+ ret = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
+ if (precision <= MAX_INT_CACHED_PREC)
+ nonstandard_integer_type_cache[precision + unsignedp] = ret;
- return itype;
+ return ret;
}
-/* Create a range of some discrete type TYPE (an INTEGER_TYPE,
- ENUMERAL_TYPE or BOOLEAN_TYPE), with low bound LOWVAL and
- high bound HIGHVAL. If TYPE is NULL, sizetype is used. */
+/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
+ or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
+ is true, reuse such a type that has already been constructed. */
-tree
-build_range_type (tree type, tree lowval, tree highval)
+static tree
+build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
tree itype = make_node (INTEGER_TYPE);
+ hashval_t hashcode = 0;
TREE_TYPE (itype) = type;
- if (type == NULL_TREE)
- type = sizetype;
TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
TYPE_ALIGN (itype) = TYPE_ALIGN (type);
TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
- if (host_integerp (lowval, 0) && highval != 0 && host_integerp (highval, 0))
- return type_hash_canon (tree_low_cst (highval, 0)
- - tree_low_cst (lowval, 0),
- itype);
- else
+ if (!shared)
return itype;
+
+ if ((TYPE_MIN_VALUE (itype)
+ && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
+ || (TYPE_MAX_VALUE (itype)
+ && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
+ {
+ /* Since we cannot reliably merge this type, we need to compare it using
+ structural equality checks. */
+ SET_TYPE_STRUCTURAL_EQUALITY (itype);
+ return itype;
+ }
+
+ hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
+ hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
+ hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
+ itype = type_hash_canon (hashcode, itype);
+
+ return itype;
+}
+
+/* Wrapper around build_range_type_1 with SHARED set to true: reuse an
+ equivalent range type if one has already been constructed. */
+
+tree
+build_range_type (tree type, tree lowval, tree highval)
+{
+ return build_range_type_1 (type, lowval, highval, true);
+}
+
+/* Wrapper around build_range_type_1 with SHARED set to false: always
+ build a fresh, unshared range type. */
+
+tree
+build_nonshared_range_type (tree type, tree lowval, tree highval)
+{
+ return build_range_type_1 (type, lowval, highval, false);
+}
+
+/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
+ MAXVAL should be the maximum value in the domain
+ (one less than the length of the array).
+
+ The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
+ We don't enforce this limit, that is up to caller (e.g. language front end).
+ The limit exists because the result is a signed type and we don't handle
+ sizes that use more than one HOST_WIDE_INT. */
+
+tree
+build_index_type (tree maxval)
+{
+ /* An index type is simply a shared range type over sizetype
+ starting at zero. */
+ return build_range_type (sizetype, size_zero_node, maxval);
+}
/* Return true if the debug information for TYPE, a subtype, should be emitted
return true;
}
-/* Just like build_index_type, but takes lowval and highval instead
- of just highval (maxval). */
-
-tree
-build_index_2_type (tree lowval, tree highval)
-{
- return build_range_type (sizetype, lowval, highval);
-}
-
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
and number of elements specified by the range of values of INDEX_TYPE.
- If such a type has already been constructed, reuse it. */
+ If SHARED is true, reuse such a type that has already been constructed. */
-tree
-build_array_type (tree elt_type, tree index_type)
+static tree
+build_array_type_1 (tree elt_type, tree index_type, bool shared)
{
tree t;
- hashval_t hashcode = 0;
if (TREE_CODE (elt_type) == FUNCTION_TYPE)
{
t = make_node (ARRAY_TYPE);
TREE_TYPE (t) = elt_type;
TYPE_DOMAIN (t) = index_type;
+ TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
layout_type (t);
/* If the element type is incomplete at this point we get marked for
if (TYPE_STRUCTURAL_EQUALITY_P (t))
return t;
- hashcode = iterative_hash_object (TYPE_HASH (elt_type), hashcode);
- if (index_type)
- hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
- t = type_hash_canon (hashcode, t);
+ if (shared)
+ {
+ hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
+ if (index_type)
+ hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
+ t = type_hash_canon (hashcode, t);
+ }
if (TYPE_CANONICAL (t) == t)
{
SET_TYPE_STRUCTURAL_EQUALITY (t);
else if (TYPE_CANONICAL (elt_type) != elt_type
|| (index_type && TYPE_CANONICAL (index_type) != index_type))
- TYPE_CANONICAL (t)
- = build_array_type (TYPE_CANONICAL (elt_type),
- index_type ? TYPE_CANONICAL (index_type) : NULL);
+ TYPE_CANONICAL (t)
+ = build_array_type_1 (TYPE_CANONICAL (elt_type),
+ index_type
+ ? TYPE_CANONICAL (index_type) : NULL_TREE,
+ shared);
}
return t;
}
+/* Wrapper around build_array_type_1 with SHARED set to true: reuse an
+ equivalent array type if one has already been constructed. */
+
+tree
+build_array_type (tree elt_type, tree index_type)
+{
+ return build_array_type_1 (elt_type, index_type, true);
+}
+
+/* Wrapper around build_array_type_1 with SHARED set to false: always
+ build a fresh, unshared array type. */
+
+tree
+build_nonshared_array_type (tree elt_type, tree index_type)
+{
+ return build_array_type_1 (elt_type, index_type, false);
+}
+
/* Recursively examines the array elements of TYPE, until a non-array
element type is found. */
}
/* Computes the canonical argument types from the argument type list
- ARGTYPES.
+ ARGTYPES.
Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
on entry to this function, or if any of the ARGTYPES are
canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
true) or would not differ from ARGTYPES. */
-static tree
-maybe_canonicalize_argtypes(tree argtypes,
+static tree
+maybe_canonicalize_argtypes(tree argtypes,
bool *any_structural_p,
bool *any_noncanonical_p)
{
tree arg;
bool any_noncanonical_argtypes_p = false;
-
+
for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
{
if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
/* Set up the canonical type. */
any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
- canon_argtypes = maybe_canonicalize_argtypes (arg_types,
+ canon_argtypes = maybe_canonicalize_argtypes (arg_types,
&any_structural_p,
&any_noncanonical_p);
if (any_structural_p)
else if (any_noncanonical_p)
TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
canon_argtypes);
-
+
if (!COMPLETE_TYPE_P (t))
layout_type (t);
return t;
if (TREE_CODE (orig_type) != METHOD_TYPE
|| !bitmap_bit_p (args_to_skip, 0))
{
- new_type = copy_node (orig_type);
+ new_type = build_distinct_type_copy (orig_type);
TYPE_ARG_TYPES (new_type) = new_reversed;
}
else
return new_type;
}
-/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP.
-
+/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP.
+
Arguments from DECL_ARGUMENTS list can't be removed now, since they are
- linked by TREE_CHAIN directly. It is caller responsibility to eliminate
+ linked by TREE_CHAIN directly. The caller is responsible for eliminating
them when they are being duplicated (i.e. copy_arguments_for_versioning). */
tree
we expect first argument to be THIS pointer. */
if (bitmap_bit_p (args_to_skip, 0))
DECL_VINDEX (new_decl) = NULL_TREE;
+
+ /* When signature changes, we need to clear builtin info. */
+ if (DECL_BUILT_IN (new_decl) && !bitmap_empty_p (args_to_skip))
+ {
+ DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
+ DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
+ }
return new_decl;
}
/* Build a function type. The RETURN_TYPE is the type returned by the
- function. If VAARGS is set, no void_type_node is appended to the
- the list. ARGP muse be alway be terminated be a NULL_TREE. */
+ function. If VAARGS is set, no void_type_node is appended to
+ the list. ARGP must always be terminated by a NULL_TREE. */
static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
last = args;
if (args != NULL_TREE)
args = nreverse (args);
- gcc_assert (args != NULL_TREE && last != void_list_node);
+ gcc_assert (last != void_list_node);
}
else if (args == NULL_TREE)
args = void_list_node;
if (any_structural_p)
SET_TYPE_STRUCTURAL_EQUALITY (t);
else if (any_noncanonical_p)
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= build_method_type_directly (TYPE_CANONICAL (basetype),
TYPE_CANONICAL (rettype),
canon_argtypes);
SET_TYPE_STRUCTURAL_EQUALITY (t);
else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
|| TYPE_CANONICAL (type) != type)
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
TYPE_CANONICAL (type));
}
if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
SET_TYPE_STRUCTURAL_EQUALITY (t);
else if (TYPE_CANONICAL (component_type) != component_type)
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= build_complex_type (TYPE_CANONICAL (component_type));
}
}
}
+ /* If we finally reach a constant see if it fits in for_type and
+ in that case convert it. */
+ if (for_type
+ && TREE_CODE (win) == INTEGER_CST
+ && TREE_TYPE (win) != for_type
+ && int_fits_type_p (win, for_type))
+ win = fold_convert (for_type, win);
+
return win;
}
\f
return win;
}
\f
-/* Nonzero if integer constant C has a value that is permissible
+/* Returns true if integer constant C has a value that is permissible
for type TYPE (an INTEGER_TYPE). */
-int
+bool
int_fits_type_p (const_tree c, const_tree type)
{
tree type_low_bound, type_high_bound;
/* If at least one bound of the type is a constant integer, we can check
ourselves and maybe make a decision. If no such decision is possible, but
this type is a subtype, try checking against that. Otherwise, use
- fit_double_type, which checks against the precision.
+ double_int_fits_to_tree_p, which checks against the precision.
Compute the status for each possibly constant bound, and return if we see
one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
int t_neg = (unsc && double_int_negative_p (dd));
if (c_neg && !t_neg)
- return 0;
+ return false;
if ((c_neg || !t_neg) && double_int_ucmp (dc, dd) < 0)
- return 0;
+ return false;
}
else if (double_int_cmp (dc, dd, unsc) < 0)
- return 0;
+ return false;
ok_for_low_bound = true;
}
else
int t_neg = (unsc && double_int_negative_p (dd));
if (t_neg && !c_neg)
- return 0;
+ return false;
if ((t_neg || !c_neg) && double_int_ucmp (dc, dd) > 0)
- return 0;
+ return false;
}
else if (double_int_cmp (dc, dd, unsc) > 0)
- return 0;
+ return false;
ok_for_high_bound = true;
}
else
/* If the constant fits both bounds, the result is known. */
if (ok_for_low_bound && ok_for_high_bound)
- return 1;
+ return true;
/* Perform some generic filtering which may allow making a decision
even if the bounds are not constant. First, negative integers
never fit in unsigned types, */
if (TYPE_UNSIGNED (type) && !unsc && double_int_negative_p (dc))
- return 0;
+ return false;
/* Second, narrower types always fit in wider ones. */
if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
- return 1;
+ return true;
/* Third, unsigned integers with top bit set never fit signed types. */
if (! TYPE_UNSIGNED (type) && unsc)
if (prec < HOST_BITS_PER_WIDE_INT)
{
if (((((unsigned HOST_WIDE_INT) 1) << prec) & dc.low) != 0)
- return 0;
+ return false;
}
else if (((((unsigned HOST_WIDE_INT) 1)
<< (prec - HOST_BITS_PER_WIDE_INT)) & dc.high) != 0)
- return 0;
+ return false;
}
/* If we haven't been able to decide at this point, there nothing more we
goto retry;
}
- /* Or to fit_double_type, if nothing else. */
- return !fit_double_type (dc.low, dc.high, &dc.low, &dc.high, type);
+ /* Or to double_int_fits_to_tree_p, if nothing else. */
+ return double_int_fits_to_tree_p (type, dc);
}
/* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
}
}
- if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
+ if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
&& TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
mpz_set_double_int (max, tree_to_double_int (TYPE_MAX_VALUE (type)),
TYPE_UNSIGNED (type));
auto_var_in_fn_p (const_tree var, const_tree fn)
{
return (DECL_P (var) && DECL_CONTEXT (var) == fn
- && (((TREE_CODE (var) == VAR_DECL || TREE_CODE (var) == PARM_DECL)
+ && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
+ || TREE_CODE (var) == PARM_DECL)
&& ! TREE_STATIC (var))
|| TREE_CODE (var) == LABEL_DECL
|| TREE_CODE (var) == RESULT_DECL));
definition we normally use, since that would produce infinite
recursion via pointers. */
/* This is variably modified if some field's type is. */
- for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
+ for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
if (TREE_CODE (t) == FIELD_DECL)
{
RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
p = q = ASTRDUP (first_global_object_name);
/* If the target is handling the constructors/destructors, they
will be local to this file and the name is only necessary for
- debugging purposes. */
- else if ((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
+ debugging purposes.
+ We also assign sub_I and sub_D suffixes to constructors called from
+ the global static constructors. These are always local. */
+ else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
+ || (strncmp (type, "sub_", 4) == 0
+ && (type[4] == 'I' || type[4] == 'D')))
{
const char *file = main_input_filename;
if (! file)
file = input_filename;
/* Just use the file's basename, because the full pathname
might be quite long. */
- p = strrchr (file, '/');
- if (p)
- p++;
- else
- p = file;
- p = q = ASTRDUP (p);
+ p = q = ASTRDUP (lbasename (file));
}
else
{
while ((code = va_arg (args, int)))
{
const char *prefix = length ? " or " : "expected ";
-
+
strcpy (tmp + length, prefix);
length += strlen (prefix);
strcpy (tmp + length, tree_code_name[code]);
whether CODE contains the tree structure identified by EN. */
void
-tree_contains_struct_check_failed (const_tree node,
+tree_contains_struct_check_failed (const_tree node,
const enum tree_node_structure_enum en,
- const char *file, int line,
+ const char *file, int line,
const char *function)
{
internal_error
SET_TYPE_STRUCTURAL_EQUALITY (t);
else if (TYPE_CANONICAL (innertype) != innertype
|| mode != VOIDmode)
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
layout_type (t);
- {
- tree index = build_int_cst (NULL_TREE, nunits - 1);
- tree array = build_array_type (TYPE_MAIN_VARIANT (innertype),
- build_index_type (index));
- tree rt = make_node (RECORD_TYPE);
-
- TYPE_FIELDS (rt) = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
- get_identifier ("f"), array);
- DECL_CONTEXT (TYPE_FIELDS (rt)) = rt;
- layout_type (rt);
- TYPE_DEBUG_REPRESENTATION_TYPE (t) = rt;
- /* In dwarfout.c, type lookup uses TYPE_UID numbers. We want to output
- the representation type, and we want to find that die when looking up
- the vector type. This is most easily achieved by making the TYPE_UID
- numbers equal. */
- TYPE_UID (rt) = TYPE_UID (t);
- }
-
hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
hashcode = iterative_hash_host_wide_int (nunits, hashcode);
hashcode = iterative_hash_host_wide_int (mode, hashcode);
if (size == LONG_LONG_TYPE_SIZE)
return (unsignedp ? long_long_unsigned_type_node
: long_long_integer_type_node);
+ if (size == 128 && int128_integer_type_node)
+ return (unsignedp ? int128_unsigned_type_node
+ : int128_integer_type_node);
if (unsignedp)
return make_unsigned_type (size);
this function to select one of the types as sizetype. */
void
-build_common_tree_nodes (bool signed_char, bool signed_sizetype)
+build_common_tree_nodes (bool signed_char)
{
error_mark_node = make_node (ERROR_MARK);
TREE_TYPE (error_mark_node) = error_mark_node;
- initialize_sizetypes (signed_sizetype);
+ initialize_sizetypes ();
/* Define both `signed char' and `unsigned char'. */
signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
-
+#if HOST_BITS_PER_WIDE_INT >= 64
+ /* TODO: This isn't correct, as the logic currently depends on the
+ host's wide-integer width instead of the target's.
+ If there is a target not supporting TImode, but has an 128-bit
+ integer-scalar register, this target check needs to be adjusted. */
+ if (targetm.scalar_mode_supported_p (TImode))
+ {
+ int128_integer_type_node = make_signed_type (128);
+ int128_unsigned_type_node = make_unsigned_type (128);
+ }
+#endif
/* Define a boolean type. This type only represents boolean values but
may be larger than char depending on the value of BOOL_TYPE_SIZE.
Front ends which want to override this size (i.e. Java) can redefine
build_common_tree_nodes_2 (int short_double)
{
/* Define these next since types below may used them. */
- integer_zero_node = build_int_cst (NULL_TREE, 0);
- integer_one_node = build_int_cst (NULL_TREE, 1);
- integer_minus_one_node = build_int_cst (NULL_TREE, -1);
+ integer_zero_node = build_int_cst (integer_type_node, 0);
+ integer_one_node = build_int_cst (integer_type_node, 1);
+ integer_three_node = build_int_cst (integer_type_node, 3);
+ integer_minus_one_node = build_int_cst (integer_type_node, -1);
size_zero_node = size_int (0);
size_one_node = size_int (1);
/* Decimal float types. */
dfloat32_type_node = make_node (REAL_TYPE);
- TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
+ TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
layout_type (dfloat32_type_node);
SET_TYPE_MODE (dfloat32_type_node, SDmode);
dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
dfloat128_type_node = make_node (REAL_TYPE);
- TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
+ TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
layout_type (dfloat128_type_node);
SET_TYPE_MODE (dfloat128_type_node, TDmode);
dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
declare the type to be __builtin_va_list. */
if (TREE_CODE (t) != RECORD_TYPE)
t = build_variant_type_copy (t);
-
+
va_list_type_node = t;
}
}
TREE_NOTHROW (decl) = 1;
if (ecf_flags & ECF_MALLOC)
DECL_IS_MALLOC (decl) = 1;
+ if (ecf_flags & ECF_LEAF)
+ DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
+ NULL, DECL_ATTRIBUTES (decl));
built_in_decls[code] = decl;
implicit_built_in_decls[code] = decl;
void
build_common_builtin_nodes (void)
{
- tree tmp, tmp2, ftype;
+ tree tmp, ftype;
if (built_in_decls[BUILT_IN_MEMCPY] == NULL
|| built_in_decls[BUILT_IN_MEMMOVE] == NULL)
{
- tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp);
- tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
- ftype = build_function_type (ptr_type_node, tmp);
+ ftype = build_function_type_list (ptr_type_node,
+ ptr_type_node, const_ptr_type_node,
+ size_type_node, NULL_TREE);
if (built_in_decls[BUILT_IN_MEMCPY] == NULL)
local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
- "memcpy", ECF_NOTHROW);
+ "memcpy", ECF_NOTHROW | ECF_LEAF);
if (built_in_decls[BUILT_IN_MEMMOVE] == NULL)
local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
- "memmove", ECF_NOTHROW);
+ "memmove", ECF_NOTHROW | ECF_LEAF);
}
if (built_in_decls[BUILT_IN_MEMCMP] == NULL)
{
- tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp);
- tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp);
- ftype = build_function_type (integer_type_node, tmp);
+ ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
+ const_ptr_type_node, size_type_node,
+ NULL_TREE);
local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
- "memcmp", ECF_PURE | ECF_NOTHROW);
+ "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
}
if (built_in_decls[BUILT_IN_MEMSET] == NULL)
{
- tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, integer_type_node, tmp);
- tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
- ftype = build_function_type (ptr_type_node, tmp);
+ ftype = build_function_type_list (ptr_type_node,
+ ptr_type_node, integer_type_node,
+ size_type_node, NULL_TREE);
local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
- "memset", ECF_NOTHROW);
+ "memset", ECF_NOTHROW | ECF_LEAF);
}
if (built_in_decls[BUILT_IN_ALLOCA] == NULL)
{
- tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
- ftype = build_function_type (ptr_type_node, tmp);
+ ftype = build_function_type_list (ptr_type_node,
+ size_type_node, NULL_TREE);
local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
- "alloca", ECF_NOTHROW | ECF_MALLOC);
+ "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
}
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
- tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
- ftype = build_function_type (void_type_node, tmp);
+ /* If we're checking the stack, `alloca' can throw. */
+ if (flag_stack_check)
+ TREE_NOTHROW (built_in_decls[BUILT_IN_ALLOCA]) = 0;
+
+ ftype = build_function_type_list (void_type_node,
+ ptr_type_node, ptr_type_node,
+ ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_init_trampoline", ftype,
BUILT_IN_INIT_TRAMPOLINE,
- "__builtin_init_trampoline", ECF_NOTHROW);
+ "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- ftype = build_function_type (ptr_type_node, tmp);
+ ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_adjust_trampoline", ftype,
BUILT_IN_ADJUST_TRAMPOLINE,
"__builtin_adjust_trampoline",
ECF_CONST | ECF_NOTHROW);
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
- ftype = build_function_type (void_type_node, tmp);
+ ftype = build_function_type_list (void_type_node,
+ ptr_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_nonlocal_goto", ftype,
BUILT_IN_NONLOCAL_GOTO,
"__builtin_nonlocal_goto",
ECF_NORETURN | ECF_NOTHROW);
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
- ftype = build_function_type (void_type_node, tmp);
+ ftype = build_function_type_list (void_type_node,
+ ptr_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_setjmp_setup", ftype,
BUILT_IN_SETJMP_SETUP,
"__builtin_setjmp_setup", ECF_NOTHROW);
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- ftype = build_function_type (ptr_type_node, tmp);
+ ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
BUILT_IN_SETJMP_DISPATCHER,
"__builtin_setjmp_dispatcher",
ECF_PURE | ECF_NOTHROW);
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- ftype = build_function_type (void_type_node, tmp);
+ ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_setjmp_receiver", ftype,
BUILT_IN_SETJMP_RECEIVER,
"__builtin_setjmp_receiver", ECF_NOTHROW);
- ftype = build_function_type (ptr_type_node, void_list_node);
+ ftype = build_function_type_list (ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
- "__builtin_stack_save", ECF_NOTHROW);
+ "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- ftype = build_function_type (void_type_node, tmp);
+ ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_stack_restore", ftype,
BUILT_IN_STACK_RESTORE,
- "__builtin_stack_restore", ECF_NOTHROW);
-
- ftype = build_function_type (void_type_node, void_list_node);
- local_define_builtin ("__builtin_profile_func_enter", ftype,
- BUILT_IN_PROFILE_FUNC_ENTER, "profile_func_enter", 0);
- local_define_builtin ("__builtin_profile_func_exit", ftype,
- BUILT_IN_PROFILE_FUNC_EXIT, "profile_func_exit", 0);
+ "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
/* If there's a possibility that we might use the ARM EABI, build the
alternate __cxa_end_cleanup node used to resume from C++ and Java. */
if (targetm.arm_eabi_unwinder)
{
- ftype = build_function_type (void_type_node, void_list_node);
+ ftype = build_function_type_list (void_type_node, NULL_TREE);
local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
BUILT_IN_CXA_END_CLEANUP,
- "__cxa_end_cleanup", ECF_NORETURN);
+ "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
}
- tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
- ftype = build_function_type (void_type_node, tmp);
+ ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
local_define_builtin ("__builtin_unwind_resume", ftype,
BUILT_IN_UNWIND_RESUME,
- (USING_SJLJ_EXCEPTIONS
+ ((targetm.except_unwind_info (&global_options)
+ == UI_SJLJ)
? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
ECF_NORETURN);
landing pad. These functions are PURE instead of CONST to prevent
them from being hoisted past the exception edge that will initialize
its value in the landing pad. */
- tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
- ftype = build_function_type (ptr_type_node, tmp);
+ ftype = build_function_type_list (ptr_type_node,
+ integer_type_node, NULL_TREE);
local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
- "__builtin_eh_pointer", ECF_PURE | ECF_NOTHROW);
+ "__builtin_eh_pointer", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
- tmp2 = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
- ftype = build_function_type (tmp2, tmp);
+ tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
+ ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
- "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW);
+ "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
- tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
- tmp = tree_cons (NULL_TREE, integer_type_node, tmp);
- ftype = build_function_type (void_type_node, tmp);
+ ftype = build_function_type_list (void_type_node,
+ integer_type_node, integer_type_node,
+ NULL_TREE);
local_define_builtin ("__builtin_eh_copy_values", ftype,
BUILT_IN_EH_COPY_VALUES,
"__builtin_eh_copy_values", ECF_NOTHROW);
/* Complex multiplication and division. These are handled as builtins
rather than optabs because emit_library_call_value doesn't support
- complex. Further, we can do slightly better with folding these
+ complex. Further, we can do slightly better with folding these
beasties if the real and complex parts of the arguments are separate. */
{
int mode;
continue;
inner_type = TREE_TYPE (type);
- tmp = tree_cons (NULL_TREE, inner_type, void_list_node);
- tmp = tree_cons (NULL_TREE, inner_type, tmp);
- tmp = tree_cons (NULL_TREE, inner_type, tmp);
- tmp = tree_cons (NULL_TREE, inner_type, tmp);
- ftype = build_function_type (type, tmp);
+ ftype = build_function_type_list (type, inner_type, inner_type,
+ inner_type, inner_type, NULL_TREE);
mcode = ((enum built_in_function)
(BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
built_in_names[mcode] = concat ("__mul", mode_name_buf, "3", NULL);
local_define_builtin (built_in_names[mcode], ftype, mcode,
- built_in_names[mcode], ECF_CONST | ECF_NOTHROW);
+ built_in_names[mcode], ECF_CONST | ECF_NOTHROW | ECF_LEAF);
built_in_names[dcode] = concat ("__div", mode_name_buf, "3", NULL);
local_define_builtin (built_in_names[dcode], ftype, dcode,
- built_in_names[dcode], ECF_CONST | ECF_NOTHROW);
+ built_in_names[dcode], ECF_CONST | ECF_NOTHROW | ECF_LEAF);
}
}
}
reconstruct_complex_type (tree type, tree bottom)
{
tree inner, outer;
-
+
if (TREE_CODE (type) == POINTER_TYPE)
{
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
/* The build_method_type_directly() routine prepends 'this' to argument list,
so we must compensate by getting rid of it. */
- outer
- = build_method_type_directly
+ outer
+ = build_method_type_directly
(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
inner,
TREE_CHAIN (TYPE_ARG_TYPES (type)));
else
return bottom;
- return build_qualified_type (outer, TYPE_QUALS (type));
+ return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
+ TYPE_QUALS (type));
}
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
return true;
}
+ case STRING_CST:
+ {
+ int i;
+
+ /* We need to loop through all elements to handle cases like
+ "\0" and "\0foobar". */
+ for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
+ if (TREE_STRING_POINTER (init)[i] != '\0')
+ return false;
+
+ return true;
+ }
+
default:
return false;
}
length = omp_clause_num_ops[code];
size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
- t = GGC_NEWVAR (union tree_node, size);
+ record_node_allocation_statistics (OMP_CLAUSE, size);
+
+ t = ggc_alloc_tree_node (size);
memset (t, 0, size);
TREE_SET_CODE (t, OMP_CLAUSE);
OMP_CLAUSE_SET_CODE (t, code);
OMP_CLAUSE_LOCATION (t) = loc;
-#ifdef GATHER_STATISTICS
- tree_node_counts[(int) omp_clause_kind]++;
- tree_node_sizes[(int) omp_clause_kind] += size;
-#endif
-
return t;
}
gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
gcc_assert (len >= 1);
-#ifdef GATHER_STATISTICS
- tree_node_counts[(int) e_kind]++;
- tree_node_sizes[(int) e_kind] += length;
-#endif
-
- t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
+ record_node_allocation_statistics (code, length);
- memset (t, 0, length);
+ t = ggc_alloc_zone_cleared_tree_node_stat (&tree_zone, length PASS_MEM_STAT);
TREE_SET_CODE (t, code);
return t;
}
+/* Helper function for build_call_* functions; build a CALL_EXPR with
+ indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
+ the argument slots. */
-/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE
- and FN and a null static chain slot. ARGLIST is a TREE_LIST of the
- arguments. */
-
-tree
-build_call_list (tree return_type, tree fn, tree arglist)
+static tree
+build_call_1 (tree return_type, tree fn, int nargs)
{
tree t;
- int i;
- t = build_vl_exp (CALL_EXPR, list_length (arglist) + 3);
+ t = build_vl_exp (CALL_EXPR, nargs + 3);
TREE_TYPE (t) = return_type;
CALL_EXPR_FN (t) = fn;
- CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
- for (i = 0; arglist; arglist = TREE_CHAIN (arglist), i++)
- CALL_EXPR_ARG (t, i) = TREE_VALUE (arglist);
- process_call_operands (t);
+ CALL_EXPR_STATIC_CHAIN (t) = NULL;
+
return t;
}
tree t;
int i;
- t = build_vl_exp (CALL_EXPR, nargs + 3);
- TREE_TYPE (t) = return_type;
- CALL_EXPR_FN (t) = fn;
- CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
+ t = build_call_1 (return_type, fn, nargs);
for (i = 0; i < nargs; i++)
CALL_EXPR_ARG (t, i) = va_arg (args, tree);
process_call_operands (t);
tree t;
int i;
- t = build_vl_exp (CALL_EXPR, nargs + 3);
- TREE_TYPE (t) = return_type;
- CALL_EXPR_FN (t) = fn;
- CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
+ t = build_call_1 (return_type, fn, nargs);
for (i = 0; i < nargs; i++)
CALL_EXPR_ARG (t, i) = args[i];
process_call_operands (t);
tree ret, t;
unsigned int ix;
- ret = build_vl_exp (CALL_EXPR, VEC_length (tree, args) + 3);
- TREE_TYPE (ret) = return_type;
- CALL_EXPR_FN (ret) = fn;
- CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
- for (ix = 0; VEC_iterate (tree, args, ix, t); ++ix)
+ ret = build_call_1 (return_type, fn, VEC_length (tree, args));
+ FOR_EACH_VEC_ELT (tree, args, ix, t)
CALL_EXPR_ARG (ret, ix) = t;
process_call_operands (ret);
return ret;
return (TREE_ADDRESSABLE (t)
|| is_global_var (t)
|| (TREE_CODE (t) == RESULT_DECL
+ && !DECL_BY_REFERENCE (t)
&& aggregate_value_p (t, current_function_decl)));
}
-/* There are situations in which a language considers record types
- compatible which have different field lists. Decide if two fields
- are compatible. It is assumed that the parent records are compatible. */
-
-bool
-fields_compatible_p (const_tree f1, const_tree f2)
-{
- if (!operand_equal_p (DECL_FIELD_BIT_OFFSET (f1),
- DECL_FIELD_BIT_OFFSET (f2), OEP_ONLY_CONST))
- return false;
-
- if (!operand_equal_p (DECL_FIELD_OFFSET (f1),
- DECL_FIELD_OFFSET (f2), OEP_ONLY_CONST))
- return false;
-
- if (!types_compatible_p (TREE_TYPE (f1), TREE_TYPE (f2)))
- return false;
-
- return true;
-}
-
-/* Locate within RECORD a field that is compatible with ORIG_FIELD. */
-
-tree
-find_compatible_field (tree record, tree orig_field)
-{
- tree f;
-
- for (f = TYPE_FIELDS (record); f ; f = TREE_CHAIN (f))
- if (TREE_CODE (f) == FIELD_DECL
- && fields_compatible_p (f, orig_field))
- return f;
-
- /* ??? Why isn't this on the main fields list? */
- f = TYPE_VFIELD (record);
- if (f && TREE_CODE (f) == FIELD_DECL
- && fields_compatible_p (f, orig_field))
- return f;
-
- /* ??? We should abort here, but Java appears to do Bad Things
- with inherited fields. */
- return orig_field;
-}
-
/* Return value of a constant X and sign-extend it. */
HOST_WIDE_INT
{
tree t = type;
if (POINTER_TYPE_P (type))
- t = size_type_node;
+ {
+ /* If the pointer points to the normal address space, use the
+ size_type_node. Otherwise use an appropriate size for the pointer
+ based on the named address space it points to. */
+ if (!TYPE_ADDR_SPACE (TREE_TYPE (t)))
+ t = size_type_node;
+ else
+ return lang_hooks.types.type_for_size (TYPE_PRECISION (t), unsignedp);
+ }
if (!INTEGRAL_TYPE_P (t) || TYPE_UNSIGNED (t) == unsignedp)
return t;
-
+
return lang_hooks.types.type_for_size (TYPE_PRECISION (t), unsignedp);
}
tree
upper_bound_in_type (tree outer, tree inner)
{
- unsigned HOST_WIDE_INT lo, hi;
+ double_int high;
unsigned int det = 0;
unsigned oprec = TYPE_PRECISION (outer);
unsigned iprec = TYPE_PRECISION (inner);
/* Compute 2^^prec - 1. */
if (prec <= HOST_BITS_PER_WIDE_INT)
{
- hi = 0;
- lo = ((~(unsigned HOST_WIDE_INT) 0)
+ high.high = 0;
+ high.low = ((~(unsigned HOST_WIDE_INT) 0)
>> (HOST_BITS_PER_WIDE_INT - prec));
}
else
{
- hi = ((~(unsigned HOST_WIDE_INT) 0)
+ high.high = ((~(unsigned HOST_WIDE_INT) 0)
>> (2 * HOST_BITS_PER_WIDE_INT - prec));
- lo = ~(unsigned HOST_WIDE_INT) 0;
+ high.low = ~(unsigned HOST_WIDE_INT) 0;
}
- return build_int_cst_wide (outer, lo, hi);
+ return double_int_to_tree (outer, high);
}
/* Returns the smallest value obtainable by casting something in INNER type to
tree
lower_bound_in_type (tree outer, tree inner)
{
- unsigned HOST_WIDE_INT lo, hi;
+ double_int low;
unsigned oprec = TYPE_PRECISION (outer);
unsigned iprec = TYPE_PRECISION (inner);
contains all values of INNER type. In particular, both INNER
and OUTER types have zero in common. */
|| (oprec > iprec && TYPE_UNSIGNED (inner)))
- lo = hi = 0;
+ low.low = low.high = 0;
else
{
/* If we are widening a signed type to another signed type, we
if (prec <= HOST_BITS_PER_WIDE_INT)
{
- hi = ~(unsigned HOST_WIDE_INT) 0;
- lo = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
+ low.high = ~(unsigned HOST_WIDE_INT) 0;
+ low.low = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
}
else
{
- hi = ((~(unsigned HOST_WIDE_INT) 0)
+ low.high = ((~(unsigned HOST_WIDE_INT) 0)
<< (prec - HOST_BITS_PER_WIDE_INT - 1));
- lo = 0;
+ low.low = 0;
}
}
- return build_int_cst_wide (outer, lo, hi);
+ return double_int_to_tree (outer, low);
}
/* Return nonzero if two operands that are suitable for PHI nodes are
}
/* Returns number of zeros at the end of binary representation of X.
-
+
??? Use ffs if available? */
tree
case BIND_EXPR:
{
tree decl;
- for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
+ for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
{
/* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
into declarations that are just mentioned, rather than
that are directly gimplified in gimplify_type_sizes in order for the
mark/copy-if-shared/unmark machinery of the gimplifier to work with
variable-sized types.
-
+
Note that DECLs get walked as part of processing the BIND_EXPR. */
if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
{
return result;
/* If this is a record type, also walk the fields. */
- if (TREE_CODE (*type_p) == RECORD_TYPE
- || TREE_CODE (*type_p) == UNION_TYPE
- || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
+ if (RECORD_OR_UNION_TYPE_P (*type_p))
{
tree field;
for (field = TYPE_FIELDS (*type_p); field;
- field = TREE_CHAIN (field))
+ field = DECL_CHAIN (field))
{
/* We'd like to look at the type of the field, but we can
easily get infinite recursion. So assume it's pointed
return NULL;
}
-/* Build and return a TREE_LIST of arguments in the CALL_EXPR exp.
- FIXME: don't use this function. It exists for compatibility with
- the old representation of CALL_EXPRs where a list was used to hold the
- arguments. Places that currently extract the arglist from a CALL_EXPR
- ought to be rewritten to use the CALL_EXPR itself. */
-tree
-call_expr_arglist (tree exp)
-{
- tree arglist = NULL_TREE;
- int i;
- for (i = call_expr_nargs (exp) - 1; i >= 0; i--)
- arglist = tree_cons (NULL_TREE, CALL_EXPR_ARG (exp, i), arglist);
- return arglist;
-}
-
-
/* Create a nameless artificial label and put it in the current
function context. The label has a location of LOC. Returns the
newly created label. */
/* Return true if TYPE has a variable argument list. */
bool
-stdarg_p (tree fntype)
+stdarg_p (const_tree fntype)
{
function_args_iterator args_iter;
tree n = NULL_TREE, t;
/* Use the cache of optimization nodes. */
- cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node));
+ cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
+ &global_options);
slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
t = (tree) *slot;
/* Use the cache of optimization nodes. */
- cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node));
+ cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
+ &global_options);
slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
t = (tree) *slot;
outer_type = TREE_TYPE (exp);
inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ if (!inner_type)
+ return false;
+
/* Use precision rather then machine mode when we can, which gives
the correct answer even for submode (bit-field) types. */
if ((INTEGRAL_TYPE_P (outer_type)
lhd_gcc_personality (void)
{
if (!gcc_eh_personality_decl)
- gcc_eh_personality_decl
- = build_personality_function (USING_SJLJ_EXCEPTIONS
- ? "__gcc_personality_sj0"
- : "__gcc_personality_v0");
-
+ gcc_eh_personality_decl = build_personality_function ("gcc");
return gcc_eh_personality_decl;
}
+/* Try to find a base info of BINFO that would have its field decl at offset
+ OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
+ found, return it; otherwise return NULL_TREE. */
+
+tree
+get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
+{
+ tree type = BINFO_TYPE (binfo);
+
+ while (true)
+ {
+ HOST_WIDE_INT pos, size;
+ tree fld;
+ int i;
+
+ if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
+ return binfo;
+ if (offset < 0)
+ return NULL_TREE;
+
+ /* Find the field whose bit range [pos, pos + size) covers OFFSET. */
+ for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
+ {
+ if (TREE_CODE (fld) != FIELD_DECL)
+ continue;
+
+ pos = int_bit_position (fld);
+ size = tree_low_cst (DECL_SIZE (fld), 1);
+ if (pos <= offset && (pos + size) > offset)
+ break;
+ }
+ /* Only a RECORD_TYPE field can contain the base sub-object we are
+ looking for; give up otherwise. */
+ if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
+ return NULL_TREE;
+
+ /* A non-artificial field is an ordinary member; descend into its
+ own binfo. */
+ if (!DECL_ARTIFICIAL (fld))
+ {
+ binfo = TYPE_BINFO (TREE_TYPE (fld));
+ if (!binfo)
+ return NULL_TREE;
+ }
+ /* Offset 0 indicates the primary base, whose vtable contents are
+ represented in the binfo for the derived class. */
+ else if (offset != 0)
+ {
+ /* Artificial field at a nonzero offset: locate the base binfo
+ whose type matches the field's type. */
+ tree base_binfo, found_binfo = NULL_TREE;
+ for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+ if (TREE_TYPE (base_binfo) == TREE_TYPE (fld))
+ {
+ found_binfo = base_binfo;
+ break;
+ }
+ if (!found_binfo)
+ return NULL_TREE;
+ binfo = found_binfo;
+ }
+
+ /* Continue the search inside the field, with OFFSET made relative
+ to its start. */
+ type = TREE_TYPE (fld);
+ offset -= pos;
+ }
+}
+
+/* Returns true if X is a typedef decl. */
+
+bool
+is_typedef_decl (tree x)
+{
+ /* A typedef is represented as a TYPE_DECL whose DECL_ORIGINAL_TYPE
+ is set; X may be NULL_TREE. */
+ return (x && TREE_CODE (x) == TYPE_DECL
+ && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
+}
+
+/* Returns true iff TYPE is a type variant created for a typedef. */
+
+bool
+typedef_variant_p (tree type)
+{
+ /* Such variants are recognized by their TYPE_NAME being a typedef decl. */
+ return is_typedef_decl (TYPE_NAME (type));
+}
+
+/* Warn about a use of an identifier which was marked deprecated.
+ NODE is the deprecated decl or type being used. ATTR, if non-NULL,
+ is the attribute list to search for the "deprecated" attribute;
+ otherwise the attributes are taken from NODE itself. */
+void
+warn_deprecated_use (tree node, tree attr)
+{
+ const char *msg;
+
+ if (node == 0 || !warn_deprecated_decl)
+ return;
+
+ /* No attribute list supplied: fetch it from the decl, or for a type
+ from the stub decl's type attributes. */
+ if (!attr)
+ {
+ if (DECL_P (node))
+ attr = DECL_ATTRIBUTES (node);
+ else if (TYPE_P (node))
+ {
+ tree decl = TYPE_STUB_DECL (node);
+ if (decl)
+ attr = lookup_attribute ("deprecated",
+ TYPE_ATTRIBUTES (TREE_TYPE (decl)));
+ }
+ }
+
+ if (attr)
+ attr = lookup_attribute ("deprecated", attr);
+
+ /* Extract the optional custom message supplied with the attribute.
+ NOTE(review): this dereferences TREE_VALUE (attr) without checking
+ it — assumes a found "deprecated" attribute always carries a message
+ argument; confirm attribute handlers guarantee this. */
+ if (attr)
+ msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
+ else
+ msg = NULL;
+
+ /* For a decl, report its name and source location directly. */
+ if (DECL_P (node))
+ {
+ expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
+ if (msg)
+ warning (OPT_Wdeprecated_declarations,
+ "%qD is deprecated (declared at %s:%d): %s",
+ node, xloc.file, xloc.line, msg);
+ else
+ warning (OPT_Wdeprecated_declarations,
+ "%qD is deprecated (declared at %s:%d)",
+ node, xloc.file, xloc.line);
+ }
+ else if (TYPE_P (node))
+ {
+ /* For a type, try to find a printable name (WHAT) and use the stub
+ decl, if any, for the declaration location. */
+ tree what = NULL_TREE;
+ tree decl = TYPE_STUB_DECL (node);
+
+ if (TYPE_NAME (node))
+ {
+ if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
+ what = TYPE_NAME (node);
+ else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
+ && DECL_NAME (TYPE_NAME (node)))
+ what = DECL_NAME (TYPE_NAME (node));
+ }
+
+ if (decl)
+ {
+ expanded_location xloc
+ = expand_location (DECL_SOURCE_LOCATION (decl));
+ if (what)
+ {
+ if (msg)
+ warning (OPT_Wdeprecated_declarations,
+ "%qE is deprecated (declared at %s:%d): %s",
+ what, xloc.file, xloc.line, msg);
+ else
+ warning (OPT_Wdeprecated_declarations,
+ "%qE is deprecated (declared at %s:%d)", what,
+ xloc.file, xloc.line);
+ }
+ else
+ {
+ if (msg)
+ warning (OPT_Wdeprecated_declarations,
+ "type is deprecated (declared at %s:%d): %s",
+ xloc.file, xloc.line, msg);
+ else
+ warning (OPT_Wdeprecated_declarations,
+ "type is deprecated (declared at %s:%d)",
+ xloc.file, xloc.line);
+ }
+ }
+ else
+ {
+ /* No stub decl: warn without a declaration location. */
+ if (what)
+ {
+ if (msg)
+ warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
+ what, msg);
+ else
+ warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
+ }
+ else
+ {
+ if (msg)
+ warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
+ msg);
+ else
+ warning (OPT_Wdeprecated_declarations, "type is deprecated");
+ }
+ }
+ }
+}
+
#include "gt-tree.h"