/* Language-independent node constructors for parse phase of GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* This file contains the low level primitives for operating on tree nodes,
including allocation, list operations, interning of identifiers,
"unary",
"binary",
"statement",
+ "vl_exp",
"expression",
+ "gimple_stmt"
};
/* obstack.[ch] explicitly declined to prototype this. */
"binfos",
"phi_nodes",
"ssa names",
+ "constructors",
"random kinds",
"lang_decl kinds",
- "lang_type kinds"
+ "lang_type kinds",
+ "omp clauses",
+ "gimple statements"
};
#endif /* GATHER_STATISTICS */
/* General tree->tree mapping structure for use in hash tables. */
-struct tree_map GTY(())
-{
- hashval_t hash;
- tree from;
- tree to;
-};
static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
htab_t debug_expr_for_decl;
+static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+ htab_t value_expr_for_decl;
+
+static GTY ((if_marked ("tree_priority_map_marked_p"),
+ param_is (struct tree_priority_map)))
+ htab_t init_priority_for_decl;
+
+static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
+ htab_t restrict_base_for_decl;
+
static void set_type_quals (tree, int);
static int type_hash_eq (const void *, const void *);
static hashval_t type_hash_hash (const void *);
-static int tree_map_eq (const void *, const void *);
-static hashval_t tree_map_hash (const void *);
static hashval_t int_cst_hash_hash (const void *);
static int int_cst_hash_eq (const void *, const void *);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
-static tree make_vector_type (tree, int, enum machine_mode);
+static void print_value_expr_statistics (void);
static int type_hash_marked_p (const void *);
-static int tree_map_marked_p (const void *);
static unsigned int type_hash_list (tree, hashval_t);
static unsigned int attribute_hash_list (tree, hashval_t);
tree global_trees[TI_MAX];
tree integer_types[itk_none];
+unsigned char tree_contains_struct[256][64];
+
+/* Number of operands for each OpenMP clause. */
+/* NOTE(review): this table is indexed by clause code, so the entries
+   must stay in the same order as enum omp_clause_code -- confirm
+   against tree.h whenever a clause is added.  */
+unsigned const char omp_clause_num_ops[] =
+{
+ 0, /* OMP_CLAUSE_ERROR */
+ 1, /* OMP_CLAUSE_PRIVATE */
+ 1, /* OMP_CLAUSE_SHARED */
+ 1, /* OMP_CLAUSE_FIRSTPRIVATE */
+ 1, /* OMP_CLAUSE_LASTPRIVATE */
+ 4, /* OMP_CLAUSE_REDUCTION */
+ 1, /* OMP_CLAUSE_COPYIN */
+ 1, /* OMP_CLAUSE_COPYPRIVATE */
+ 1, /* OMP_CLAUSE_IF */
+ 1, /* OMP_CLAUSE_NUM_THREADS */
+ 1, /* OMP_CLAUSE_SCHEDULE */
+ 0, /* OMP_CLAUSE_NOWAIT */
+ 0, /* OMP_CLAUSE_ORDERED */
+ 0 /* OMP_CLAUSE_DEFAULT */
+};
+
+/* Printable name of each OpenMP clause code, parallel to
+   omp_clause_num_ops above (same index = same clause).  */
+const char * const omp_clause_code_name[] =
+{
+ "error_clause",
+ "private",
+ "shared",
+ "firstprivate",
+ "lastprivate",
+ "reduction",
+ "copyin",
+ "copyprivate",
+ "if",
+ "num_threads",
+ "schedule",
+ "nowait",
+ "ordered",
+ "default"
+};
\f
/* Init tree.c. */
debug_expr_for_decl = htab_create_ggc (512, tree_map_hash,
tree_map_eq, 0);
+ value_expr_for_decl = htab_create_ggc (512, tree_map_hash,
+ tree_map_eq, 0);
+ init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
+ tree_priority_map_eq, 0);
+ restrict_base_for_decl = htab_create_ggc (256, tree_map_hash,
+ tree_map_eq, 0);
+
int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
int_cst_hash_eq, NULL);
int_cst_node = make_node (INTEGER_CST);
+ tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON] = 1;
+ tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_NON_COMMON] = 1;
+ tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON] = 1;
+
+
+ tree_contains_struct[CONST_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[VAR_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[PARM_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[RESULT_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[TYPE_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[LABEL_DECL][TS_DECL_COMMON] = 1;
+ tree_contains_struct[FIELD_DECL][TS_DECL_COMMON] = 1;
+
+
+ tree_contains_struct[CONST_DECL][TS_DECL_WRTL] = 1;
+ tree_contains_struct[VAR_DECL][TS_DECL_WRTL] = 1;
+ tree_contains_struct[PARM_DECL][TS_DECL_WRTL] = 1;
+ tree_contains_struct[RESULT_DECL][TS_DECL_WRTL] = 1;
+ tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL] = 1;
+ tree_contains_struct[LABEL_DECL][TS_DECL_WRTL] = 1;
+
+ tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[STRUCT_FIELD_TAG][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[NAME_MEMORY_TAG][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[SYMBOL_MEMORY_TAG][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[MEMORY_PARTITION_TAG][TS_DECL_MINIMAL] = 1;
+
+ tree_contains_struct[STRUCT_FIELD_TAG][TS_MEMORY_TAG] = 1;
+ tree_contains_struct[NAME_MEMORY_TAG][TS_MEMORY_TAG] = 1;
+ tree_contains_struct[SYMBOL_MEMORY_TAG][TS_MEMORY_TAG] = 1;
+ tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_TAG] = 1;
+
+ tree_contains_struct[STRUCT_FIELD_TAG][TS_STRUCT_FIELD_TAG] = 1;
+ tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_PARTITION_TAG] = 1;
+
+ tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS] = 1;
+ tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS] = 1;
+ tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS] = 1;
+ tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_WITH_VIS] = 1;
+
+ tree_contains_struct[VAR_DECL][TS_VAR_DECL] = 1;
+ tree_contains_struct[FIELD_DECL][TS_FIELD_DECL] = 1;
+ tree_contains_struct[PARM_DECL][TS_PARM_DECL] = 1;
+ tree_contains_struct[LABEL_DECL][TS_LABEL_DECL] = 1;
+ tree_contains_struct[RESULT_DECL][TS_RESULT_DECL] = 1;
+ tree_contains_struct[CONST_DECL][TS_CONST_DECL] = 1;
+ tree_contains_struct[TYPE_DECL][TS_TYPE_DECL] = 1;
+ tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL] = 1;
+
+ lang_hooks.init_ts ();
}
\f
{
if (!DECL_ASSEMBLER_NAME_SET_P (decl))
lang_hooks.set_decl_assembler_name (decl);
- return DECL_CHECK (decl)->decl.assembler_name;
+ return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
+}
+
+/* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
+
+bool
+decl_assembler_name_equal (tree decl, tree asmname)
+{
+ /* NOTE(review): DECL_ASSEMBLER_NAME may compute and cache the name as
+    a side effect for decls that do not have one yet -- confirm.  */
+ tree decl_asmname = DECL_ASSEMBLER_NAME (decl);
+
+ /* Identifiers are interned, so pointer equality is name equality.  */
+ if (decl_asmname == asmname)
+ return true;
+
+ /* If the target assembler name was set by the user, things are trickier.
+ We have a leading '*' to begin with. After that, it's arguable what
+ is the correct thing to do with -fleading-underscore. Arguably, we've
+ historically been doing the wrong thing in assemble_alias by always
+ printing the leading underscore. Since we're not changing that, make
+ sure user_label_prefix follows the '*' before matching. */
+ if (IDENTIFIER_POINTER (decl_asmname)[0] == '*')
+ {
+ const char *decl_str = IDENTIFIER_POINTER (decl_asmname) + 1;
+ size_t ulp_len = strlen (user_label_prefix);
+
+ if (ulp_len == 0)
+ ;
+ else if (strncmp (decl_str, user_label_prefix, ulp_len) == 0)
+ decl_str += ulp_len;
+ else
+ return false;
+
+ return strcmp (decl_str, IDENTIFIER_POINTER (asmname)) == 0;
+ }
+
+ /* Not user-set ('*'-prefixed) and not pointer-identical: no match.  */
+ return false;
+}
/* Compute the number of bytes occupied by a tree with code CODE.
- This function cannot be used for TREE_VEC, PHI_NODE, or STRING_CST
- codes, which are of variable length. */
+ This function cannot be used for nodes that have variable sizes,
+ including TREE_VEC, PHI_NODE, STRING_CST, and CALL_EXPR. */
size_t
tree_code_size (enum tree_code code)
{
switch (TREE_CODE_CLASS (code))
{
case tcc_declaration: /* A decl node */
- return sizeof (struct tree_decl);
+ {
+ switch (code)
+ {
+ case FIELD_DECL:
+ return sizeof (struct tree_field_decl);
+ case PARM_DECL:
+ return sizeof (struct tree_parm_decl);
+ case VAR_DECL:
+ return sizeof (struct tree_var_decl);
+ case LABEL_DECL:
+ return sizeof (struct tree_label_decl);
+ case RESULT_DECL:
+ return sizeof (struct tree_result_decl);
+ case CONST_DECL:
+ return sizeof (struct tree_const_decl);
+ case TYPE_DECL:
+ return sizeof (struct tree_type_decl);
+ case FUNCTION_DECL:
+ return sizeof (struct tree_function_decl);
+ case NAME_MEMORY_TAG:
+ case SYMBOL_MEMORY_TAG:
+ return sizeof (struct tree_memory_tag);
+ case STRUCT_FIELD_TAG:
+ return sizeof (struct tree_struct_field_tag);
+ case MEMORY_PARTITION_TAG:
+ return sizeof (struct tree_memory_partition_tag);
+ default:
+ return sizeof (struct tree_decl_non_common);
+ }
+ }
case tcc_type: /* a type node */
return sizeof (struct tree_type);
case tcc_unary: /* a unary arithmetic expression */
case tcc_binary: /* a binary arithmetic expression */
return (sizeof (struct tree_exp)
+ + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
+
+    case tcc_gimple_stmt:
+      /* gimple_stmt embeds one operand slot; the remaining
+	 TREE_CODE_LENGTH (code) - 1 operands are allocated after the
+	 struct.  Note the binary '+' between the two terms: without it
+	 the expression is a syntax error (compare the tcc_exp case
+	 above, where the diff shows the marker and the operator as
+	 "+ +").  */
+      return (sizeof (struct gimple_stmt)
+	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (char *));
case tcc_constant: /* a constant */
case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
case TREE_VEC:
+ case OMP_CLAUSE:
case PHI_NODE: gcc_unreachable ();
case SSA_NAME: return sizeof (struct tree_ssa_name);
case STATEMENT_LIST: return sizeof (struct tree_statement_list);
case BLOCK: return sizeof (struct tree_block);
case VALUE_HANDLE: return sizeof (struct tree_value_handle);
+ case CONSTRUCTOR: return sizeof (struct tree_constructor);
default:
return lang_hooks.tree_size (code);
}
/* Compute the number of bytes occupied by NODE. This routine only
- looks at TREE_CODE, except for PHI_NODE and TREE_VEC nodes. */
+ looks at TREE_CODE, except for those nodes that have variable sizes. */
size_t
tree_size (tree node)
{
case TREE_VEC:
return (sizeof (struct tree_vec)
- + (TREE_VEC_LENGTH (node) - 1) * sizeof(char *));
+ + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
case STRING_CST:
- return sizeof (struct tree_string) + TREE_STRING_LENGTH (node) - 1;
+ return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
+
+ case OMP_CLAUSE:
+ return (sizeof (struct tree_omp_clause)
+ + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
+ * sizeof (tree));
default:
- return tree_code_size (code);
+ if (TREE_CODE_CLASS (code) == tcc_vl_exp)
+ return (sizeof (struct tree_exp)
+ + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
+ else
+ return tree_code_size (code);
}
}
/* Return a newly allocated node of code CODE. For decl and type
nodes, some other fields are initialized. The rest of the node is
- initialized to zero. This function cannot be used for PHI_NODE or
- TREE_VEC nodes, which is enforced by asserts in tree_code_size.
+ initialized to zero. This function cannot be used for PHI_NODE,
+ TREE_VEC or OMP_CLAUSE nodes, which is enforced by asserts in
+ tree_code_size.
Achoo! I got a code in the node. */
kind = c_kind;
break;
+ case tcc_gimple_stmt:
+ kind = gimple_stmt_kind;
+ break;
+
case tcc_exceptional: /* something random, like an identifier. */
switch (code)
{
kind = id_kind;
break;
- case TREE_VEC:;
+ case TREE_VEC:
kind = vec_kind;
break;
kind = b_kind;
break;
+ case CONSTRUCTOR:
+ kind = constr_kind;
+ break;
+
default:
kind = x_kind;
break;
break;
case tcc_declaration:
- if (code != FUNCTION_DECL)
- DECL_ALIGN (t) = 1;
- DECL_USER_ALIGN (t) = 0;
- DECL_IN_SYSTEM_HEADER (t) = in_system_header;
+ if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
+ DECL_IN_SYSTEM_HEADER (t) = in_system_header;
+ if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
+ {
+ if (code != FUNCTION_DECL)
+ DECL_ALIGN (t) = 1;
+ DECL_USER_ALIGN (t) = 0;
+ /* We have not yet computed the alias set for this declaration. */
+ DECL_POINTER_ALIAS_SET (t) = -1;
+ }
DECL_SOURCE_LOCATION (t) = input_location;
DECL_UID (t) = next_decl_uid++;
- /* We have not yet computed the alias set for this declaration. */
- DECL_POINTER_ALIAS_SET (t) = -1;
break;
case tcc_type:
TYPE_UID (t) = next_type_uid++;
- TYPE_ALIGN (t) = char_type_node ? TYPE_ALIGN (char_type_node) : 0;
+ TYPE_ALIGN (t) = BITS_PER_UNIT;
TYPE_USER_ALIGN (t) = 0;
TYPE_MAIN_VARIANT (t) = t;
+ TYPE_CANONICAL (t) = t;
/* Default to no attributes for type, but let target change that. */
TYPE_ATTRIBUTES (t) = NULL_TREE;
}
break;
+ case tcc_gimple_stmt:
+ switch (code)
+ {
+ case GIMPLE_MODIFY_STMT:
+ TREE_SIDE_EFFECTS (t) = 1;
+ break;
+
+ default:
+ break;
+ }
+
default:
/* Other classes need no special treatment. */
break;
t = ggc_alloc_zone_pass_stat (length, &tree_zone);
memcpy (t, node, length);
- TREE_CHAIN (t) = 0;
+ if (!GIMPLE_TUPLE_P (node))
+ TREE_CHAIN (t) = 0;
TREE_ASM_WRITTEN (t) = 0;
TREE_VISITED (t) = 0;
- t->common.ann = 0;
+ t->base.ann = 0;
if (TREE_CODE_CLASS (code) == tcc_declaration)
- DECL_UID (t) = next_decl_uid++;
+ {
+ DECL_UID (t) = next_decl_uid++;
+ if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
+ && DECL_HAS_VALUE_EXPR_P (node))
+ {
+ SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
+ DECL_HAS_VALUE_EXPR_P (t) = 1;
+ }
+ if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
+ {
+ SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
+ DECL_HAS_INIT_PRIORITY_P (t) = 1;
+ }
+ if (TREE_CODE (node) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (node))
+ {
+ SET_DECL_RESTRICT_BASE (t, DECL_GET_RESTRICT_BASE (node));
+ DECL_BASED_ON_RESTRICT_P (t) = 1;
+ }
+ }
else if (TREE_CODE_CLASS (code) == tcc_type)
{
TYPE_UID (t) = next_type_uid++;
tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
+ /* Support legacy code. */
+ if (!type)
+ type = integer_type_node;
+
return build_int_cst_wide (type, low, low < 0 ? -1 : 0);
}
tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
- unsigned HOST_WIDE_INT val = (unsigned HOST_WIDE_INT) low;
- unsigned HOST_WIDE_INT hi, mask;
- unsigned bits;
- bool signed_p;
- bool negative;
-
- if (!type)
- type = integer_type_node;
-
- bits = TYPE_PRECISION (type);
- signed_p = !TYPE_UNSIGNED (type);
+ unsigned HOST_WIDE_INT low1;
+ HOST_WIDE_INT hi;
- if (bits >= HOST_BITS_PER_WIDE_INT)
- negative = (low < 0);
- else
- {
- /* If the sign bit is inside precision of LOW, use it to determine
- the sign of the constant. */
- negative = ((val >> (bits - 1)) & 1) != 0;
+ gcc_assert (type);
- /* Mask out the bits outside of the precision of the constant. */
- mask = (((unsigned HOST_WIDE_INT) 2) << (bits - 1)) - 1;
-
- if (signed_p && negative)
- val |= ~mask;
- else
- val &= mask;
- }
+ fit_double_type (low, low < 0 ? -1 : 0, &low1, &hi, type);
- /* Determine the high bits. */
- hi = (negative ? ~(unsigned HOST_WIDE_INT) 0 : 0);
+ return build_int_cst_wide (type, low1, hi);
+}
- /* For unsigned type we need to mask out the bits outside of the type
- precision. */
- if (!signed_p)
- {
- if (bits <= HOST_BITS_PER_WIDE_INT)
- hi = 0;
- else
- {
- bits -= HOST_BITS_PER_WIDE_INT;
- mask = (((unsigned HOST_WIDE_INT) 2) << (bits - 1)) - 1;
- hi &= mask;
- }
- }
+/* Create an INT_CST node of TYPE and value HI:LOW. The value is truncated
+ and sign extended according to the value range of TYPE. */
- return build_int_cst_wide (type, val, hi);
+tree
+build_int_cst_wide_type (tree type,
+ unsigned HOST_WIDE_INT low, HOST_WIDE_INT high)
+{
+ fit_double_type (low, high, &low, &high, type);
+ return build_int_cst_wide (type, low, high);
}
/* These are the hash table functions for the hash table of INTEGER_CST
&& TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt));
}
-/* Create an INT_CST node of TYPE and value HI:LOW. If TYPE is NULL,
- integer_type_node is used. The returned node is always shared.
- For small integers we use a per-type vector cache, for larger ones
- we use a single hash table. */
+/* Create an INT_CST node of TYPE and value HI:LOW.
+ The returned node is always shared. For small integers we use a
+ per-type vector cache, for larger ones we use a single hash table. */
tree
build_int_cst_wide (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
int ix = -1;
int limit = 0;
- if (!type)
- type = integer_type_node;
+ gcc_assert (type);
switch (TREE_CODE (type))
{
break;
case INTEGER_TYPE:
- case CHAR_TYPE:
case OFFSET_TYPE:
if (TYPE_UNSIGNED (type))
{
ix = 0;
}
break;
- default:
+
+ case ENUMERAL_TYPE:
break;
+
+ default:
+ gcc_unreachable ();
}
if (ix >= 0)
}
/* Return a new VECTOR_CST node whose type is TYPE and whose values
- are in a list pointed by VALS. */
+ are in a list pointed to by VALS. */
tree
build_vector (tree type, tree vals)
{
tree v = make_node (VECTOR_CST);
- int over1 = 0, over2 = 0;
+ int over = 0;
tree link;
TREE_VECTOR_CST_ELTS (v) = vals;
{
tree value = TREE_VALUE (link);
- over1 |= TREE_OVERFLOW (value);
- over2 |= TREE_CONSTANT_OVERFLOW (value);
- }
+ /* Don't crash if we get an address constant. */
+ if (!CONSTANT_CLASS_P (value))
+ continue;
- TREE_OVERFLOW (v) = over1;
- TREE_CONSTANT_OVERFLOW (v) = over2;
+ over |= TREE_OVERFLOW (value);
+ }
+ TREE_OVERFLOW (v) = over;
return v;
}
+/* Return a new VECTOR_CST node whose type is TYPE and whose values
+ are extracted from V, a vector of CONSTRUCTOR_ELT. */
+
+tree
+build_vector_from_ctor (tree type, VEC(constructor_elt,gc) *v)
+{
+ tree list = NULL_TREE;
+ unsigned HOST_WIDE_INT idx;
+ tree value;
+
+ /* Consing prepends, so the list is built in reverse; nreverse
+    restores the constructor's element order before building the
+    VECTOR_CST.  Element indices (PURPOSEs) are discarded.  */
+ FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
+ list = tree_cons (NULL_TREE, value, list);
+ return build_vector (type, nreverse (list));
+}
+
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
- are in a list pointed to by VALS. */
+ are in the VEC pointed to by VALS. */
tree
-build_constructor (tree type, tree vals)
+build_constructor (tree type, VEC(constructor_elt,gc) *vals)
{
tree c = make_node (CONSTRUCTOR);
TREE_TYPE (c) = type;
CONSTRUCTOR_ELTS (c) = vals;
+ return c;
+}
+
+/* Build a CONSTRUCTOR node made of a single initializer, with the specified
+ INDEX and VALUE. */
+tree
+build_constructor_single (tree type, tree index, tree value)
+{
+ VEC(constructor_elt,gc) *v;
+ constructor_elt *elt;
+ tree t;
+
+ /* Exactly one element, pushed into a freshly allocated VEC.  */
+ v = VEC_alloc (constructor_elt, gc, 1);
+ elt = VEC_quick_push (constructor_elt, v, NULL);
+ elt->index = index;
+ elt->value = value;
+
+ t = build_constructor (type, v);
+ /* A one-element constructor is constant iff its sole value is.  */
+ TREE_CONSTANT (t) = TREE_CONSTANT (value);
+ return t;
+}
+
+
+/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
+ are in a list pointed to by VALS. */
+tree
+build_constructor_from_list (tree type, tree vals)
+{
+ tree t, val;
+ VEC(constructor_elt,gc) *v = NULL;
+ bool constant_p = true;
- /* ??? May not be necessary. Mirrors what build does. */
if (vals)
{
- TREE_SIDE_EFFECTS (c) = TREE_SIDE_EFFECTS (vals);
- TREE_READONLY (c) = TREE_READONLY (vals);
- TREE_CONSTANT (c) = TREE_CONSTANT (vals);
- TREE_INVARIANT (c) = TREE_INVARIANT (vals);
+ v = VEC_alloc (constructor_elt, gc, list_length (vals));
+ for (t = vals; t; t = TREE_CHAIN (t))
+ {
+ constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
+ val = TREE_VALUE (t);
+ elt->index = TREE_PURPOSE (t);
+ elt->value = val;
+ if (!TREE_CONSTANT (val))
+ constant_p = false;
+ }
}
- return c;
+ t = build_constructor (type, v);
+ TREE_CONSTANT (t) = constant_p;
+ return t;
}
+
/* Return a new REAL_CST node whose type is TYPE and value is D. */
tree
TREE_TYPE (v) = type;
TREE_REAL_CST_PTR (v) = dp;
- TREE_OVERFLOW (v) = TREE_CONSTANT_OVERFLOW (v) = overflow;
+ TREE_OVERFLOW (v) = overflow;
return v;
}
v = build_real (type, real_value_from_int_cst (type, i));
TREE_OVERFLOW (v) |= overflow;
- TREE_CONSTANT_OVERFLOW (v) |= overflow;
return v;
}
{
tree s;
size_t length;
-
- length = len + sizeof (struct tree_string);
+
+ /* Do not waste bytes provided by padding of struct tree_string. */
+ length = len + offsetof (struct tree_string, str) + 1;
#ifdef GATHER_STATISTICS
tree_node_counts[(int) c_kind]++;
memset (s, 0, sizeof (struct tree_common));
TREE_SET_CODE (s, STRING_CST);
+ TREE_CONSTANT (s) = 1;
+ TREE_INVARIANT (s) = 1;
TREE_STRING_LENGTH (s) = len;
memcpy ((char *) TREE_STRING_POINTER (s), str, len);
((char *) TREE_STRING_POINTER (s))[len] = '\0';
TREE_IMAGPART (t) = imag;
TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
- TREE_CONSTANT_OVERFLOW (t)
- = TREE_CONSTANT_OVERFLOW (real) | TREE_CONSTANT_OVERFLOW (imag);
return t;
}
+/* Return a constant of arithmetic type TYPE which is the
+ multiplicative identity of the set TYPE. */
+
+tree
+build_one_cst (tree type)
+{
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ case OFFSET_TYPE:
+ return build_int_cst (type, 1);
+
+ case REAL_TYPE:
+ return build_real (type, dconst1);
+
+ case VECTOR_TYPE:
+ {
+ tree scalar, cst;
+ int i;
+
+ /* The identity of a vector type is a vector of per-element ones.  */
+ scalar = build_one_cst (TREE_TYPE (type));
+
+ /* Create 'vect_cst_ = {cst,cst,...,cst}' */
+ cst = NULL_TREE;
+ for (i = TYPE_VECTOR_SUBPARTS (type); --i >= 0; )
+ cst = tree_cons (NULL_TREE, scalar, cst);
+
+ return build_vector (type, cst);
+ }
+
+ case COMPLEX_TYPE:
+ /* 1 + 0i: real part is the element type's one, imaginary part
+    is zero converted to the element type.  */
+ return build_complex (type,
+ build_one_cst (TREE_TYPE (type)),
+ fold_convert (TREE_TYPE (type), integer_zero_node));
+
+ default:
+ gcc_unreachable ();
+ }
+}
+
/* Build a BINFO with LEN language slots. */
tree
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& TREE_INT_CST_LOW (expr) == 0
&& TREE_INT_CST_HIGH (expr) == 0)
|| (TREE_CODE (expr) == COMPLEX_CST
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& TREE_INT_CST_LOW (expr) == 1
&& TREE_INT_CST_HIGH (expr) == 0)
|| (TREE_CODE (expr) == COMPLEX_CST
&& integer_zerop (TREE_IMAGPART (expr)))
return 1;
- else if (TREE_CODE (expr) != INTEGER_CST
- || TREE_CONSTANT_OVERFLOW (expr))
+ else if (TREE_CODE (expr) != INTEGER_CST)
return 0;
uns = TYPE_UNSIGNED (TREE_TYPE (expr));
+ if (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
+ && TREE_INT_CST_HIGH (expr) == -1)
+ return 1;
if (!uns)
- return (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
- && TREE_INT_CST_HIGH (expr) == -1);
+ return 0;
/* Note that using TYPE_PRECISION here is wrong. We care about the
actual bits, not the (arbitrary) range of the type. */
&& integer_zerop (TREE_IMAGPART (expr)))
return 1;
- if (TREE_CODE (expr) != INTEGER_CST || TREE_CONSTANT_OVERFLOW (expr))
+ if (TREE_CODE (expr) != INTEGER_CST)
return 0;
prec = (POINTER_TYPE_P (TREE_TYPE (expr))
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& (TREE_INT_CST_LOW (expr) != 0
|| TREE_INT_CST_HIGH (expr) != 0))
|| (TREE_CODE (expr) == COMPLEX_CST
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0))
|| (TREE_CODE (expr) == COMPLEX_CST
&& real_zerop (TREE_REALPART (expr))
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1))
|| (TREE_CODE (expr) == COMPLEX_CST
&& real_onep (TREE_REALPART (expr))
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst2))
|| (TREE_CODE (expr) == COMPLEX_CST
&& real_twop (TREE_REALPART (expr))
STRIP_NOPS (expr);
return ((TREE_CODE (expr) == REAL_CST
- && ! TREE_CONSTANT_OVERFLOW (expr)
&& REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1))
|| (TREE_CODE (expr) == COMPLEX_CST
&& real_minus_onep (TREE_REALPART (expr))
return size_zero_node;
}
- if (TREE_CODE (t) == INTEGER_CST)
- t = force_fit_type (t, 0, false, false);
-
return t;
}
t = TYPE_SIZE_UNIT (type);
if (t == 0
|| TREE_CODE (t) != INTEGER_CST
- || TREE_OVERFLOW (t)
|| TREE_INT_CST_HIGH (t) != 0
/* If the result would appear negative, it's too big to represent. */
|| (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
return TREE_INT_CST_LOW (t);
}
+
+/* Return the maximum size of TYPE (in bytes) as a wide integer
+ or return -1 if the size can vary or is larger than an integer. */
+
+HOST_WIDE_INT
+max_int_size_in_bytes (tree type)
+{
+ HOST_WIDE_INT size = -1;
+ tree size_tree;
+
+ /* If this is an array type, check for a possible MAX_SIZE attached. */
+
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ size_tree = TYPE_ARRAY_MAX_SIZE (type);
+
+ if (size_tree && host_integerp (size_tree, 1))
+ size = tree_low_cst (size_tree, 1);
+ }
+
+ /* If we still haven't been able to get a size, see if the language
+ can compute a maximum size. */
+
+ if (size == -1)
+ {
+ /* The hook may return NULL when no maximum is known; the guard
+    below tolerates that.  */
+ size_tree = lang_hooks.types.max_size (type);
+
+ if (size_tree && host_integerp (size_tree, 1))
+ size = tree_low_cst (size_tree, 1);
+ }
+
+ /* -1 still means "varying or not representable as a HWI".  */
+ return size;
+}
\f
/* Return the bit position of FIELD, in bits from the start of the record.
This is a tree of type bitsizetype. */
align1 = TYPE_ALIGN (TREE_TYPE (t));
return MAX (align0, align1);
+ case GIMPLE_MODIFY_STMT:
+ /* We should never ask for the alignment of a gimple statement. */
+ gcc_unreachable ();
+
case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR:
return (integer_zerop (min)
? max
- : fold (build2 (MINUS_EXPR, TREE_TYPE (max), max, min)));
+ : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
\f
/* If arg is static -- a reference to an object in static storage -- then
case VAR_DECL:
return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
- && ! DECL_THREAD_LOCAL (arg)
- && ! DECL_NON_ADDR_CONST_P (arg)
+ && ! DECL_THREAD_LOCAL_P (arg)
+ && ! DECL_DLLIMPORT_P (arg)
? arg : NULL);
case CONST_DECL:
enum tree_code code = TREE_CODE (t);
switch (TREE_CODE_CLASS (code))
- {
+ {
case tcc_declaration:
- return TS_DECL;
+ {
+ switch (code)
+ {
+ case FIELD_DECL:
+ return TS_FIELD_DECL;
+ case PARM_DECL:
+ return TS_PARM_DECL;
+ case VAR_DECL:
+ return TS_VAR_DECL;
+ case LABEL_DECL:
+ return TS_LABEL_DECL;
+ case RESULT_DECL:
+ return TS_RESULT_DECL;
+ case CONST_DECL:
+ return TS_CONST_DECL;
+ case TYPE_DECL:
+ return TS_TYPE_DECL;
+ case FUNCTION_DECL:
+ return TS_FUNCTION_DECL;
+ case SYMBOL_MEMORY_TAG:
+ case NAME_MEMORY_TAG:
+ case STRUCT_FIELD_TAG:
+ case MEMORY_PARTITION_TAG:
+ return TS_MEMORY_TAG;
+ default:
+ return TS_DECL_NON_COMMON;
+ }
+ }
case tcc_type:
return TS_TYPE;
case tcc_reference:
case tcc_binary:
case tcc_expression:
case tcc_statement:
+ case tcc_vl_exp:
return TS_EXP;
+ case tcc_gimple_stmt:
+ return TS_GIMPLE_STATEMENT;
default: /* tcc_constant and tcc_exceptional */
break;
}
case VECTOR_CST: return TS_VECTOR;
case STRING_CST: return TS_STRING;
/* tcc_exceptional cases. */
+ /* FIXME tuples: eventually this should be TS_BASE. For now, nothing
+ returns TS_BASE. */
case ERROR_MARK: return TS_COMMON;
case IDENTIFIER_NODE: return TS_IDENTIFIER;
case TREE_LIST: return TS_LIST;
case PLACEHOLDER_EXPR: return TS_COMMON;
case STATEMENT_LIST: return TS_STATEMENT_LIST;
case BLOCK: return TS_BLOCK;
+ case CONSTRUCTOR: return TS_CONSTRUCTOR;
case TREE_BINFO: return TS_BINFO;
case VALUE_HANDLE: return TS_VALUE_HANDLE;
+ case OMP_CLAUSE: return TS_OMP_CLAUSE;
default:
gcc_unreachable ();
return 0;
}
+ case tcc_vl_exp:
+ switch (code)
+ {
+ case CALL_EXPR:
+ {
+ tree arg;
+ call_expr_arg_iterator iter;
+ FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
+ if (CONTAINS_PLACEHOLDER_P (arg))
+ return 1;
+ return 0;
+ }
+ default:
+ return 0;
+ }
+
default:
return 0;
}
case COMPLEX_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
- case CHAR_TYPE:
case POINTER_TYPE:
case OFFSET_TYPE:
case REFERENCE_TYPE:
substitute_in_expr (tree exp, tree f, tree r)
{
enum tree_code code = TREE_CODE (exp);
- tree op0, op1, op2;
+ tree op0, op1, op2, op3;
tree new;
tree inner;
if (op0 == TREE_OPERAND (exp, 0))
return exp;
- new = fold (build3 (COMPONENT_REF, TREE_TYPE (exp),
- op0, TREE_OPERAND (exp, 1), NULL_TREE));
+ new = fold_build3 (COMPONENT_REF, TREE_TYPE (exp),
+ op0, TREE_OPERAND (exp, 1), NULL_TREE);
}
else
switch (TREE_CODE_CLASS (code))
if (op0 == TREE_OPERAND (exp, 0))
return exp;
- new = fold (build1 (code, TREE_TYPE (exp), op0));
+ new = fold_build1 (code, TREE_TYPE (exp), op0);
break;
case 2:
if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
return exp;
- new = fold (build2 (code, TREE_TYPE (exp), op0, op1));
+ new = fold_build2 (code, TREE_TYPE (exp), op0, op1);
break;
case 3:
&& op2 == TREE_OPERAND (exp, 2))
return exp;
- new = fold (build3 (code, TREE_TYPE (exp), op0, op1, op2));
+ new = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
+ break;
+
+ case 4:
+ op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
+ op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
+ op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
+ op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
+
+ if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
+ && op2 == TREE_OPERAND (exp, 2)
+ && op3 == TREE_OPERAND (exp, 3))
+ return exp;
+
+ new = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
break;
default:
}
break;
+    case tcc_vl_exp:
+      {
+	tree copy = NULL_TREE;
+	int i;
+	int n = TREE_OPERAND_LENGTH (exp);
+
+	/* Start at operand 1: operand 0 of a vl_exp is not a regular
+	   operand (NOTE(review): it holds the operand count -- confirm
+	   against tree.h).  */
+	for (i = 1; i < n; i++)
+	  {
+	    tree op = TREE_OPERAND (exp, i);
+	    tree newop = SUBSTITUTE_IN_EXPR (op, f, r);
+	    if (newop != op)
+	      {
+		/* Copy EXP only once and keep updating the same copy,
+		   so substitutions in several operands are not lost
+		   (matches the SUBSTITUTE_PLACEHOLDER_IN_EXPR case).  */
+		if (!copy)
+		  copy = copy_node (exp);
+		TREE_OPERAND (copy, i) = newop;
+	      }
+	  }
+	if (copy)
+	  new = fold (copy);
+	else
+	  return exp;
+      }
+      /* Without this break the folded-copy path would fall through
+	 into the default: gcc_unreachable () below.  */
+      break;
+
default:
gcc_unreachable ();
}
: (REFERENCE_CLASS_P (elt)
|| UNARY_CLASS_P (elt)
|| BINARY_CLASS_P (elt)
+ || VL_EXP_CLASS_P (elt)
|| EXPRESSION_CLASS_P (elt))
? TREE_OPERAND (elt, 0) : 0))
if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
: (REFERENCE_CLASS_P (elt)
|| UNARY_CLASS_P (elt)
|| BINARY_CLASS_P (elt)
+ || VL_EXP_CLASS_P (elt)
|| EXPRESSION_CLASS_P (elt))
? TREE_OPERAND (elt, 0) : 0))
if (POINTER_TYPE_P (TREE_TYPE (elt))
&& (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
== need_type))
- return fold (build1 (INDIRECT_REF, need_type, elt));
+ return fold_build1 (INDIRECT_REF, need_type, elt);
/* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
survives until RTL generation, there will be an error. */
if (op0 == TREE_OPERAND (exp, 0))
return exp;
else
- return fold (build1 (code, TREE_TYPE (exp), op0));
+ return fold_build1 (code, TREE_TYPE (exp), op0);
case 2:
op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
return exp;
else
- return fold (build2 (code, TREE_TYPE (exp), op0, op1));
+ return fold_build2 (code, TREE_TYPE (exp), op0, op1);
case 3:
op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
&& op2 == TREE_OPERAND (exp, 2))
return exp;
else
- return fold (build3 (code, TREE_TYPE (exp), op0, op1, op2));
+ return fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
case 4:
op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
}
break;
+ case tcc_vl_exp:
+ {
+ tree copy = NULL_TREE;
+ int i;
+ int n = TREE_OPERAND_LENGTH (exp);
+ for (i = 1; i < n; i++)
+ {
+ tree op = TREE_OPERAND (exp, i);
+ tree newop = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
+ if (newop != op)
+ {
+ if (!copy)
+ copy = copy_node (exp);
+ TREE_OPERAND (copy, i) = newop;
+ }
+ }
+ if (copy)
+ return fold (copy);
+ else
+ return exp;
+ }
+
default:
gcc_unreachable ();
}
case CONVERT_EXPR:
case FLOAT_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
- case FIX_CEIL_EXPR:
result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
break;
case tcc_statement:
case tcc_expression:
case tcc_reference:
+ case tcc_vl_exp:
/* If the expression has side-effects, then encase it in a SAVE_EXPR
so that it will only be evaluated once. */
/* The reference (r) and comparison (<) classes could be handled as
TREE_INVARIANT, and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
void
-recompute_tree_invarant_for_addr_expr (tree t)
+recompute_tree_invariant_for_addr_expr (tree t)
{
tree node;
bool tc = true, ti = true, se = false;
UPDATE_TITCSE (TREE_OPERAND (node, 2));
}
+ node = lang_hooks.expr_to_decl (node, &tc, &ti, &se);
+
/* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
the address, since &(*a)->b is a form of addition. If it's a decl, it's
invariant and constant if the decl is static. It's also invariant if it's
;
else if (decl_function_context (node) == current_function_decl
/* Addresses of thread-local variables are invariant. */
- || (TREE_CODE (node) == VAR_DECL && DECL_THREAD_LOCAL (node)))
+ || (TREE_CODE (node) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (node)))
tc = false;
else
ti = tc = false;
Constants, decls, types and misc nodes cannot be.
We define 5 non-variadic functions, from 0 to 4 arguments. This is
- enough for all extant tree codes. These functions can be called
- directly (preferably!), but can also be obtained via GCC preprocessor
- magic within the build macro. */
+ enough for all extant tree codes. */
tree
build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
#else
SET_EXPR_LOCUS (t, NULL);
#endif
- TREE_COMPLEXITY (t) = 0;
TREE_OPERAND (t, 0) = node;
TREE_BLOCK (t) = NULL_TREE;
if (node && !TYPE_P (node))
case ADDR_EXPR:
if (node)
- recompute_tree_invarant_for_addr_expr (t);
+ recompute_tree_invariant_for_addr_expr (t);
break;
default:
- if (TREE_CODE_CLASS (code) == tcc_unary
+ if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
&& node && !TYPE_P (node)
&& TREE_CONSTANT (node))
TREE_CONSTANT (t) = 1;
- if (TREE_CODE_CLASS (code) == tcc_unary
+ if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
&& node && TREE_INVARIANT (node))
TREE_INVARIANT (t) = 1;
if (TREE_CODE_CLASS (code) == tcc_reference
gcc_assert (TREE_CODE_LENGTH (code) == 2);
+#if 1
+ /* FIXME tuples: Statement's aren't expressions! */
+ if (code == GIMPLE_MODIFY_STMT)
+ return build_gimple_modify_stmt_stat (arg0, arg1 PASS_MEM_STAT);
+#else
+ /* Must use build_gimple_modify_stmt to construct GIMPLE_MODIFY_STMTs. */
+ gcc_assert (code != GIMPLE_MODIFY_STMT);
+#endif
+
t = make_node_stat (code PASS_MEM_STAT);
TREE_TYPE (t) = tt;
return t;
}
+
+/* Build a GIMPLE_MODIFY_STMT node. This tree code doesn't have a
+ type, so we can't use build2 (a.k.a. build2_stat). */
+
+/* ARG0 becomes operand 0 and ARG1 operand 1 of the new statement.
+ Unlike buildN_stat, no constant/side-effect flags are recomputed
+ from the operands here. */
+tree
+build_gimple_modify_stmt_stat (tree arg0, tree arg1 MEM_STAT_DECL)
+{
+ tree t;
+
+ t = make_node_stat (GIMPLE_MODIFY_STMT PASS_MEM_STAT);
+ /* ?? We don't care about setting flags for tuples... */
+ GIMPLE_STMT_OPERAND (t, 0) = arg0;
+ GIMPLE_STMT_OPERAND (t, 1) = arg1;
+ return t;
+}
+
tree
build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
tree arg2 MEM_STAT_DECL)
tree t;
gcc_assert (TREE_CODE_LENGTH (code) == 3);
+ gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
t = make_node_stat (code PASS_MEM_STAT);
TREE_TYPE (t) = tt;
PROCESS_ARG(1);
PROCESS_ARG(2);
- if (code == CALL_EXPR && !side_effects)
- {
- tree node;
- int i;
-
- /* Calls have side-effects, except those to const or
- pure functions. */
- i = call_expr_flags (t);
- if (!(i & (ECF_CONST | ECF_PURE)))
- side_effects = 1;
-
- /* And even those have side-effects if their arguments do. */
- else for (node = arg1; node; node = TREE_CHAIN (node))
- if (TREE_SIDE_EFFECTS (TREE_VALUE (node)))
- {
- side_effects = 1;
- break;
- }
- }
-
TREE_SIDE_EFFECTS (t) = side_effects;
TREE_THIS_VOLATILE (t)
= (TREE_CODE_CLASS (code) == tcc_reference
return t;
}
-/* Backup definition for non-gcc build compilers. */
-
+/* Build a tree node of code CODE and type TT with five operands,
+ analogous to the lower-arity buildN_stat functions. PROCESS_ARG
+ folds each operand's constant/read-only/side-effect/invariant bits
+ into the new node. */
tree
-(build) (enum tree_code code, tree tt, ...)
+build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
+ tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
- tree t, arg0, arg1, arg2, arg3;
- int length = TREE_CODE_LENGTH (code);
- va_list p;
+ bool constant, read_only, side_effects, invariant;
+ tree t;
- va_start (p, tt);
- switch (length)
- {
- case 0:
- t = build0 (code, tt);
- break;
- case 1:
- arg0 = va_arg (p, tree);
- t = build1 (code, tt, arg0);
- break;
- case 2:
- arg0 = va_arg (p, tree);
- arg1 = va_arg (p, tree);
- t = build2 (code, tt, arg0, arg1);
- break;
- case 3:
- arg0 = va_arg (p, tree);
- arg1 = va_arg (p, tree);
- arg2 = va_arg (p, tree);
- t = build3 (code, tt, arg0, arg1, arg2);
- break;
- case 4:
- arg0 = va_arg (p, tree);
- arg1 = va_arg (p, tree);
- arg2 = va_arg (p, tree);
- arg3 = va_arg (p, tree);
- t = build4 (code, tt, arg0, arg1, arg2, arg3);
- break;
- default:
- gcc_unreachable ();
- }
- va_end (p);
+ gcc_assert (TREE_CODE_LENGTH (code) == 5);
+
+ t = make_node_stat (code PASS_MEM_STAT);
+ TREE_TYPE (t) = tt;
+
+ /* Start from the flag make_node_stat set; PROCESS_ARG ORs in each
+ operand's contribution. */
+ side_effects = TREE_SIDE_EFFECTS (t);
+
+ PROCESS_ARG(0);
+ PROCESS_ARG(1);
+ PROCESS_ARG(2);
+ PROCESS_ARG(3);
+ PROCESS_ARG(4);
+
+ TREE_SIDE_EFFECTS (t) = side_effects;
+ /* A reference node is volatile iff the object referenced (operand 0)
+ is volatile. */
+ TREE_THIS_VOLATILE (t)
+ = (TREE_CODE_CLASS (code) == tcc_reference
+ && arg0 && TREE_THIS_VOLATILE (arg0));
+
+ return t;
+}
+
+/* Build a tree node of code CODE and type TT with seven operands.
+ Only TARGET_MEM_REF uses this arity (enforced by the assert
+ below), so volatility is simply cleared rather than derived from
+ operand 0. */
+tree
+build7_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
+ tree arg2, tree arg3, tree arg4, tree arg5,
+ tree arg6 MEM_STAT_DECL)
+{
+ bool constant, read_only, side_effects, invariant;
+ tree t;
+
+ gcc_assert (code == TARGET_MEM_REF);
+
+ t = make_node_stat (code PASS_MEM_STAT);
+ TREE_TYPE (t) = tt;
+
+ side_effects = TREE_SIDE_EFFECTS (t);
+
+ PROCESS_ARG(0);
+ PROCESS_ARG(1);
+ PROCESS_ARG(2);
+ PROCESS_ARG(3);
+ PROCESS_ARG(4);
+ PROCESS_ARG(5);
+ PROCESS_ARG(6);
+
+ TREE_SIDE_EFFECTS (t) = side_effects;
+ TREE_THIS_VOLATILE (t) = 0;
return t;
}
int i;
va_list p;
+ gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
+
va_start (p, code);
t = make_node (code);
va_end (p);
return t;
}
+
+/* Similar to build_nt, but for creating a CALL_EXPR object with
+ ARGLIST passed as a list. */
+
+tree
+build_nt_call_list (tree fn, tree arglist)
+{
+ tree t;
+ int i;
+
+ /* "+ 3" covers the three fixed slots ahead of the arguments;
+ presumably the operand count, CALL_EXPR_FN and
+ CALL_EXPR_STATIC_CHAIN slots — TODO confirm against the
+ CALL_EXPR layout in tree.h. */
+ t = build_vl_exp (CALL_EXPR, list_length (arglist) + 3);
+ CALL_EXPR_FN (t) = fn;
+ CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
+ /* Copy each TREE_VALUE of ARGLIST into consecutive argument slots. */
+ for (i = 0; arglist; arglist = TREE_CHAIN (arglist), i++)
+ CALL_EXPR_ARG (t, i) = TREE_VALUE (arglist);
+ return t;
+}
\f
/* Create a DECL_... node of code CODE, name NAME and data type TYPE.
We do NOT enter this node in any sort of symbol table.
else if (code == FUNCTION_DECL)
DECL_MODE (t) = FUNCTION_MODE;
- /* Set default visibility to whatever the user supplied with
- visibility_specified depending on #pragma GCC visibility. */
- DECL_VISIBILITY (t) = default_visibility;
- DECL_VISIBILITY_SPECIFIED (t) = visibility_options.inpragma;
-
return t;
}
#if 1 /* ! defined(USE_MAPPED_LOCATION) */
/* ??? gengtype doesn't handle conditionals */
-static GTY(()) tree last_annotated_node;
+static GTY(()) source_locus last_annotated_node;
#endif
#ifdef USE_MAPPED_LOCATION
expand_location (source_location loc)
{
expanded_location xloc;
- if (loc == 0) { xloc.file = NULL; xloc.line = 0; xloc.column = 0; }
+ if (loc == 0)
+ {
+ xloc.file = NULL;
+ xloc.line = 0;
+ xloc.column = 0;
+ }
else
{
const struct line_map *map = linemap_lookup (&line_table, loc);
a node with the same information already attached to that node!
Just return instead of wasting memory. */
if (EXPR_LOCUS (node)
+ && EXPR_LINENO (node) == line
&& (EXPR_FILENAME (node) == file
- || ! strcmp (EXPR_FILENAME (node), file))
- && EXPR_LINENO (node) == line)
+ || !strcmp (EXPR_FILENAME (node), file)))
{
- last_annotated_node = node;
+ last_annotated_node = EXPR_LOCUS (node);
return;
}
entry cache can reduce the number of allocations by more
than half. */
if (last_annotated_node
- && EXPR_LOCUS (last_annotated_node)
- && (EXPR_FILENAME (last_annotated_node) == file
- || ! strcmp (EXPR_FILENAME (last_annotated_node), file))
- && EXPR_LINENO (last_annotated_node) == line)
+ && last_annotated_node->line == line
+ && (last_annotated_node->file == file
+ || !strcmp (last_annotated_node->file, file)))
{
- SET_EXPR_LOCUS (node, EXPR_LOCUS (last_annotated_node));
+ SET_EXPR_LOCUS (node, last_annotated_node);
return;
}
SET_EXPR_LOCUS (node, ggc_alloc (sizeof (location_t)));
EXPR_LINENO (node) = line;
EXPR_FILENAME (node) = file;
- last_annotated_node = node;
+ last_annotated_node = EXPR_LOCUS (node);
}
void
}
#endif
\f
+/* Source location accessor functions. */
+
+
+/* The source location of this expression. Non-tree_exp nodes such as
+ decls and constants can be shared among multiple locations, so
+ return nothing. */
+location_t
+expr_location (tree node)
+{
+#ifdef USE_MAPPED_LOCATION
+ if (GIMPLE_STMT_P (node))
+ return GIMPLE_STMT_LOCUS (node);
+ return EXPR_P (node) ? node->exp.locus : UNKNOWN_LOCATION;
+#else
+ if (GIMPLE_STMT_P (node))
+ return EXPR_HAS_LOCATION (node)
+ ? *GIMPLE_STMT_LOCUS (node) : UNKNOWN_LOCATION;
+ return EXPR_HAS_LOCATION (node) ? *node->exp.locus : UNKNOWN_LOCATION;
+#endif
+}
+
+/* Set the source location of NODE to LOCUS. NODE must be a
+ GIMPLE statement or an expression (EXPR_CHECK enforces the
+ latter in the mapped-location case). */
+void
+set_expr_location (tree node, location_t locus)
+{
+#ifdef USE_MAPPED_LOCATION
+ if (GIMPLE_STMT_P (node))
+ GIMPLE_STMT_LOCUS (node) = locus;
+ else
+ EXPR_CHECK (node)->exp.locus = locus;
+#else
+ annotate_with_locus (node, locus);
+#endif
+}
+
+/* Return true if NODE carries a known source location. */
+bool
+expr_has_location (tree node)
+{
+#ifdef USE_MAPPED_LOCATION
+ return expr_location (node) != UNKNOWN_LOCATION;
+#else
+ return expr_locus (node) != NULL;
+#endif
+}
+
+/* Return a pointer to NODE's stored location (NULL if NODE is not
+ an expression or GIMPLE statement). The return type depends on
+ the location representation in use. */
+#ifdef USE_MAPPED_LOCATION
+source_location *
+#else
+source_locus
+#endif
+expr_locus (tree node)
+{
+#ifdef USE_MAPPED_LOCATION
+ if (GIMPLE_STMT_P (node))
+ return &GIMPLE_STMT_LOCUS (node);
+ return EXPR_P (node) ? &node->exp.locus : (location_t *) NULL;
+#else
+ if (GIMPLE_STMT_P (node))
+ return GIMPLE_STMT_LOCUS (node);
+ /* ?? The cast below was originally "(location_t *)" in the macro,
+ but that makes no sense. ?? */
+ return EXPR_P (node) ? node->exp.locus : (source_locus) NULL;
+#endif
+}
+
+/* Store the location pointed to by LOC into NODE; a NULL LOC clears
+ the location (mapped-location case sets UNKNOWN_LOCATION). */
+void
+set_expr_locus (tree node,
+#ifdef USE_MAPPED_LOCATION
+ source_location *loc
+#else
+ source_locus loc
+#endif
+ )
+{
+#ifdef USE_MAPPED_LOCATION
+ if (loc == NULL)
+ {
+ if (GIMPLE_STMT_P (node))
+ GIMPLE_STMT_LOCUS (node) = UNKNOWN_LOCATION;
+ else
+ EXPR_CHECK (node)->exp.locus = UNKNOWN_LOCATION;
+ }
+ else
+ {
+ if (GIMPLE_STMT_P (node))
+ GIMPLE_STMT_LOCUS (node) = *loc;
+ else
+ EXPR_CHECK (node)->exp.locus = *loc;
+ }
+#else
+ if (GIMPLE_STMT_P (node))
+ GIMPLE_STMT_LOCUS (node) = loc;
+ else
+ EXPR_CHECK (node)->exp.locus = loc;
+#endif
+}
+
+/* Return the address of the file-name slot of NODE's location, so
+ callers can read or write it in place. */
+const char **
+expr_filename (tree node)
+{
+#ifdef USE_MAPPED_LOCATION
+ if (GIMPLE_STMT_P (node))
+ return &LOCATION_FILE (GIMPLE_STMT_LOCUS (node));
+ return &LOCATION_FILE (EXPR_CHECK (node)->exp.locus);
+#else
+ if (GIMPLE_STMT_P (node))
+ return &GIMPLE_STMT_LOCUS (node)->file;
+ return &(EXPR_CHECK (node)->exp.locus->file);
+#endif
+}
+
+/* Return the address of the line-number slot of NODE's location, so
+ callers can read or write it in place. */
+int *
+expr_lineno (tree node)
+{
+#ifdef USE_MAPPED_LOCATION
+ if (GIMPLE_STMT_P (node))
+ return &LOCATION_LINE (GIMPLE_STMT_LOCUS (node));
+ return &LOCATION_LINE (EXPR_CHECK (node)->exp.locus);
+#else
+ if (GIMPLE_STMT_P (node))
+ return &GIMPLE_STMT_LOCUS (node)->line;
+ return &EXPR_CHECK (node)->exp.locus->line;
+#endif
+}
+\f
/* Return a declaration like DDECL except that its DECL_ATTRIBUTES
is ATTRIBUTE. */
}
/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
- is ATTRIBUTE.
+ is ATTRIBUTE and its qualifiers are QUALS.
Record such modified types already made so we don't make duplicates. */
-tree
-build_type_attribute_variant (tree ttype, tree attribute)
+static tree
+build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
{
if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
{
TYPE_REFERENCE_TO (ntype) = 0;
TYPE_ATTRIBUTES (ntype) = attribute;
+ if (TYPE_STRUCTURAL_EQUALITY_P (ttype))
+ SET_TYPE_STRUCTURAL_EQUALITY (ntype);
+ else
+ TYPE_CANONICAL (ntype)
+ = build_qualified_type (TYPE_CANONICAL (ttype), quals);
+
/* Create a new main variant of TYPE. */
TYPE_MAIN_VARIANT (ntype) = ntype;
TYPE_NEXT_VARIANT (ntype) = 0;
}
ntype = type_hash_canon (hashcode, ntype);
- ttype = build_qualified_type (ntype, TYPE_QUALS (ttype));
+
+ /* If the target-dependent attributes make NTYPE different from
+ its canonical type, we will need to use structural equality
+ checks for this qualified type. */
+ if (!targetm.comp_type_attributes (ntype, ttype))
+ SET_TYPE_STRUCTURAL_EQUALITY (ntype);
+
+ ttype = build_qualified_type (ntype, quals);
}
return ttype;
}
+/* Return a type like TTYPE except that its TYPE_ATTRIBUTE
+ is ATTRIBUTE.
+
+ Record such modified types already made so we don't make duplicates. */
+
+tree
+build_type_attribute_variant (tree ttype, tree attribute)
+{
+ /* Thin wrapper that preserves TTYPE's existing qualifiers. */
+ return build_type_attribute_qual_variant (ttype, attribute,
+ TYPE_QUALS (ttype));
+}
+
/* Return nonzero if IDENT is a valid name for attribute ATTR,
or zero if not.
gcc_assert (attr[1] == '_');
gcc_assert (attr[attr_len - 2] == '_');
gcc_assert (attr[attr_len - 1] == '_');
- gcc_assert (attr[1] == '_');
if (ident_len == attr_len - 4
&& strncmp (attr + 2, p, attr_len - 4) == 0)
return 1;
return NULL_TREE;
}
+/* Remove any instances of attribute ATTR_NAME in LIST and return the
+ modified list. */
+
+tree
+remove_attribute (const char *attr_name, tree list)
+{
+ tree *p;
+ size_t attr_len = strlen (attr_name);
+
+ /* Walk with a pointer-to-pointer so matching nodes can be unlinked
+ in place, including at the head of the list. */
+ for (p = &list; *p; )
+ {
+ tree l = *p;
+ gcc_assert (TREE_CODE (TREE_PURPOSE (l)) == IDENTIFIER_NODE);
+ if (is_attribute_with_length_p (attr_name, attr_len, TREE_PURPOSE (l)))
+ *p = TREE_CHAIN (l);
+ else
+ p = &TREE_CHAIN (l);
+ }
+
+ return list;
+}
+
/* Return an attribute list that is the union of a1 and a2. */
tree
a = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (a2)),
TREE_CHAIN (a)))
{
- if (simple_cst_equal (TREE_VALUE (a), TREE_VALUE (a2)) == 1)
+ if (TREE_VALUE (a) != NULL
+ && TREE_CODE (TREE_VALUE (a)) == TREE_LIST
+ && TREE_VALUE (a2) != NULL
+ && TREE_CODE (TREE_VALUE (a2)) == TREE_LIST)
+ {
+ if (simple_cst_list_equal (TREE_VALUE (a),
+ TREE_VALUE (a2)) == 1)
+ break;
+ }
+ else if (simple_cst_equal (TREE_VALUE (a),
+ TREE_VALUE (a2)) == 1)
break;
}
if (a == NULL_TREE)
merge_dllimport_decl_attributes (tree old, tree new)
{
tree a;
- int delete_dllimport_p;
-
- old = DECL_ATTRIBUTES (old);
- new = DECL_ATTRIBUTES (new);
+ int delete_dllimport_p = 1;
/* What we need to do here is remove from `old' dllimport if it doesn't
appear in `new'. dllimport behaves like extern: if a declaration is
marked dllimport and a definition appears later, then the object
- is not dllimport'd. */
- if (lookup_attribute ("dllimport", old) != NULL_TREE
- && lookup_attribute ("dllimport", new) == NULL_TREE)
- delete_dllimport_p = 1;
+ is not dllimport'd. We also remove a `new' dllimport if the old list
+ contains dllexport: dllexport always overrides dllimport, regardless
+ of the order of declaration. */
+ if (!VAR_OR_FUNCTION_DECL_P (new))
+ delete_dllimport_p = 0;
+ else if (DECL_DLLIMPORT_P (new)
+ && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
+ {
+ DECL_DLLIMPORT_P (new) = 0;
+ warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
+ "dllimport ignored", new);
+ }
+ else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new))
+ {
+ /* Warn about overriding a symbol that has already been used. eg:
+ extern int __attribute__ ((dllimport)) foo;
+ int* bar () {return &foo;}
+ int foo;
+ */
+ if (TREE_USED (old))
+ {
+ warning (0, "%q+D redeclared without dllimport attribute "
+ "after being referenced with dll linkage", new);
+ /* If we have used a variable's address with dllimport linkage,
+ keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
+ decl may already have had TREE_INVARIANT and TREE_CONSTANT
+ computed.
+ We still remove the attribute so that assembler code refers
+ to '&foo rather than '_imp__foo'. */
+ if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
+ DECL_DLLIMPORT_P (new) = 1;
+ }
+
+ /* Let an inline definition silently override the external reference,
+ but otherwise warn about attribute inconsistency. */
+ else if (TREE_CODE (new) == VAR_DECL
+ || !DECL_DECLARED_INLINE_P (new))
+ warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
+ "previous dllimport ignored", new);
+ }
else
delete_dllimport_p = 0;
- a = merge_attributes (old, new);
+ a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new));
- if (delete_dllimport_p)
+ if (delete_dllimport_p)
{
tree prev, t;
-
+ const size_t attr_len = strlen ("dllimport");
+
/* Scan the list for dllimport and delete it. */
for (prev = NULL_TREE, t = a; t; prev = t, t = TREE_CHAIN (t))
{
- if (is_attribute_p ("dllimport", TREE_PURPOSE (t)))
+ if (is_attribute_with_length_p ("dllimport", attr_len,
+ TREE_PURPOSE (t)))
{
if (prev == NULL_TREE)
a = TREE_CHAIN (a);
return NULL_TREE;
}
+ if (TREE_CODE (node) != FUNCTION_DECL
+ && TREE_CODE (node) != VAR_DECL)
+ {
+ *no_add_attrs = true;
+ warning (OPT_Wattributes, "%qs attribute ignored",
+ IDENTIFIER_POINTER (name));
+ return NULL_TREE;
+ }
+
/* Report error on dllimport ambiguities seen now before they cause
any damage. */
- if (is_attribute_p ("dllimport", name))
+ else if (is_attribute_p ("dllimport", name))
{
+ /* Honor any target-specific overrides. */
+ if (!targetm.valid_dllimport_attribute_p (node))
+ *no_add_attrs = true;
+
+ else if (TREE_CODE (node) == FUNCTION_DECL
+ && DECL_DECLARED_INLINE_P (node))
+ {
+ warning (OPT_Wattributes, "inline function %q+D declared as "
+ " dllimport: attribute ignored", node);
+ *no_add_attrs = true;
+ }
/* Like MS, treat definition of dllimported variables and
- non-inlined functions on declaration as syntax errors. We
- allow the attribute for function definitions if declared
- inline. */
- if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node)
- && !DECL_DECLARED_INLINE_P (node))
+ non-inlined functions on declaration as syntax errors. */
+ else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
{
- error ("%Jfunction %qD definition is marked dllimport.", node, node);
+ error ("function %q+D definition is marked dllimport", node);
*no_add_attrs = true;
}
- else if (TREE_CODE (node) == VAR_DECL)
+ else if (TREE_CODE (node) == VAR_DECL)
{
if (DECL_INITIAL (node))
{
- error ("%Jvariable %qD definition is marked dllimport.",
- node, node);
+ error ("variable %q+D definition is marked dllimport",
+ node);
*no_add_attrs = true;
}
if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
TREE_PUBLIC (node) = 1;
}
+
+ if (*no_add_attrs == false)
+ DECL_DLLIMPORT_P (node) = 1;
}
/* Report error if symbol is not accessible at global scope. */
&& (TREE_CODE (node) == VAR_DECL
|| TREE_CODE (node) == FUNCTION_DECL))
{
- error ("%Jexternal linkage required for symbol %qD because of "
- "%qs attribute.", node, node, IDENTIFIER_POINTER (name));
+ error ("external linkage required for symbol %q+D because of "
+ "%qs attribute", node, IDENTIFIER_POINTER (name));
*no_add_attrs = true;
}
{
t = build_variant_type_copy (type);
set_type_quals (t, type_quals);
+
+ if (TYPE_STRUCTURAL_EQUALITY_P (type))
+ /* Propagate structural equality. */
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (type) != type)
+ /* Build the underlying canonical type, since it is different
+ from TYPE. */
+ TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
+ type_quals);
+ else
+ /* T is its own canonical type. */
+ TYPE_CANONICAL (t) = t;
+
}
return t;
}
/* Create a new distinct copy of TYPE. The new type is made its own
- MAIN_VARIANT. */
+ MAIN_VARIANT. If TYPE requires structural equality checks, the
+ resulting type requires structural equality checks; otherwise, its
+ TYPE_CANONICAL points to itself. */
tree
build_distinct_type_copy (tree type)
TYPE_POINTER_TO (t) = 0;
TYPE_REFERENCE_TO (t) = 0;
+ /* Set the canonical type either to a new equivalence class, or
+ propagate the need for structural equality checks. */
+ if (TYPE_STRUCTURAL_EQUALITY_P (type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else
+ TYPE_CANONICAL (t) = t;
+
/* Make it its own variant. */
TYPE_MAIN_VARIANT (t) = t;
TYPE_NEXT_VARIANT (t) = 0;
return t;
}
-/* Create a new variant of TYPE, equivalent but distinct.
- This is so the caller can modify it. */
+/* Create a new variant of TYPE, equivalent but distinct. This is so
+ the caller can modify it. TYPE_CANONICAL for the return type will
+ be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
+ are considered equal by the language itself (or that both types
+ require structural equality checks). */
tree
build_variant_type_copy (tree type)
tree t, m = TYPE_MAIN_VARIANT (type);
t = build_distinct_type_copy (type);
+
+ /* Since we're building a variant, assume that it is a non-semantic
+ variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
+ TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
/* Add the new type to the chain of variants of TYPE. */
TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
\f
/* Return true if the from tree in both tree maps are equal. */
-static int
-tree_map_eq (const void *va, const void *vb)
+int
+tree_map_base_eq (const void *va, const void *vb)
{
- const struct tree_map *a = va, *b = vb;
+ const struct tree_map_base *a = va, *b = vb;
return (a->from == b->from);
}
/* Hash a from tree in a tree_map. */
-static hashval_t
-tree_map_hash (const void *item)
+unsigned int
+tree_map_base_hash (const void *item)
{
- return (((const struct tree_map *) item)->hash);
+ return htab_hash_pointer (((const struct tree_map_base *)item)->from);
}
/* Return true if this tree map structure is marked for garbage collection
 purposes. We simply return true if the from tree is marked, so that this
 structure goes away when the from tree goes away. */
-static int
-tree_map_marked_p (const void *p)
+int
+tree_map_base_marked_p (const void *p)
+{
+ return ggc_marked_p (((struct tree_map_base *) p)->from);
+}
+
+/* Hash a struct tree_map by its cached hash field, avoiding a
+ recomputation on every probe. */
+unsigned int
+tree_map_hash (const void *item)
+{
+ return (((const struct tree_map *) item)->hash);
+}
+
+/* Return the initialization priority for DECL. */
+
+priority_type
+decl_init_priority_lookup (tree decl)
+{
+ struct tree_priority_map *h;
+ struct tree_map_base in;
+
+ gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
+ gcc_assert (TREE_CODE (decl) == VAR_DECL
+ ? DECL_HAS_INIT_PRIORITY_P (decl)
+ : DECL_STATIC_CONSTRUCTOR (decl));
+ in.from = decl;
+ h = htab_find (init_priority_for_decl, &in);
+ return h ? h->init : DEFAULT_INIT_PRIORITY;
+}
+
+/* Return the finalization priority for DECL. Note that init and
+ fini priorities share the single init_priority_for_decl table;
+ only the field read differs. */
+
+priority_type
+decl_fini_priority_lookup (tree decl)
+{
+ struct tree_priority_map *h;
+ struct tree_map_base in;
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ gcc_assert (DECL_STATIC_DESTRUCTOR (decl));
+ in.from = decl;
+ h = htab_find (init_priority_for_decl, &in);
+ return h ? h->fini : DEFAULT_INIT_PRIORITY;
+}
+
+/* Return the initialization and finalization priority information for
+ DECL. If there is no previous priority information, a freshly
+ allocated structure is returned. */
+
+static struct tree_priority_map *
+decl_priority_info (tree decl)
+{
+ struct tree_priority_map in;
+ struct tree_priority_map *h;
+ void **loc;
+
+ in.base.from = decl;
+ /* INSERT reserves a slot even on a miss; fill it lazily below. */
+ loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
+ h = *loc;
+ if (!h)
+ {
+ h = GGC_CNEW (struct tree_priority_map);
+ *loc = h;
+ h->base.from = decl;
+ h->init = DEFAULT_INIT_PRIORITY;
+ h->fini = DEFAULT_INIT_PRIORITY;
+ }
+
+ return h;
+}
+
+/* Set the initialization priority for DECL to PRIORITY. */
+
+void
+decl_init_priority_insert (tree decl, priority_type priority)
+{
+ struct tree_priority_map *h;
+
+ gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
+ h = decl_priority_info (decl);
+ h->init = priority;
+}
+
+/* Set the finalization priority for DECL to PRIORITY. */
+
+void
+decl_fini_priority_insert (tree decl, priority_type priority)
+{
+ struct tree_priority_map *h;
+
+ gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ h = decl_priority_info (decl);
+ h->fini = priority;
+}
+
+/* Look up a restrict qualified base decl for FROM. Returns
+ NULL_TREE when no mapping has been recorded. */
+
+tree
+decl_restrict_base_lookup (tree from)
+{
+ struct tree_map *h;
+ struct tree_map in;
+
+ in.base.from = from;
+ h = htab_find_with_hash (restrict_base_for_decl, &in,
+ htab_hash_pointer (from));
+ return h ? h->to : NULL_TREE;
+}
+
+/* Record the restrict qualified base TO for FROM. The entry is
+ GC-allocated; the table's if_marked hook keeps it alive only while
+ FROM itself is live. */
+
+void
+decl_restrict_base_insert (tree from, tree to)
{
- tree from = ((struct tree_map *) p)->from;
+ struct tree_map *h;
+ void **loc;
- return ggc_marked_p (from);
+ h = ggc_alloc (sizeof (struct tree_map));
+ h->hash = htab_hash_pointer (from);
+ h->base.from = from;
+ h->to = to;
+ loc = htab_find_slot_with_hash (restrict_base_for_decl, h, h->hash, INSERT);
+ *(struct tree_map **) loc = h;
}
/* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
htab_collisions (debug_expr_for_decl));
}
+/* Print out the statistics for the DECL_VALUE_EXPR hash table. */
+
+static void
+print_value_expr_statistics (void)
+{
+ fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
+ (long) htab_size (value_expr_for_decl),
+ (long) htab_elements (value_expr_for_decl),
+ htab_collisions (value_expr_for_decl));
+}
+
+/* Print out statistics for the RESTRICT_BASE_FOR_DECL hash table, but
+ don't print anything if the table is empty. */
+
+static void
+print_restrict_base_statistics (void)
+{
+ if (htab_elements (restrict_base_for_decl) != 0)
+ fprintf (stderr,
+ "RESTRICT_BASE hash: size %ld, %ld elements, %f collisions\n",
+ (long) htab_size (restrict_base_for_decl),
+ (long) htab_elements (restrict_base_for_decl),
+ htab_collisions (restrict_base_for_decl));
+}
+
/* Lookup a debug expression for FROM, and return it if we find one. */
tree
decl_debug_expr_lookup (tree from)
{
struct tree_map *h, in;
- in.from = from;
+ in.base.from = from;
h = htab_find_with_hash (debug_expr_for_decl, &in, htab_hash_pointer (from));
if (h)
h = ggc_alloc (sizeof (struct tree_map));
h->hash = htab_hash_pointer (from);
- h->from = from;
+ h->base.from = from;
h->to = to;
loc = htab_find_slot_with_hash (debug_expr_for_decl, h, h->hash, INSERT);
*(struct tree_map **) loc = h;
}
-
-/* Hashing of types so that we don't make duplicates.
- The entry point is `type_hash_canon'. */
-/* Compute a hash code for a list of types (chain of TREE_LIST nodes
- with types in the TREE_VALUE slots), by adding the hash codes
- of the individual types. */
+/* Lookup a value expression for FROM, and return it if we find one.
+ Returns NULL_TREE on a miss. */
-unsigned int
+tree
+decl_value_expr_lookup (tree from)
+{
+ struct tree_map *h, in;
+ in.base.from = from;
+
+ h = htab_find_with_hash (value_expr_for_decl, &in, htab_hash_pointer (from));
+ if (h)
+ return h->to;
+ return NULL_TREE;
+}
+
+/* Insert a mapping FROM->TO in the value expression hashtable.
+ The entry is GC-allocated; the table's if_marked hook drops it
+ automatically when FROM dies. */
+
+void
+decl_value_expr_insert (tree from, tree to)
+{
+ struct tree_map *h;
+ void **loc;
+
+ h = ggc_alloc (sizeof (struct tree_map));
+ h->hash = htab_hash_pointer (from);
+ h->base.from = from;
+ h->to = to;
+ loc = htab_find_slot_with_hash (value_expr_for_decl, h, h->hash, INSERT);
+ *(struct tree_map **) loc = h;
+}
+
+/* Hashing of types so that we don't make duplicates.
+ The entry point is `type_hash_canon'. */
+
+/* Compute a hash code for a list of types (chain of TREE_LIST nodes
+ with types in the TREE_VALUE slots), by adding the hash codes
+ of the individual types. */
+
+unsigned int
type_hash_list (tree list, hashval_t hashcode)
{
tree tail;
case INTEGER_TYPE:
case REAL_TYPE:
case BOOLEAN_TYPE:
- case CHAR_TYPE:
return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
|| tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
TYPE_MAX_VALUE (b->type)))
attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)),
TREE_CHAIN (attr)))
{
- if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) == 1)
+ if (TREE_VALUE (t2) != NULL
+ && TREE_CODE (TREE_VALUE (t2)) == TREE_LIST
+ && TREE_VALUE (attr) != NULL
+ && TREE_CODE (TREE_VALUE (attr)) == TREE_LIST)
+ {
+ if (simple_cst_list_equal (TREE_VALUE (t2),
+ TREE_VALUE (attr)) == 1)
+ break;
+ }
+ else if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) == 1)
break;
}
if (attr == 0)
return 0;
-
- if (simple_cst_equal (TREE_VALUE (t2), TREE_VALUE (attr)) != 1)
- return 0;
}
return 1;
/* Return 1 if T is an INTEGER_CST that can be manipulated efficiently on
the host. If POS is zero, the value can be represented in a single
- HOST_WIDE_INT. If POS is nonzero, the value must be positive and can
+ HOST_WIDE_INT. If POS is nonzero, the value must be non-negative and can
be represented in a single unsigned HOST_WIDE_INT. */
int
host_integerp (tree t, int pos)
{
return (TREE_CODE (t) == INTEGER_CST
- && ! TREE_OVERFLOW (t)
&& ((TREE_INT_CST_HIGH (t) == 0
&& (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
|| (! pos && TREE_INT_CST_HIGH (t) == -1
/* Return the HOST_WIDE_INT least significant bits of T if it is an
INTEGER_CST and there is no overflow. POS is nonzero if the result must
- be positive. We must be able to satisfy the above conditions. */
+ be non-negative. We must be able to satisfy the above conditions. */
HOST_WIDE_INT
tree_low_cst (tree t, int pos)
/* Return an indication of the sign of the integer constant T.
The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
- Note that -1 will never be returned it T's type is unsigned. */
+ Note that -1 will never be returned if T's type is unsigned. */
int
tree_int_cst_sgn (tree t)
TREE_STRING_LENGTH (t1)));
case CONSTRUCTOR:
- return simple_cst_list_equal (CONSTRUCTOR_ELTS (t1),
- CONSTRUCTOR_ELTS (t2));
+ {
+ unsigned HOST_WIDE_INT idx;
+ VEC(constructor_elt, gc) *v1 = CONSTRUCTOR_ELTS (t1);
+ VEC(constructor_elt, gc) *v2 = CONSTRUCTOR_ELTS (t2);
+
+ if (VEC_length (constructor_elt, v1) != VEC_length (constructor_elt, v2))
+ return false;
+
+ for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx)
+ /* ??? Should we handle also fields here? */
+ if (!simple_cst_equal (VEC_index (constructor_elt, v1, idx)->value,
+ VEC_index (constructor_elt, v2, idx)->value))
+ return false;
+ return true;
+ }
case SAVE_EXPR:
return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
case CALL_EXPR:
- cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
+ cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
if (cmp <= 0)
return cmp;
- return
- simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
+ if (call_expr_nargs (t1) != call_expr_nargs (t2))
+ return 0;
+ {
+ tree arg1, arg2;
+ call_expr_arg_iterator iter1, iter2;
+ for (arg1 = first_call_expr_arg (t1, &iter1),
+ arg2 = first_call_expr_arg (t2, &iter2);
+ arg1 && arg2;
+ arg1 = next_call_expr_arg (&iter1),
+ arg2 = next_call_expr_arg (&iter2))
+ {
+ cmp = simple_cst_equal (arg1, arg2);
+ if (cmp <= 0)
+ return cmp;
+ }
+ return arg1 == arg2;
+ }
case TARGET_EXPR:
/* Special case: if either target is an unallocated VAR_DECL,
for (; t; t = TREE_CHAIN (t))
val = iterative_hash_expr (TREE_VALUE (t), val);
return val;
+ case CONSTRUCTOR:
+ {
+ unsigned HOST_WIDE_INT idx;
+ tree field, value;
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
+ {
+ val = iterative_hash_expr (field, val);
+ val = iterative_hash_expr (value, val);
+ }
+ return val;
+ }
case FUNCTION_DECL:
/* When referring to a built-in FUNCTION_DECL, use the
__builtin__ form. Otherwise nodes that compare equal
if (class == tcc_declaration)
{
- /* Otherwise, we can just compare decls by pointer. */
- val = iterative_hash_pointer (t, val);
+ /* DECL's have a unique ID */
+ val = iterative_hash_host_wide_int (DECL_UID (t), val);
}
else
{
val = iterative_hash_hashval_t (two, val);
}
else
- for (i = TREE_CODE_LENGTH (code) - 1; i >= 0; --i)
+ for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
val = iterative_hash_expr (TREE_OPERAND (t, i), val);
}
return val;
{
tree t;
+ if (to_type == error_mark_node)
+ return error_mark_node;
+
/* In some cases, languages will have things that aren't a POINTER_TYPE
(such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
In that case, return that type without regard to the rest of our
TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
TYPE_POINTER_TO (to_type) = t;
+ if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (to_type) != to_type)
+ TYPE_CANONICAL (t)
+ = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
+ mode, can_alias_all);
+
/* Lay out the type. This function has many callers that are concerned
with expression-construction, and this simplifies them all. */
layout_type (t);
TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
TYPE_REFERENCE_TO (to_type) = t;
+ if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (to_type) != to_type)
+ TYPE_CANONICAL (t)
+ = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
+ mode, can_alias_all);
+
layout_type (t);
return t;
if (host_integerp (maxval, 1))
return type_hash_canon (tree_low_cst (maxval, 1), itype);
else
- return itype;
+ {
+ /* Since we cannot hash this type, we need to compare it using
+ structural equality checks. */
+ SET_TYPE_STRUCTURAL_EQUALITY (itype);
+ return itype;
+ }
}
/* Builds a signed or unsigned integer type of precision PRECISION.
}
/* Create a range of some discrete type TYPE (an INTEGER_TYPE,
- ENUMERAL_TYPE, BOOLEAN_TYPE, or CHAR_TYPE), with
- low bound LOWVAL and high bound HIGHVAL.
- if TYPE==NULL_TREE, sizetype is used. */
+ ENUMERAL_TYPE or BOOLEAN_TYPE), with low bound LOWVAL and
+ high bound HIGHVAL. If TYPE is NULL, sizetype is used. */
tree
build_range_type (tree type, tree lowval, tree highval)
if (type == NULL_TREE)
type = sizetype;
- TYPE_MIN_VALUE (itype) = convert (type, lowval);
- TYPE_MAX_VALUE (itype) = highval ? convert (type, highval) : NULL;
+ TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
+ TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
TYPE_PRECISION (itype) = TYPE_PRECISION (type);
TYPE_MODE (itype) = TYPE_MODE (type);
if (index_type == 0)
{
- layout_type (t);
+ tree save = t;
+ hashcode = iterative_hash_object (TYPE_HASH (elt_type), hashcode);
+ t = type_hash_canon (hashcode, t);
+ if (save == t)
+ layout_type (t);
+
+ if (TYPE_CANONICAL (t) == t)
+ {
+ if (TYPE_STRUCTURAL_EQUALITY_P (elt_type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (elt_type) != elt_type)
+ TYPE_CANONICAL (t)
+ = build_array_type (TYPE_CANONICAL (elt_type), index_type);
+ }
+
return t;
}
if (!COMPLETE_TYPE_P (t))
layout_type (t);
+
+ if (TYPE_CANONICAL (t) == t)
+ {
+ if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
+ || TYPE_STRUCTURAL_EQUALITY_P (index_type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (elt_type) != elt_type
+ || TYPE_CANONICAL (index_type) != index_type)
+ TYPE_CANONICAL (t)
+ = build_array_type (TYPE_CANONICAL (elt_type),
+ TYPE_CANONICAL (index_type));
+ }
+
return t;
}
TREE_TYPE (t) = value_type;
TYPE_ARG_TYPES (t) = arg_types;
+ /* We don't have canonicalization of function types, yet. */
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+
/* If we already have such a type, use the old one. */
hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
hashcode = type_hash_list (arg_types, hashcode);
argtypes = tree_cons (NULL_TREE, ptype, argtypes);
TYPE_ARG_TYPES (t) = argtypes;
+ /* We don't have canonicalization of method types yet. */
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+
/* If we already have such a type, use the old one. */
hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
if (!COMPLETE_TYPE_P (t))
layout_type (t);
+ if (TYPE_CANONICAL (t) == t)
+ {
+ if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
+ || TYPE_STRUCTURAL_EQUALITY_P (type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (basetype) != basetype
+ || TYPE_CANONICAL (type) != type)
+ TYPE_CANONICAL (t)
+ = build_offset_type (TYPE_CANONICAL (basetype),
+ TYPE_CANONICAL (type));
+ }
+
return t;
}
if (!COMPLETE_TYPE_P (t))
layout_type (t);
+ if (TYPE_CANONICAL (t) == t)
+ {
+ if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (component_type) != component_type)
+ TYPE_CANONICAL (t)
+ = build_complex_type (TYPE_CANONICAL (component_type));
+ }
+
/* If we are writing Dwarf2 output we need to create a name,
since complex is a fundamental type. */
if ((write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
while (TREE_CODE (op) == NOP_EXPR
|| TREE_CODE (op) == CONVERT_EXPR)
{
- int bitschange
- = TYPE_PRECISION (TREE_TYPE (op))
- - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
+ int bitschange;
+
+ /* TYPE_PRECISION on vector types has different meaning
+ (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
+ so avoid them here. */
+ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
+ break;
+
+ bitschange = TYPE_PRECISION (TREE_TYPE (op))
+ - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
/* Truncations are many-one so cannot be removed.
Unless we are later going to truncate down even farther. */
{
if (first)
uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
- win = build3 (COMPONENT_REF, type, TREE_OPERAND (op, 0),
- TREE_OPERAND (op, 1), NULL_TREE);
- TREE_SIDE_EFFECTS (win) = TREE_SIDE_EFFECTS (op);
- TREE_THIS_VOLATILE (win) = TREE_THIS_VOLATILE (op);
+ win = fold_convert (type, op);
}
}
+
*unsignedp_ptr = uns;
return win;
}
tree type_low_bound = TYPE_MIN_VALUE (type);
tree type_high_bound = TYPE_MAX_VALUE (type);
bool ok_for_low_bound, ok_for_high_bound;
- tree tmp;
+ unsigned HOST_WIDE_INT low;
+ HOST_WIDE_INT high;
/* If at least one bound of the type is a constant integer, we can check
ourselves and maybe make a decision. If no such decision is possible, but
this type is a subtype, try checking against that. Otherwise, use
- force_fit_type, which checks against the precision.
+ fit_double_type, which checks against the precision.
Compute the status for each possibly constant bound, and return if we see
one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
return 0;
/* If we haven't been able to decide at this point, there nothing more we
- can check ourselves here. Look at the base type if we have one. */
- if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != 0)
+ can check ourselves here. Look at the base type if we have one and it
+ has the same precision. */
+ if (TREE_CODE (type) == INTEGER_TYPE
+ && TREE_TYPE (type) != 0
+ && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
return int_fits_type_p (c, TREE_TYPE (type));
- /* Or to force_fit_type, if nothing else. */
- tmp = copy_node (c);
- TREE_TYPE (tmp) = type;
- tmp = force_fit_type (tmp, -1, false, false);
- return TREE_INT_CST_HIGH (tmp) == TREE_INT_CST_HIGH (c)
- && TREE_INT_CST_LOW (tmp) == TREE_INT_CST_LOW (c);
+ /* Or to fit_double_type, if nothing else. */
+ low = TREE_INT_CST_LOW (c);
+ high = TREE_INT_CST_HIGH (c);
+ return !fit_double_type (low, high, &low, &high, type);
}
/* Subprogram of following function. Called by walk_tree.
}
/* Returns true if T is, contains, or refers to a type with variable
- size. If FN is nonzero, only return true if a modifier of the type
- or position of FN is a variable or parameter inside FN.
+ size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
+ arguments, but not the return type. If FN is nonzero, only return
+ true if a modifier of the type or position of FN is a variable or
+ parameter inside FN.
This concept is more general than that of C99 'variably modified types':
in C99, a struct type is never variably modified because a VLA may not
if (type == error_mark_node)
return false;
- /* If TYPE itself has variable size, it is variably modified.
-
- We do not yet have a representation of the C99 '[*]' syntax.
- When a representation is chosen, this function should be modified
- to test for that case as well. */
+ /* If TYPE itself has variable size, it is variably modified. */
RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
- RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT(type));
+ RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
switch (TREE_CODE (type))
{
case POINTER_TYPE:
case REFERENCE_TYPE:
- case ARRAY_TYPE:
case VECTOR_TYPE:
if (variably_modified_type_p (TREE_TYPE (type), fn))
return true;
case FUNCTION_TYPE:
case METHOD_TYPE:
- /* If TYPE is a function type, it is variably modified if any of the
- parameters or the return type are variably modified. */
+ /* If TYPE is a function type, it is variably modified if the
+ return type is variably modified. */
if (variably_modified_type_p (TREE_TYPE (type), fn))
return true;
-
- for (t = TYPE_ARG_TYPES (type);
- t && t != void_list_node;
- t = TREE_CHAIN (t))
- if (variably_modified_type_p (TREE_VALUE (t), fn))
- return true;
break;
case INTEGER_TYPE:
case REAL_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
- case CHAR_TYPE:
/* Scalar types are variably modified if their end points
aren't constant. */
RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
case RECORD_TYPE:
case UNION_TYPE:
case QUAL_UNION_TYPE:
- /* We can't see if any of the field are variably-modified by the
+ /* We can't see if any of the fields are variably-modified by the
definition we normally use, since that would produce infinite
recursion via pointers. */
/* This is variably modified if some field's type is. */
}
break;
+ case ARRAY_TYPE:
+ /* Do not call ourselves to avoid infinite recursion. This is
+ variably modified if the element type is. */
+ RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
+ RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
+ break;
+
default:
break;
}
{
tree addr;
+ if (call == error_mark_node)
+ return call;
+
/* It's invalid to call this function with anything but a
CALL_EXPR. */
gcc_assert (TREE_CODE (call) == CALL_EXPR);
/* The first operand to the CALL is the address of the function
called. */
- addr = TREE_OPERAND (call, 0);
+ addr = CALL_EXPR_FN (call);
STRIP_NOPS (addr);
#endif
print_type_hash_statistics ();
print_debug_expr_statistics ();
+ print_value_expr_statistics ();
+ print_restrict_base_statistics ();
lang_hooks.print_statistics ();
}
\f
*p = '_';
}
-/* Generate a name for a function unique to this translation unit.
+/* Generate a name for a special-purpose function.
+ The generated name may need to be unique across the whole link.
TYPE is some string to identify the purpose of this function to the
- linker or collect2. */
+ linker or collect2; it must start with an uppercase letter,
+ one of:
+ I - for constructors
+ D - for destructors
+ N - for C++ anonymous namespaces
+ F - for DWARF unwind frame information. */
tree
-get_file_function_name_long (const char *type)
+get_file_function_name (const char *type)
{
char *buf;
const char *p;
char *q;
+ /* If we already have a name we know to be unique, just use that. */
if (first_global_object_name)
p = first_global_object_name;
+ /* If the target is handling the constructors/destructors, they
+ will be local to this file and the name is only necessary for
+ debugging purposes. */
+ else if ((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
+ {
+ const char *file = main_input_filename;
+ if (! file)
+ file = input_filename;
+ /* Just use the file's basename, because the full pathname
+ might be quite long. */
+ p = strrchr (file, '/');
+ if (p)
+ p++;
+ else
+ p = file;
+ p = q = ASTRDUP (p);
+ clean_symbol_name (q);
+ }
else
{
- /* We don't have anything that we know to be unique to this translation
+ /* Otherwise, the name must be unique across the entire link.
+ We don't have anything that we know to be unique to this translation
unit, so use what we do have and throw in some randomness. */
unsigned len;
const char *name = weak_global_object_name;
return get_identifier (buf);
}
-
-/* If KIND=='I', return a suitable global initializer (constructor) name.
- If KIND=='D', return a suitable global clean-up (destructor) name. */
-
-tree
-get_file_function_name (int kind)
-{
- char p[2];
-
- p[0] = kind;
- p[1] = 0;
-
- return get_file_function_name_long (p);
-}
\f
#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
tree_code_name[TREE_CODE (node)], function, trim_filename (file), line);
}
+/* Similar to tree_check_failed, except that instead of specifying a
+ dozen codes, use the knowledge that they're all sequential. */
+
+void
+tree_range_check_failed (const tree node, const char *file, int line,
+ const char *function, enum tree_code c1,
+ enum tree_code c2)
+{
+ char *buffer;
+ unsigned length = 0;
+ enum tree_code c;
+
+ /* First pass: size the buffer -- each code name plus 4 bytes for the
+    " or " separator, plus the leading "expected " prefix.  */
+ for (c = c1; c <= c2; ++c)
+ length += 4 + strlen (tree_code_name[c]);
+
+ length += strlen ("expected ");
+ buffer = alloca (length);
+ length = 0;
+
+ /* Second pass: build "expected C1 or C2 or ... " into BUFFER.  */
+ for (c = c1; c <= c2; ++c)
+ {
+ const char *prefix = length ? " or " : "expected ";
+
+ strcpy (buffer + length, prefix);
+ length += strlen (prefix);
+ strcpy (buffer + length, tree_code_name[c]);
+ length += strlen (tree_code_name[c]);
+ }
+
+ internal_error ("tree check: %s, have %s in %s, at %s:%d",
+ buffer, tree_code_name[TREE_CODE (node)],
+ function, trim_filename (file), line);
+}
+
+
+/* Similar to tree_check_failed, except that we check that a tree does
+ not belong to the tree code class given in CL.  */
+
+void
+tree_not_class_check_failed (const tree node, const enum tree_code_class cl,
+ const char *file, int line, const char *function)
+{
+ internal_error
+ ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
+ TREE_CODE_CLASS_STRING (cl),
+ TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
+ tree_code_name[TREE_CODE (node)], function, trim_filename (file), line);
+}
+
+
+/* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
+
+void
+omp_clause_check_failed (const tree node, const char *file, int line,
+ const char *function, enum omp_clause_code code)
+{
+ /* NODE may not be an OMP_CLAUSE at all, so describe what we actually
+    have by its tree code; CODE is the clause that was expected.  */
+ internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
+ omp_clause_code_name[code], tree_code_name[TREE_CODE (node)],
+ function, trim_filename (file), line);
+}
+
+
+/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
+
+void
+omp_clause_range_check_failed (const tree node, const char *file, int line,
+ const char *function, enum omp_clause_code c1,
+ enum omp_clause_code c2)
+{
+ char *buffer;
+ unsigned length = 0;
+ enum omp_clause_code c;
+
+ /* First pass: size the buffer -- each clause name plus 4 bytes for the
+    " or " separator, plus the leading "expected " prefix.  */
+ for (c = c1; c <= c2; ++c)
+ length += 4 + strlen (omp_clause_code_name[c]);
+
+ length += strlen ("expected ");
+ buffer = alloca (length);
+ length = 0;
+
+ /* Second pass: build "expected C1 or C2 or ... " into BUFFER.  */
+ for (c = c1; c <= c2; ++c)
+ {
+ const char *prefix = length ? " or " : "expected ";
+
+ strcpy (buffer + length, prefix);
+ length += strlen (prefix);
+ strcpy (buffer + length, omp_clause_code_name[c]);
+ length += strlen (omp_clause_code_name[c]);
+ }
+
+ /* NODE is already known to be an OMP_CLAUSE when we get here (only its
+    subcode is out of range), so index the clause-name table with its
+    clause subcode; indexing with TREE_CODE (node) would read past the
+    end of omp_clause_code_name.  Matches omp_clause_operand_check_failed.  */
+ internal_error ("tree check: %s, have %s in %s, at %s:%d",
+ buffer, omp_clause_code_name[OMP_CLAUSE_CODE (node)],
+ function, trim_filename (file), line);
+}
+
+
+#undef DEFTREESTRUCT
+#define DEFTREESTRUCT(VAL, NAME) NAME,
+
+/* Printable name for each tree-structure enumerator, generated from
+   treestruct.def and indexed by enum tree_node_structure_enum.  */
+static const char *ts_enum_names[] = {
+#include "treestruct.def"
+};
+#undef DEFTREESTRUCT
+
+#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
+
+/* Similar to tree_class_check_failed, except that we check for
+ whether NODE's tree code contains the tree structure identified by EN. */
+
+void
+tree_contains_struct_check_failed (const tree node,
+ const enum tree_node_structure_enum en,
+ const char *file, int line,
+ const char *function)
+{
+ internal_error
+ ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
+ TS_ENUM_NAME(en),
+ tree_code_name[TREE_CODE (node)], function, trim_filename (file), line);
+}
+
+
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
(dynamically sized) vector. */
}
/* Similar to above, except that the check is for the bounds of the operand
- vector of an expression node. */
+ vector of an expression node EXP. */
void
-tree_operand_check_failed (int idx, enum tree_code code, const char *file,
+tree_operand_check_failed (int idx, tree exp, const char *file,
int line, const char *function)
{
+ int code = TREE_CODE (exp);
+ /* Report the node's dynamic operand count; for tcc_vl_exp nodes this
+    can differ from the static TREE_CODE_LENGTH of the code.  */
internal_error
("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
- idx + 1, tree_code_name[code], TREE_CODE_LENGTH (code),
+ idx + 1, tree_code_name[code], TREE_OPERAND_LENGTH (exp),
function, trim_filename (file), line);
}
+
+/* Similar to above, except that the check is for the number of
+ operands of an OMP_CLAUSE node. */
+
+void
+omp_clause_operand_check_failed (int idx, tree t, const char *file,
+ int line, const char *function)
+{
+ /* IDX is zero-based internally; report it one-based to the user.  */
+ internal_error
+ ("tree check: accessed operand %d of omp_clause %s with %d operands "
+ "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
+ omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
+ trim_filename (file), line);
+}
#endif /* ENABLE_TREE_CHECKING */
\f
/* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
static tree
make_vector_type (tree innertype, int nunits, enum machine_mode mode)
{
- tree t = make_node (VECTOR_TYPE);
+ tree t;
+ hashval_t hashcode = 0;
+ /* Build a main variant, based on the main variant of the inner type, then
+ use it to build the variant we return. */
+ if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
+ && TYPE_MAIN_VARIANT (innertype) != innertype)
+ return build_type_attribute_qual_variant (
+ make_vector_type (TYPE_MAIN_VARIANT (innertype), nunits, mode),
+ TYPE_ATTRIBUTES (innertype),
+ TYPE_QUALS (innertype));
+
+ t = make_node (VECTOR_TYPE);
TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
- TYPE_VECTOR_SUBPARTS (t) = nunits;
+ SET_TYPE_VECTOR_SUBPARTS (t, nunits);
TYPE_MODE (t) = mode;
TYPE_READONLY (t) = TYPE_READONLY (innertype);
TYPE_VOLATILE (t) = TYPE_VOLATILE (innertype);
+ if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
+ SET_TYPE_STRUCTURAL_EQUALITY (t);
+ else if (TYPE_CANONICAL (innertype) != innertype
+ || mode != VOIDmode)
+ TYPE_CANONICAL (t)
+ = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
+
layout_type (t);
{
TYPE_UID (rt) = TYPE_UID (t);
}
- /* Build our main variant, based on the main variant of the inner type. */
- if (TYPE_MAIN_VARIANT (innertype) != innertype)
- {
- tree innertype_main_variant = TYPE_MAIN_VARIANT (innertype);
- unsigned int hash = TYPE_HASH (innertype_main_variant);
- TYPE_MAIN_VARIANT (t)
- = type_hash_canon (hash, make_vector_type (innertype_main_variant,
- nunits, mode));
- }
-
- return t;
+ hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
+ hashcode = iterative_hash_host_wide_int (mode, hashcode);
+ hashcode = iterative_hash_object (TYPE_HASH (innertype), hashcode);
+ return type_hash_canon (hashcode, t);
}
static tree
/* Define both `signed char' and `unsigned char'. */
signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
+ TYPE_STRING_FLAG (signed_char_type_node) = 1;
unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
+ TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
/* Define `char', which is like either `signed char' or `unsigned char'
but not the same as either. */
= (signed_char
? make_signed_type (CHAR_TYPE_SIZE)
: make_unsigned_type (CHAR_TYPE_SIZE));
+ TYPE_STRING_FLAG (char_type_node) = 1;
short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
long_double_ptr_type_node = build_pointer_type (long_double_type_node);
integer_ptr_type_node = build_pointer_type (integer_type_node);
+ /* Fixed size integer types. */
+ uint32_type_node = build_nonstandard_integer_type (32, true);
+ uint64_type_node = build_nonstandard_integer_type (64, true);
+
+ /* Decimal float types. */
+ dfloat32_type_node = make_node (REAL_TYPE);
+ TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
+ layout_type (dfloat32_type_node);
+ TYPE_MODE (dfloat32_type_node) = SDmode;
+ dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
+
+ dfloat64_type_node = make_node (REAL_TYPE);
+ TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
+ layout_type (dfloat64_type_node);
+ TYPE_MODE (dfloat64_type_node) = DDmode;
+ dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
+
+ dfloat128_type_node = make_node (REAL_TYPE);
+ TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
+ layout_type (dfloat128_type_node);
+ TYPE_MODE (dfloat128_type_node) = TDmode;
+ dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
+
complex_integer_type_node = make_node (COMPLEX_TYPE);
TREE_TYPE (complex_integer_type_node) = integer_type_node;
layout_type (complex_integer_type_node);
declare the type to be __builtin_va_list. */
if (TREE_CODE (t) != RECORD_TYPE)
t = build_variant_type_copy (t);
-
+
va_list_type_node = t;
}
}
{
tree decl;
- decl = lang_hooks.builtin_function (name, type, code, BUILT_IN_NORMAL,
- library_name, NULL_TREE);
+ decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
+ library_name, NULL_TREE);
if (ecf_flags & ECF_CONST)
TREE_READONLY (decl) = 1;
if (ecf_flags & ECF_PURE)
"__builtin_nonlocal_goto",
ECF_NORETURN | ECF_NOTHROW);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
+ ftype = build_function_type (void_type_node, tmp);
+ local_define_builtin ("__builtin_setjmp_setup", ftype,
+ BUILT_IN_SETJMP_SETUP,
+ "__builtin_setjmp_setup", ECF_NOTHROW);
+
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ ftype = build_function_type (ptr_type_node, tmp);
+ local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
+ BUILT_IN_SETJMP_DISPATCHER,
+ "__builtin_setjmp_dispatcher",
+ ECF_PURE | ECF_NOTHROW);
+
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ ftype = build_function_type (void_type_node, tmp);
+ local_define_builtin ("__builtin_setjmp_receiver", ftype,
+ BUILT_IN_SETJMP_RECEIVER,
+ "__builtin_setjmp_receiver", ECF_NOTHROW);
+
ftype = build_function_type (ptr_type_node, void_list_node);
local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
"__builtin_stack_save", ECF_NOTHROW);
return make_vector_type (innertype, nunits, VOIDmode);
}
+
/* Build RESX_EXPR with given REGION_NUMBER. */
tree
build_resx (int region_number)
return true;
case CONSTRUCTOR:
- elt = CONSTRUCTOR_ELTS (init);
- if (elt == NULL_TREE)
- return true;
+ {
+ unsigned HOST_WIDE_INT idx;
- for (; elt ; elt = TREE_CHAIN (elt))
- if (! initializer_zerop (TREE_VALUE (elt)))
- return false;
- return true;
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
+ if (!initializer_zerop (elt))
+ return false;
+ return true;
+ }
default:
return false;
}
}
-void
-add_var_to_bind_expr (tree bind_expr, tree var)
-{
- BIND_EXPR_VARS (bind_expr)
- = chainon (BIND_EXPR_VARS (bind_expr), var);
- if (BIND_EXPR_BLOCK (bind_expr))
- BLOCK_VARS (BIND_EXPR_BLOCK (bind_expr))
- = BIND_EXPR_VARS (bind_expr);
-}
-
/* Build an empty statement. */
tree
}
+/* Build an OpenMP clause with code CODE. */
+
+tree
+build_omp_clause (enum omp_clause_code code)
+{
+ tree t;
+ int size, length;
+
+ length = omp_clause_num_ops[code];
+ /* The (length - 1) arithmetic implies struct tree_omp_clause embeds
+    one operand slot; allocate room for the remaining LENGTH - 1.  */
+ size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
+
+ /* Zero the whole node, then stamp in the tree code and clause subcode.  */
+ t = ggc_alloc (size);
+ memset (t, 0, size);
+ TREE_SET_CODE (t, OMP_CLAUSE);
+ OMP_CLAUSE_SET_CODE (t, code);
+
+#ifdef GATHER_STATISTICS
+ tree_node_counts[(int) omp_clause_kind]++;
+ tree_node_sizes[(int) omp_clause_kind] += size;
+#endif
+
+ return t;
+}
+
+/* Set various status flags when building a CALL_EXPR object T.
+   Currently computes TREE_SIDE_EFFECTS: true unless every operand is
+   side-effect free AND the callee is const or pure.  */
+
+static void
+process_call_operands (tree t)
+{
+ bool side_effects;
+
+ side_effects = TREE_SIDE_EFFECTS (t);
+ if (!side_effects)
+ {
+ int i, n;
+ n = TREE_OPERAND_LENGTH (t);
+ /* Operand 0 holds the operand count, so scan from operand 1.  */
+ for (i = 1; i < n; i++)
+ {
+ tree op = TREE_OPERAND (t, i);
+ if (op && TREE_SIDE_EFFECTS (op))
+ {
+ side_effects = 1;
+ break;
+ }
+ }
+ }
+ if (!side_effects)
+ {
+ int i;
+
+ /* Calls have side-effects, except those to const or
+ pure functions. */
+ i = call_expr_flags (t);
+ if (!(i & (ECF_CONST | ECF_PURE)))
+ side_effects = 1;
+ }
+ TREE_SIDE_EFFECTS (t) = side_effects;
+}
+
+/* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
+ includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
+ Except for the CODE and operand count field, other storage for the
+ object is initialized to zeros. */
+
+tree
+build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
+{
+ tree t;
+ /* struct tree_exp embeds one operand slot, hence LEN - 1 extras.  */
+ int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
+
+ gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
+ gcc_assert (len >= 1);
+
+#ifdef GATHER_STATISTICS
+ tree_node_counts[(int) e_kind]++;
+ tree_node_sizes[(int) e_kind] += length;
+#endif
+
+ t = ggc_alloc_zone_pass_stat (length, &tree_zone);
+
+ memset (t, 0, length);
+
+ TREE_SET_CODE (t, code);
+
+ /* Can't use TREE_OPERAND to store the length because if checking is
+ enabled, it will try to check the length before we store it. :-P */
+ t->exp.operands[0] = build_int_cst (sizetype, len);
+
+ return t;
+}
+
+
+/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE
+ and FN and a null static chain slot. ARGLIST is a TREE_LIST of the
+ arguments. */
+
+tree
+build_call_list (tree return_type, tree fn, tree arglist)
+{
+ tree t;
+ int i;
+
+ /* + 3 covers the operand-count, callee and static-chain slots.  */
+ t = build_vl_exp (CALL_EXPR, list_length (arglist) + 3);
+ TREE_TYPE (t) = return_type;
+ CALL_EXPR_FN (t) = fn;
+ CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
+ for (i = 0; arglist; arglist = TREE_CHAIN (arglist), i++)
+ CALL_EXPR_ARG (t, i) = TREE_VALUE (arglist);
+ process_call_operands (t);
+ return t;
+}
+
+/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
+ FN and a null static chain slot. NARGS is the number of call arguments
+ which are specified as "..." arguments. */
+
+tree
+build_call_nary (tree return_type, tree fn, int nargs, ...)
+{
+ tree ret;
+ va_list args;
+ /* Thin varargs wrapper: delegate all the work to the va_list variant.  */
+ va_start (args, nargs);
+ ret = build_call_valist (return_type, fn, nargs, args);
+ va_end (args);
+ return ret;
+}
+
+/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
+ FN and a null static chain slot. NARGS is the number of call arguments
+ which are specified as a va_list ARGS. */
+
+tree
+build_call_valist (tree return_type, tree fn, int nargs, va_list args)
+{
+ tree t;
+ int i;
+
+ /* + 3 covers the operand-count, callee and static-chain slots.  */
+ t = build_vl_exp (CALL_EXPR, nargs + 3);
+ TREE_TYPE (t) = return_type;
+ CALL_EXPR_FN (t) = fn;
+ CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
+ for (i = 0; i < nargs; i++)
+ CALL_EXPR_ARG (t, i) = va_arg (args, tree);
+ process_call_operands (t);
+ return t;
+}
+
+/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
+ FN and a null static chain slot. NARGS is the number of call arguments
+ which are specified as a tree array ARGS. */
+
+tree
+build_call_array (tree return_type, tree fn, int nargs, tree *args)
+{
+ tree t;
+ int i;
+
+ /* + 3 covers the operand-count, callee and static-chain slots.  */
+ t = build_vl_exp (CALL_EXPR, nargs + 3);
+ TREE_TYPE (t) = return_type;
+ CALL_EXPR_FN (t) = fn;
+ CALL_EXPR_STATIC_CHAIN (t) = NULL_TREE;
+ for (i = 0; i < nargs; i++)
+ CALL_EXPR_ARG (t, i) = args[i];
+ process_call_operands (t);
+ return t;
+}
+
+
/* Returns true if it is possible to prove that the index of
an array access REF (an ARRAY_REF expression) falls into the
array bounds. */
return true;
}
+/* Returns true if it is possible to prove that the range of
+ an array access REF (an ARRAY_RANGE_REF expression) falls
+ into the array bounds. */
+
+bool
+range_in_array_bounds_p (tree ref)
+{
+ tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
+ tree range_min, range_max, min, max;
+
+ /* [RANGE_MIN, RANGE_MAX] is the range selected by REF's type;
+    give up unless both bounds are known integer constants.  */
+ range_min = TYPE_MIN_VALUE (domain_type);
+ range_max = TYPE_MAX_VALUE (domain_type);
+ if (!range_min
+ || !range_max
+ || TREE_CODE (range_min) != INTEGER_CST
+ || TREE_CODE (range_max) != INTEGER_CST)
+ return false;
+
+ /* [MIN, MAX] are the bounds of the array being accessed; likewise
+    require them to be known constants.  */
+ min = array_ref_low_bound (ref);
+ max = array_ref_up_bound (ref);
+ if (!min
+ || !max
+ || TREE_CODE (min) != INTEGER_CST
+ || TREE_CODE (max) != INTEGER_CST)
+ return false;
+
+ /* Fail unless the selected range provably lies within the bounds.  */
+ if (tree_int_cst_lt (range_min, min)
+ || tree_int_cst_lt (max, range_max))
+ return false;
+
+ return true;
+}
+
/* Return true if T (assumed to be a DECL) is a global variable. */
bool
is_global_var (tree t)
{
- return (TREE_STATIC (t) || DECL_EXTERNAL (t));
+ /* Memory tags carry their own global flag (MTAG_GLOBAL) rather than
+    DECL_EXTERNAL.  */
+ if (MTAG_P (t))
+ return (TREE_STATIC (t) || MTAG_GLOBAL (t));
+ else
+ return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}
/* Return true if T (assumed to be a DECL) must be assigned a memory
bool
needs_to_live_in_memory (tree t)
{
+ if (TREE_CODE (t) == SSA_NAME)
+ t = SSA_NAME_VAR (t);
+
return (TREE_ADDRESSABLE (t)
|| is_global_var (t)
|| (TREE_CODE (t) == RESULT_DECL
return val;
}
-/* Returns the greatest common divisor of A and B, which must be
- INTEGER_CSTs. */
-
-tree
-tree_fold_gcd (tree a, tree b)
-{
- tree a_mod_b;
- tree type = TREE_TYPE (a);
-
- gcc_assert (TREE_CODE (a) == INTEGER_CST);
- gcc_assert (TREE_CODE (b) == INTEGER_CST);
-
- if (integer_zerop (a))
- return b;
-
- if (integer_zerop (b))
- return a;
-
- if (tree_int_cst_sgn (a) == -1)
- a = fold (build2 (MULT_EXPR, type, a,
- convert (type, integer_minus_one_node)));
-
- if (tree_int_cst_sgn (b) == -1)
- b = fold (build2 (MULT_EXPR, type, b,
- convert (type, integer_minus_one_node)));
-
- while (1)
- {
- a_mod_b = fold (build2 (FLOOR_MOD_EXPR, type, a, b));
-
- if (!TREE_INT_CST_LOW (a_mod_b)
- && !TREE_INT_CST_HIGH (a_mod_b))
- return b;
-
- a = b;
- b = a_mod_b;
- }
-}
/* Returns unsigned variant of TYPE. */
tree
unsigned_type_for (tree type)
{
+ /* For pointer types, hand back an unsigned integer type instead;
+    NOTE(review): assumes size_type_node has pointer width -- confirm
+    per target.  */
+ if (POINTER_TYPE_P (type))
+ return lang_hooks.types.unsigned_type (size_type_node);
return lang_hooks.types.unsigned_type (type);
}
tree
signed_type_for (tree type)
{
+ /* Mirror unsigned_type_for: map pointer types to a signed integer
+    type of size_type_node's width -- TODO confirm width per target.  */
+ if (POINTER_TYPE_P (type))
+ return lang_hooks.types.signed_type (size_type_node);
return lang_hooks.types.signed_type (type);
}
upper_bound_in_type (tree outer, tree inner)
{
unsigned HOST_WIDE_INT lo, hi;
- unsigned bits = TYPE_PRECISION (inner);
+ unsigned int det = 0;
+ unsigned oprec = TYPE_PRECISION (outer);
+ unsigned iprec = TYPE_PRECISION (inner);
+ unsigned prec;
+
+ /* Compute a unique number for every combination. */
+ det |= (oprec > iprec) ? 4 : 0;
+ det |= TYPE_UNSIGNED (outer) ? 2 : 0;
+ det |= TYPE_UNSIGNED (inner) ? 1 : 0;
+
+ /* Determine the exponent to use. */
+ switch (det)
+ {
+ case 0:
+ case 1:
+ /* oprec <= iprec, outer: signed, inner: don't care. */
+ prec = oprec - 1;
+ break;
+ case 2:
+ case 3:
+ /* oprec <= iprec, outer: unsigned, inner: don't care. */
+ prec = oprec;
+ break;
+ case 4:
+ /* oprec > iprec, outer: signed, inner: signed. */
+ prec = iprec - 1;
+ break;
+ case 5:
+ /* oprec > iprec, outer: signed, inner: unsigned. */
+ prec = iprec;
+ break;
+ case 6:
+ /* oprec > iprec, outer: unsigned, inner: signed. */
+ prec = oprec;
+ break;
+ case 7:
+ /* oprec > iprec, outer: unsigned, inner: unsigned. */
+ prec = iprec;
+ break;
+ default:
+ gcc_unreachable ();
+ }
- if (TYPE_UNSIGNED (outer) || TYPE_UNSIGNED (inner))
+ /* Compute 2^^prec - 1. */
+ if (prec <= HOST_BITS_PER_WIDE_INT)
{
- /* Zero extending in these cases. */
- if (bits <= HOST_BITS_PER_WIDE_INT)
- {
- hi = 0;
- lo = (~(unsigned HOST_WIDE_INT) 0)
- >> (HOST_BITS_PER_WIDE_INT - bits);
- }
- else
- {
- hi = (~(unsigned HOST_WIDE_INT) 0)
- >> (2 * HOST_BITS_PER_WIDE_INT - bits);
- lo = ~(unsigned HOST_WIDE_INT) 0;
- }
+ hi = 0;
+ lo = ((~(unsigned HOST_WIDE_INT) 0)
+ >> (HOST_BITS_PER_WIDE_INT - prec));
}
else
{
- /* Sign extending in these cases. */
- if (bits <= HOST_BITS_PER_WIDE_INT)
- {
- hi = 0;
- lo = (~(unsigned HOST_WIDE_INT) 0)
- >> (HOST_BITS_PER_WIDE_INT - bits) >> 1;
- }
- else
- {
- hi = (~(unsigned HOST_WIDE_INT) 0)
- >> (2 * HOST_BITS_PER_WIDE_INT - bits) >> 1;
- lo = ~(unsigned HOST_WIDE_INT) 0;
- }
+ hi = ((~(unsigned HOST_WIDE_INT) 0)
+ >> (2 * HOST_BITS_PER_WIDE_INT - prec));
+ lo = ~(unsigned HOST_WIDE_INT) 0;
}
- return fold_convert (outer,
- build_int_cst_wide (inner, lo, hi));
+ return build_int_cst_wide (outer, lo, hi);
}
/* Returns the smallest value obtainable by casting something in INNER type to
lower_bound_in_type (tree outer, tree inner)
{
unsigned HOST_WIDE_INT lo, hi;
- unsigned bits = TYPE_PRECISION (inner);
-
- if (TYPE_UNSIGNED (outer) || TYPE_UNSIGNED (inner))
+ unsigned oprec = TYPE_PRECISION (outer);
+ unsigned iprec = TYPE_PRECISION (inner);
+
+ /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
+ and obtain 0. */
+ if (TYPE_UNSIGNED (outer)
+ /* If we are widening something of an unsigned type, OUTER type
+ contains all values of INNER type. In particular, both INNER
+ and OUTER types have zero in common. */
+ || (oprec > iprec && TYPE_UNSIGNED (inner)))
lo = hi = 0;
- else if (bits <= HOST_BITS_PER_WIDE_INT)
- {
- hi = ~(unsigned HOST_WIDE_INT) 0;
- lo = (~(unsigned HOST_WIDE_INT) 0) << (bits - 1);
- }
else
{
- hi = (~(unsigned HOST_WIDE_INT) 0) << (bits - HOST_BITS_PER_WIDE_INT - 1);
- lo = 0;
+ /* If we are widening a signed type to another signed type, we
+ want to obtain -2^(iprec-1). If we are keeping the
+ precision or narrowing to a signed type, we want to obtain
+ -2^(oprec-1). */
+ unsigned prec = oprec > iprec ? iprec : oprec;
+
+ if (prec <= HOST_BITS_PER_WIDE_INT)
+ {
+ hi = ~(unsigned HOST_WIDE_INT) 0;
+ lo = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
+ }
+ else
+ {
+ hi = ((~(unsigned HOST_WIDE_INT) 0)
+ << (prec - HOST_BITS_PER_WIDE_INT - 1));
+ lo = 0;
+ }
}
- return fold_convert (outer,
- build_int_cst_wide (inner, lo, hi));
+ return build_int_cst_wide (outer, lo, hi);
}
/* Return nonzero if two operands that are suitable for PHI nodes are
WALK_SUBTREE (TYPE_DOMAIN (type));
break;
- case BOOLEAN_TYPE:
- case ENUMERAL_TYPE:
- case INTEGER_TYPE:
- case CHAR_TYPE:
- case REAL_TYPE:
- WALK_SUBTREE (TYPE_MIN_VALUE (type));
- WALK_SUBTREE (TYPE_MAX_VALUE (type));
- break;
-
case OFFSET_TYPE:
WALK_SUBTREE (TREE_TYPE (type));
WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
interesting below this point in the tree. */
if (!walk_subtrees)
{
+ /* But we still need to check our siblings. */
if (code == TREE_LIST)
- /* But we still need to check our siblings. */
WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
+ else if (code == OMP_CLAUSE)
+ WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
else
return NULL_TREE;
}
result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
data, pset);
- if (result || ! walk_subtrees)
+ if (result || !walk_subtrees)
return result;
- /* If this is a DECL_EXPR, walk into various fields of the type that it's
- defining. We only want to walk into these fields of a type in this
- case. Note that decls get walked as part of the processing of a
- BIND_EXPR.
-
- ??? Precisely which fields of types that we are supposed to walk in
- this case vs. the normal case aren't well defined. */
- if (code == DECL_EXPR
- && TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL
- && TREE_CODE (TREE_TYPE (DECL_EXPR_DECL (*tp))) != ERROR_MARK)
+ switch (code)
{
- tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
+ case ERROR_MARK:
+ case IDENTIFIER_NODE:
+ case INTEGER_CST:
+ case REAL_CST:
+ case VECTOR_CST:
+ case STRING_CST:
+ case BLOCK:
+ case PLACEHOLDER_EXPR:
+ case SSA_NAME:
+ case FIELD_DECL:
+ case RESULT_DECL:
+ /* None of these have subtrees other than those already walked
+ above. */
+ break;
- /* Call the function for the type. See if it returns anything or
- doesn't want us to continue. If we are to continue, walk both
- the normal fields and those for the declaration case. */
- result = (*func) (type_p, &walk_subtrees, data);
- if (result || !walk_subtrees)
- return NULL_TREE;
+ case TREE_LIST:
+ WALK_SUBTREE (TREE_VALUE (*tp));
+ WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
+ break;
- result = walk_type_fields (*type_p, func, data, pset);
- if (result)
- return result;
+ case TREE_VEC:
+ {
+ int len = TREE_VEC_LENGTH (*tp);
- WALK_SUBTREE (TYPE_SIZE (*type_p));
- WALK_SUBTREE (TYPE_SIZE_UNIT (*type_p));
+ if (len == 0)
+ break;
- /* If this is a record type, also walk the fields. */
- if (TREE_CODE (*type_p) == RECORD_TYPE
- || TREE_CODE (*type_p) == UNION_TYPE
- || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
- {
- tree field;
+ /* Walk all elements but the first. */
+ while (--len)
+ WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
- for (field = TYPE_FIELDS (*type_p); field;
- field = TREE_CHAIN (field))
- {
- /* We'd like to look at the type of the field, but we can easily
- get infinite recursion. So assume it's pointed to elsewhere
- in the tree. Also, ignore things that aren't fields. */
- if (TREE_CODE (field) != FIELD_DECL)
- continue;
-
- WALK_SUBTREE (DECL_FIELD_OFFSET (field));
- WALK_SUBTREE (DECL_SIZE (field));
- WALK_SUBTREE (DECL_SIZE_UNIT (field));
- if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
- WALK_SUBTREE (DECL_QUALIFIER (field));
- }
- }
- }
+ /* Now walk the first one as a tail call. */
+ WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
+ }
- else if (code != SAVE_EXPR
- && code != BIND_EXPR
- && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
- {
- int i, len;
+ case COMPLEX_CST:
+ WALK_SUBTREE (TREE_REALPART (*tp));
+ WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
- /* Walk over all the sub-trees of this operand. */
- len = TREE_CODE_LENGTH (code);
- /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
- But, we only want to walk once. */
- if (code == TARGET_EXPR
- && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
- --len;
+ case CONSTRUCTOR:
+ {
+ unsigned HOST_WIDE_INT idx;
+ constructor_elt *ce;
- /* Go through the subtrees. We need to do this in forward order so
- that the scope of a FOR_EXPR is handled properly. */
-#ifdef DEBUG_WALK_TREE
- for (i = 0; i < len; ++i)
- WALK_SUBTREE (TREE_OPERAND (*tp, i));
-#else
- for (i = 0; i < len - 1; ++i)
- WALK_SUBTREE (TREE_OPERAND (*tp, i));
+ for (idx = 0;
+ VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (*tp), idx, ce);
+ idx++)
+ WALK_SUBTREE (ce->value);
+ }
+ break;
- if (len)
- {
- /* The common case is that we may tail recurse here. */
- if (code != BIND_EXPR
- && !TREE_CHAIN (*tp))
- WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
- else
- WALK_SUBTREE (TREE_OPERAND (*tp, len - 1));
- }
-#endif
- }
+ case SAVE_EXPR:
+ WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
- /* If this is a type, walk the needed fields in the type. */
- else if (TYPE_P (*tp))
- {
- result = walk_type_fields (*tp, func, data, pset);
- if (result)
- return result;
- }
- else
- {
- /* Not one of the easy cases. We must explicitly go through the
- children. */
- switch (code)
- {
- case ERROR_MARK:
- case IDENTIFIER_NODE:
- case INTEGER_CST:
- case REAL_CST:
- case VECTOR_CST:
- case STRING_CST:
- case BLOCK:
- case PLACEHOLDER_EXPR:
- case SSA_NAME:
- case FIELD_DECL:
- case RESULT_DECL:
- /* None of these have subtrees other than those already walked
- above. */
- break;
+ case BIND_EXPR:
+ {
+ tree decl;
+ for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
+ {
+ /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
+ into declarations that are just mentioned, rather than
+ declared; they don't really belong to this part of the tree.
+ And, we can see cycles: the initializer for a declaration
+ can refer to the declaration itself. */
+ WALK_SUBTREE (DECL_INITIAL (decl));
+ WALK_SUBTREE (DECL_SIZE (decl));
+ WALK_SUBTREE (DECL_SIZE_UNIT (decl));
+ }
+ WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
+ }
- case TREE_LIST:
- WALK_SUBTREE (TREE_VALUE (*tp));
- WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
- break;
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i;
+ for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
+ WALK_SUBTREE (*tsi_stmt_ptr (i));
+ }
+ break;
- case TREE_VEC:
+ case OMP_CLAUSE:
+ switch (OMP_CLAUSE_CODE (*tp))
+ {
+ case OMP_CLAUSE_PRIVATE:
+ case OMP_CLAUSE_SHARED:
+ case OMP_CLAUSE_FIRSTPRIVATE:
+ case OMP_CLAUSE_LASTPRIVATE:
+ case OMP_CLAUSE_COPYIN:
+ case OMP_CLAUSE_COPYPRIVATE:
+ case OMP_CLAUSE_IF:
+ case OMP_CLAUSE_NUM_THREADS:
+ case OMP_CLAUSE_SCHEDULE:
+ WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
+ /* FALLTHRU */
+
+ case OMP_CLAUSE_NOWAIT:
+ case OMP_CLAUSE_ORDERED:
+ case OMP_CLAUSE_DEFAULT:
+ WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
+
+ case OMP_CLAUSE_REDUCTION:
{
- int len = TREE_VEC_LENGTH (*tp);
+ int i;
+ for (i = 0; i < 4; i++)
+ WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
+ WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
+ }
- if (len == 0)
- break;
+ default:
+ gcc_unreachable ();
+ }
+ break;
- /* Walk all elements but the first. */
- while (--len)
- WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
+ case TARGET_EXPR:
+ {
+ int i, len;
+
+ /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
+ But, we only want to walk once. */
+ len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
+ for (i = 0; i < len; ++i)
+ WALK_SUBTREE (TREE_OPERAND (*tp, i));
+ WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
+ }
- /* Now walk the first one as a tail call. */
- WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
- }
+ case DECL_EXPR:
+ /* If this is a TYPE_DECL, walk into the fields of the type that it's
+ defining. We only want to walk into these fields of a type in this
+ case and not in the general case of a mere reference to the type.
- case COMPLEX_CST:
- WALK_SUBTREE (TREE_REALPART (*tp));
- WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
+ The criterion is as follows: if the field can be an expression, it
+ must be walked only here. This should be in keeping with the fields
+ that are directly gimplified in gimplify_type_sizes in order for the
+ mark/copy-if-shared/unmark machinery of the gimplifier to work with
+ variable-sized types.
+
+ Note that DECLs get walked as part of processing the BIND_EXPR. */
+ if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
+ {
+ tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
+ if (TREE_CODE (*type_p) == ERROR_MARK)
+ return NULL_TREE;
+
+ /* Call the function for the type. See if it returns anything or
+ doesn't want us to continue. If we are to continue, walk both
+ the normal fields and those for the declaration case. */
+ result = (*func) (type_p, &walk_subtrees, data);
+ if (result || !walk_subtrees)
+ return result;
- case CONSTRUCTOR:
- WALK_SUBTREE_TAIL (CONSTRUCTOR_ELTS (*tp));
+ result = walk_type_fields (*type_p, func, data, pset);
+ if (result)
+ return result;
- case SAVE_EXPR:
- WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
+ /* If this is a record type, also walk the fields. */
+ if (TREE_CODE (*type_p) == RECORD_TYPE
+ || TREE_CODE (*type_p) == UNION_TYPE
+ || TREE_CODE (*type_p) == QUAL_UNION_TYPE)
+ {
+ tree field;
- case BIND_EXPR:
- {
- tree decl;
- for (decl = BIND_EXPR_VARS (*tp); decl; decl = TREE_CHAIN (decl))
- {
- /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
- into declarations that are just mentioned, rather than
- declared; they don't really belong to this part of the tree.
- And, we can see cycles: the initializer for a declaration
- can refer to the declaration itself. */
- WALK_SUBTREE (DECL_INITIAL (decl));
- WALK_SUBTREE (DECL_SIZE (decl));
- WALK_SUBTREE (DECL_SIZE_UNIT (decl));
- }
- WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
- }
+ for (field = TYPE_FIELDS (*type_p); field;
+ field = TREE_CHAIN (field))
+ {
+ /* We'd like to look at the type of the field, but we can
+ easily get infinite recursion. So assume it's pointed
+ to elsewhere in the tree. Also, ignore things that
+ aren't fields. */
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ WALK_SUBTREE (DECL_FIELD_OFFSET (field));
+ WALK_SUBTREE (DECL_SIZE (field));
+ WALK_SUBTREE (DECL_SIZE_UNIT (field));
+ if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
+ WALK_SUBTREE (DECL_QUALIFIER (field));
+ }
+ }
- case STATEMENT_LIST:
- {
- tree_stmt_iterator i;
- for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
- WALK_SUBTREE (*tsi_stmt_ptr (i));
- }
- break;
+ /* Same for scalar types. */
+ else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
+ || TREE_CODE (*type_p) == ENUMERAL_TYPE
+ || TREE_CODE (*type_p) == INTEGER_TYPE
+ || TREE_CODE (*type_p) == REAL_TYPE)
+ {
+ WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
+ WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
+ }
- default:
- /* ??? This could be a language-defined node. We really should make
- a hook for it, but right now just ignore it. */
- break;
+ WALK_SUBTREE (TYPE_SIZE (*type_p));
+ WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
}
+ /* FALLTHRU */
+
+ default:
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ || IS_GIMPLE_STMT_CODE_CLASS (TREE_CODE_CLASS (code)))
+ {
+ int i, len;
+
+ /* Walk over all the sub-trees of this operand. */
+ len = TREE_OPERAND_LENGTH (*tp);
+
+ /* Go through the subtrees. We need to do this in forward order so
+ that the scope of a FOR_EXPR is handled properly. */
+ if (len)
+ {
+ for (i = 0; i < len - 1; ++i)
+ WALK_SUBTREE (GENERIC_TREE_OPERAND (*tp, i));
+ WALK_SUBTREE_TAIL (GENERIC_TREE_OPERAND (*tp, len - 1));
+ }
+ }
+ /* If this is a type, walk the needed fields in the type. */
+ else if (TYPE_P (*tp))
+ return walk_type_fields (*tp, func, data, pset);
+ break;
}
/* We didn't find what we were looking for. */
return result;
}
+
+/* Return true if STMT is an empty statement or contains nothing but
+ empty statements. */
+
+bool
+empty_body_p (tree stmt)
+{
+ tree_stmt_iterator i;
+ tree body;
+
+ /* A bare empty statement is trivially empty; otherwise only
+ BIND_EXPRs and STATEMENT_LISTs can hold an empty body. */
+ if (IS_EMPTY_STMT (stmt))
+ return true;
+ else if (TREE_CODE (stmt) == BIND_EXPR)
+ body = BIND_EXPR_BODY (stmt);
+ else if (TREE_CODE (stmt) == STATEMENT_LIST)
+ body = stmt;
+ else
+ return false;
+
+ /* Recurse: every statement in the sequence must itself be empty. */
+ for (i = tsi_start (body); !tsi_end_p (i); tsi_next (&i))
+ if (!empty_body_p (tsi_stmt (i)))
+ return false;
+
+ return true;
+}
+
+/* Return the address of the BLOCK field of node T.  T must belong to
+   the expression code class or the GIMPLE statement code class; any
+   other code class aborts via gcc_unreachable.  */
+
+tree *
+tree_block (tree t)
+{
+ char const c = TREE_CODE_CLASS (TREE_CODE (t));
+
+ if (IS_EXPR_CODE_CLASS (c))
+ return &t->exp.block;
+ else if (IS_GIMPLE_STMT_CODE_CLASS (c))
+ return &GIMPLE_STMT_BLOCK (t);
+ /* Not reached (gcc_unreachable does not return); the return quiets
+ missing-return warnings. */
+ gcc_unreachable ();
+ return NULL;
+}
+
+/* Return the address of operand I of NODE, handling both GIMPLE
+   statement tuples and ordinary trees.  */
+
+tree *
+generic_tree_operand (tree node, int i)
+{
+ if (GIMPLE_STMT_P (node))
+ return &GIMPLE_STMT_OPERAND (node, i);
+ return &TREE_OPERAND (node, i);
+}
+
+/* Return the address of the type of NODE.  GIMPLE statements carry no
+   type of their own, so for them the address of void_type_node is
+   returned instead.  */
+
+tree *
+generic_tree_type (tree node)
+{
+ if (GIMPLE_STMT_P (node))
+ return &void_type_node;
+ return &TREE_TYPE (node);
+}
+
+/* Build and return a TREE_LIST of arguments in the CALL_EXPR exp.
+ FIXME: don't use this function. It exists for compatibility with
+ the old representation of CALL_EXPRs where a list was used to hold the
+ arguments. Places that currently extract the arglist from a CALL_EXPR
+ ought to be rewritten to use the CALL_EXPR itself. */
+tree
+call_expr_arglist (tree exp)
+{
+ tree arglist = NULL_TREE;
+ int i;
+ /* Walk the arguments backwards so that consing onto the front
+ yields the list in original argument order. */
+ for (i = call_expr_nargs (exp) - 1; i >= 0; i--)
+ arglist = tree_cons (NULL_TREE, CALL_EXPR_ARG (exp, i), arglist);
+ return arglist;
+}
+
#include "gt-tree.h"