/* Language-independent node constructors for parse phase of GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* This file contains the low level primitives for operating on tree nodes,
including allocation, list operations, interning of identifiers,
#include "tree-flow.h"
#include "params.h"
#include "pointer-set.h"
+#include "fixed-value.h"
/* Each tree code class has an associated string representation.
These must correspond to the tree_code_class entries. */
{
case INTEGER_CST: return sizeof (struct tree_int_cst);
case REAL_CST: return sizeof (struct tree_real_cst);
+ case FIXED_CST: return sizeof (struct tree_fixed_cst);
case COMPLEX_CST: return sizeof (struct tree_complex);
case VECTOR_CST: return sizeof (struct tree_vector);
case STRING_CST: gcc_unreachable ();
return t;
}
+/* Return a new FIXED_CST node whose type is TYPE and value is F. */
+
+tree
+build_fixed (tree type, FIXED_VALUE_TYPE f)
+{
+ tree v;
+ FIXED_VALUE_TYPE *fp;
+
+ v = make_node (FIXED_CST);
+ fp = ggc_alloc (sizeof (FIXED_VALUE_TYPE));
+ memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
+
+ TREE_TYPE (v) = type;
+ TREE_FIXED_CST_PTR (v) = fp;
+ return v;
+}
/* Return a new REAL_CST node whose type is TYPE and value is D. */
TREE_CONSTANT (s) = 1;
TREE_INVARIANT (s) = 1;
TREE_STRING_LENGTH (s) = len;
- memcpy ((char *) TREE_STRING_POINTER (s), str, len);
- ((char *) TREE_STRING_POINTER (s))[len] = '\0';
+ memcpy (s->string.str, str, len);
+ s->string.str[len] = '\0';
return s;
}
case REAL_TYPE:
return build_real (type, dconst1);
+ case FIXED_POINT_TYPE:
+ /* We can only generate 1 for accum types. */
+ gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
+ return build_fixed (type, FCONST1(TYPE_MODE (type)));
+
case VECTOR_TYPE:
{
tree scalar, cst;
|| integer_nonzerop (TREE_IMAGPART (expr)))));
}
+/* Return 1 if EXPR is the fixed-point constant zero. */
+
+int
+fixed_zerop (const_tree expr)
+{
+ return (TREE_CODE (expr) == FIXED_CST
+ && double_int_zero_p (TREE_FIXED_CST (expr).data));
+}
+
/* Return the power of two represented by a tree node known to be a
power of two. */
make_unsigned_type). */
tree
-size_in_bytes (tree type)
+size_in_bytes (const_tree type)
{
tree t;
or return -1 if the size can vary or is larger than an integer. */
HOST_WIDE_INT
-max_int_size_in_bytes (tree type)
+max_int_size_in_bytes (const_tree type)
{
HOST_WIDE_INT size = -1;
tree size_tree;
/* tcc_constant cases. */
case INTEGER_CST: return TS_INT_CST;
case REAL_CST: return TS_REAL_CST;
+ case FIXED_CST: return TS_FIXED_CST;
case COMPLEX_CST: return TS_COMPLEX;
case VECTOR_CST: return TS_VECTOR;
case STRING_CST: return TS_STRING;
or offset that depends on a field within a record. */
bool
-contains_placeholder_p (tree exp)
+contains_placeholder_p (const_tree exp)
{
enum tree_code code;
{
case CALL_EXPR:
{
- tree arg;
- call_expr_arg_iterator iter;
- FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
+ const_tree arg;
+ const_call_expr_arg_iterator iter;
+ FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
if (CONTAINS_PLACEHOLDER_P (arg))
return 1;
return 0;
case INTEGER_TYPE:
case REAL_TYPE:
+ case FIXED_POINT_TYPE:
/* Here we just check the bounds. */
return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
|| CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
{
tree copy = NULL_TREE;
int i;
- int n = TREE_OPERAND_LENGTH (exp);
- for (i = 1; i < n; i++)
+
+ for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
{
tree op = TREE_OPERAND (exp, i);
tree newop = SUBSTITUTE_IN_EXPR (op, f, r);
else
return exp;
}
+ break;
default:
gcc_unreachable ();
TREE_SET_CODE (t, code);
TREE_TYPE (t) = type;
-#ifdef USE_MAPPED_LOCATION
SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
-#else
- SET_EXPR_LOCUS (t, NULL);
-#endif
TREE_OPERAND (t, 0) = node;
TREE_BLOCK (t) = NULL_TREE;
if (node && !TYPE_P (node))
if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
- && TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
&& useless_type_conversion_p (sizetype, TREE_TYPE (arg1)));
t = make_node_stat (code PASS_MEM_STAT);
return block;
}
-#if 1 /* ! defined(USE_MAPPED_LOCATION) */
-/* ??? gengtype doesn't handle conditionals */
-static GTY(()) source_locus last_annotated_node;
-#endif
-
-#ifdef USE_MAPPED_LOCATION
-
expanded_location
expand_location (source_location loc)
{
}
else
{
- const struct line_map *map = linemap_lookup (&line_table, loc);
+ const struct line_map *map = linemap_lookup (line_table, loc);
xloc.file = map->to_file;
xloc.line = SOURCE_LINE (map, loc);
xloc.column = SOURCE_COLUMN (map, loc);
return xloc;
}
-#else
-
-/* Record the exact location where an expression or an identifier were
- encountered. */
-
-void
-annotate_with_file_line (tree node, const char *file, int line)
-{
- /* Roughly one percent of the calls to this function are to annotate
- a node with the same information already attached to that node!
- Just return instead of wasting memory. */
- if (EXPR_LOCUS (node)
- && EXPR_LINENO (node) == line
- && (EXPR_FILENAME (node) == file
- || !strcmp (EXPR_FILENAME (node), file)))
- {
- last_annotated_node = EXPR_LOCUS (node);
- return;
- }
-
- /* In heavily macroized code (such as GCC itself) this single
- entry cache can reduce the number of allocations by more
- than half. */
- if (last_annotated_node
- && last_annotated_node->line == line
- && (last_annotated_node->file == file
- || !strcmp (last_annotated_node->file, file)))
- {
- SET_EXPR_LOCUS (node, last_annotated_node);
- return;
- }
-
- SET_EXPR_LOCUS (node, ggc_alloc (sizeof (location_t)));
- EXPR_LINENO (node) = line;
- EXPR_FILENAME (node) = file;
- last_annotated_node = EXPR_LOCUS (node);
-}
-
-void
-annotate_with_locus (tree node, location_t locus)
-{
- annotate_with_file_line (node, locus.file, locus.line);
-}
-#endif
\f
/* Source location accessor functions. */
decls and constants can be shared among multiple locations, so
return nothing. */
location_t
-expr_location (tree node)
+expr_location (const_tree node)
{
-#ifdef USE_MAPPED_LOCATION
if (GIMPLE_STMT_P (node))
return GIMPLE_STMT_LOCUS (node);
return EXPR_P (node) ? node->exp.locus : UNKNOWN_LOCATION;
-#else
- if (GIMPLE_STMT_P (node))
- return EXPR_HAS_LOCATION (node)
- ? *GIMPLE_STMT_LOCUS (node) : UNKNOWN_LOCATION;
- return EXPR_HAS_LOCATION (node) ? *node->exp.locus : UNKNOWN_LOCATION;
-#endif
}
void
set_expr_location (tree node, location_t locus)
{
-#ifdef USE_MAPPED_LOCATION
if (GIMPLE_STMT_P (node))
GIMPLE_STMT_LOCUS (node) = locus;
else
EXPR_CHECK (node)->exp.locus = locus;
-#else
- annotate_with_locus (node, locus);
-#endif
}
bool
-expr_has_location (tree node)
+expr_has_location (const_tree node)
{
-#ifdef USE_MAPPED_LOCATION
return expr_location (node) != UNKNOWN_LOCATION;
-#else
- return expr_locus (node) != NULL;
-#endif
}
-#ifdef USE_MAPPED_LOCATION
source_location *
-#else
-source_locus
-#endif
-expr_locus (tree node)
+expr_locus (const_tree node)
{
-#ifdef USE_MAPPED_LOCATION
- if (GIMPLE_STMT_P (node))
- return &GIMPLE_STMT_LOCUS (node);
- return EXPR_P (node) ? &node->exp.locus : (location_t *) NULL;
-#else
if (GIMPLE_STMT_P (node))
- return GIMPLE_STMT_LOCUS (node);
- /* ?? The cast below was originally "(location_t *)" in the macro,
- but that makes no sense. ?? */
- return EXPR_P (node) ? node->exp.locus : (source_locus) NULL;
-#endif
+ return CONST_CAST (source_location *, &GIMPLE_STMT_LOCUS (node));
+ return (EXPR_P (node)
+ ? CONST_CAST (source_location *, &node->exp.locus)
+ : (source_location *) NULL);
}
void
-set_expr_locus (tree node,
-#ifdef USE_MAPPED_LOCATION
- source_location *loc
-#else
- source_locus loc
-#endif
- )
+set_expr_locus (tree node, source_location *loc)
{
-#ifdef USE_MAPPED_LOCATION
if (loc == NULL)
{
if (GIMPLE_STMT_P (node))
else
EXPR_CHECK (node)->exp.locus = *loc;
}
-#else
- if (GIMPLE_STMT_P (node))
- GIMPLE_STMT_LOCUS (node) = loc;
- else
- EXPR_CHECK (node)->exp.locus = loc;
-#endif
}
-const char **
-expr_filename (tree node)
+/* Return the file name of the location of NODE. */
+const char *
+expr_filename (const_tree node)
{
-#ifdef USE_MAPPED_LOCATION
if (GIMPLE_STMT_P (node))
- return &LOCATION_FILE (GIMPLE_STMT_LOCUS (node));
- return &LOCATION_FILE (EXPR_CHECK (node)->exp.locus);
-#else
- if (GIMPLE_STMT_P (node))
- return &GIMPLE_STMT_LOCUS (node)->file;
- return &(EXPR_CHECK (node)->exp.locus->file);
-#endif
+ return LOCATION_FILE (GIMPLE_STMT_LOCUS (node));
+ return LOCATION_FILE (EXPR_CHECK (node)->exp.locus);
}
-int *
-expr_lineno (tree node)
+/* Return the line number of the location of NODE. */
+int
+expr_lineno (const_tree node)
{
-#ifdef USE_MAPPED_LOCATION
- if (GIMPLE_STMT_P (node))
- return &LOCATION_LINE (GIMPLE_STMT_LOCUS (node));
- return &LOCATION_LINE (EXPR_CHECK (node)->exp.locus);
-#else
if (GIMPLE_STMT_P (node))
- return &GIMPLE_STMT_LOCUS (node)->line;
- return &EXPR_CHECK (node)->exp.locus->line;
-#endif
+ return LOCATION_LINE (GIMPLE_STMT_LOCUS (node));
+ return LOCATION_LINE (EXPR_CHECK (node)->exp.locus);
}
+
\f
/* Return a declaration like DDECL except that its DECL_ATTRIBUTES
is ATTRIBUTE. */
tree ntype;
enum tree_code code = TREE_CODE (ttype);
- ntype = copy_node (ttype);
-
- TYPE_POINTER_TO (ntype) = 0;
- TYPE_REFERENCE_TO (ntype) = 0;
- TYPE_ATTRIBUTES (ntype) = attribute;
+ /* Building a distinct copy of a tagged type is inappropriate; it
+ causes breakage in code that expects there to be a one-to-one
+ relationship between a struct and its fields.
+ build_duplicate_type is another solution (as used in
+ handle_transparent_union_attribute), but that doesn't play well
+ with the stronger C++ type identity model. */
+ if (TREE_CODE (ttype) == RECORD_TYPE
+ || TREE_CODE (ttype) == UNION_TYPE
+ || TREE_CODE (ttype) == QUAL_UNION_TYPE
+ || TREE_CODE (ttype) == ENUMERAL_TYPE)
+ {
+ warning (OPT_Wattributes,
+ "ignoring attributes applied to %qT after definition",
+ TYPE_MAIN_VARIANT (ttype));
+ return build_qualified_type (ttype, quals);
+ }
- if (TYPE_STRUCTURAL_EQUALITY_P (ttype))
- SET_TYPE_STRUCTURAL_EQUALITY (ntype);
- else
- TYPE_CANONICAL (ntype)
- = build_qualified_type (TYPE_CANONICAL (ttype), quals);
+ ntype = build_distinct_type_copy (ttype);
- /* Create a new main variant of TYPE. */
- TYPE_MAIN_VARIANT (ntype) = ntype;
- TYPE_NEXT_VARIANT (ntype) = 0;
+ TYPE_ATTRIBUTES (ntype) = attribute;
set_type_quals (ntype, TYPE_UNQUALIFIED);
hashcode = iterative_hash_object (code, hashcode);
hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
break;
case ARRAY_TYPE:
- hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
- hashcode);
+ if (TYPE_DOMAIN (ntype))
+ hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
+ hashcode);
break;
case INTEGER_TYPE:
hashcode = iterative_hash_object
(TREE_INT_CST_HIGH (TYPE_MAX_VALUE (ntype)), hashcode);
break;
case REAL_TYPE:
+ case FIXED_POINT_TYPE:
{
unsigned int precision = TYPE_PRECISION (ntype);
hashcode = iterative_hash_object (precision, hashcode);
/* If the target-dependent attributes make NTYPE different from
its canonical type, we will need to use structural equality
checks for this qualified type. */
- if (!targetm.comp_type_attributes (ntype, ttype))
+ ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
+ if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
+ || !targetm.comp_type_attributes (ntype, ttype))
SET_TYPE_STRUCTURAL_EQUALITY (ntype);
+ else
+ TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
ttype = build_qualified_type (ntype, quals);
}
+ else if (TYPE_QUALS (ttype) != quals)
+ ttype = build_qualified_type (ttype, quals);
return ttype;
}
if (is_attribute_with_length_p (attr_name, attr_len, TREE_PURPOSE (l)))
return l;
}
-
return NULL_TREE;
}
return NULL_TREE;
}
+ if (TREE_CODE (node) == TYPE_DECL
+ && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
+ && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
+ {
+ *no_add_attrs = true;
+ warning (OPT_Wattributes, "%qs attribute ignored",
+ IDENTIFIER_POINTER (name));
+ return NULL_TREE;
+ }
+
/* Report error on dllimport ambiguities seen now before they cause
any damage. */
else if (is_attribute_p ("dllimport", name))
TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
}
-/* Returns true iff cand is equivalent to base with type_quals. */
+/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
bool
check_qualified_type (const_tree cand, const_tree base, int type_quals)
struct tree_map_base in;
gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
- gcc_assert (TREE_CODE (decl) == VAR_DECL
- ? DECL_HAS_INIT_PRIORITY_P (decl)
- : DECL_STATIC_CONSTRUCTOR (decl));
in.from = decl;
h = htab_find (init_priority_for_decl, &in);
return h ? h->init : DEFAULT_INIT_PRIORITY;
struct tree_map_base in;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- gcc_assert (DECL_STATIC_DESTRUCTOR (decl));
in.from = decl;
h = htab_find (init_priority_for_decl, &in);
return h ? h->fini : DEFAULT_INIT_PRIORITY;
|| tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
TYPE_MIN_VALUE (b->type))));
+ case FIXED_POINT_TYPE:
+ return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
+
case OFFSET_TYPE:
return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
TYPE_FIELDS (b->type))));
case FUNCTION_TYPE:
- return (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
- || (TYPE_ARG_TYPES (a->type)
- && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
- && TYPE_ARG_TYPES (b->type)
- && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
- && type_list_equal (TYPE_ARG_TYPES (a->type),
- TYPE_ARG_TYPES (b->type))));
+ if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
+ || (TYPE_ARG_TYPES (a->type)
+ && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
+ && TYPE_ARG_TYPES (b->type)
+ && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
+ && type_list_equal (TYPE_ARG_TYPES (a->type),
+ TYPE_ARG_TYPES (b->type))))
+ break;
+ return 0;
default:
return 0;
}
+
+ if (lang_hooks.types.type_hash_eq != NULL)
+ return lang_hooks.types.type_hash_eq (a->type, b->type);
+
+ return 1;
}
/* Return the cached hash value. */
h->hash = hashcode;
h->type = type;
loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
- *(struct type_hash **) loc = h;
+ *loc = (void *)h;
}
/* Given TYPE, and HASHCODE its hash code, return the canonical
equivalent to l1. */
int
-attribute_list_equal (tree l1, tree l2)
+attribute_list_equal (const_tree l1, const_tree l2)
{
return attribute_list_contained (l1, l2)
&& attribute_list_contained (l2, l1);
correctly. */
int
-attribute_list_contained (tree l1, tree l2)
+attribute_list_contained (const_tree l1, const_tree l2)
{
- tree t1, t2;
+ const_tree t1, t2;
/* First check the obvious, maybe the lists are identical. */
if (l1 == l2)
for (; t2 != 0; t2 = TREE_CHAIN (t2))
{
- tree attr;
- for (attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)), l1);
+ const_tree attr;
+ /* This CONST_CAST is okay because lookup_attribute does not
+ modify its argument and the return value is assigned to a
+ const_tree. */
+ for (attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)),
+ CONST_CAST_TREE(l1));
attr != NULL_TREE;
attr = lookup_attribute (IDENTIFIER_POINTER (TREE_PURPOSE (t2)),
TREE_CHAIN (attr)))
Also, the TREE_PURPOSEs must match. */
int
-type_list_equal (tree l1, tree l2)
+type_list_equal (const_tree l1, const_tree l2)
{
- tree t1, t2;
+ const_tree t1, t2;
for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
if (TREE_VALUE (t1) != TREE_VALUE (t2)
&& (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
|| (! pos && TREE_INT_CST_HIGH (t) == -1
&& (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
- && !TYPE_UNSIGNED (TREE_TYPE (t)))
+ && (!TYPE_UNSIGNED (TREE_TYPE (t))
+ || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (TREE_TYPE (t)))))
|| (pos && TREE_INT_CST_HIGH (t) == 0)));
}
this function. */
int
-simple_cst_equal (tree t1, tree t2)
+simple_cst_equal (const_tree t1, const_tree t2)
{
enum tree_code code1, code2;
int cmp;
case REAL_CST:
return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
+ case FIXED_CST:
+ return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
+
case STRING_CST:
return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
&& ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
if (call_expr_nargs (t1) != call_expr_nargs (t2))
return 0;
{
- tree arg1, arg2;
- call_expr_arg_iterator iter1, iter2;
- for (arg1 = first_call_expr_arg (t1, &iter1),
- arg2 = first_call_expr_arg (t2, &iter2);
+ const_tree arg1, arg2;
+ const_call_expr_arg_iterator iter1, iter2;
+ for (arg1 = first_const_call_expr_arg (t1, &iter1),
+ arg2 = first_const_call_expr_arg (t2, &iter2);
arg1 && arg2;
- arg1 = next_call_expr_arg (&iter1),
- arg2 = next_call_expr_arg (&iter2))
+ arg1 = next_const_call_expr_arg (&iter1),
+ arg2 = next_const_call_expr_arg (&iter2))
{
cmp = simple_cst_equal (arg1, arg2);
if (cmp <= 0)
return iterative_hash_hashval_t (val2, val);
}
+ case FIXED_CST:
+ {
+ unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
+
+ return iterative_hash_hashval_t (val2, val);
+ }
case STRING_CST:
return iterative_hash (TREE_STRING_POINTER (t),
TREE_STRING_LENGTH (t), val);
the innermost dimension of ARRAY. */
tree
-get_inner_array_type (tree array)
+get_inner_array_type (const_tree array)
{
tree type = TREE_TYPE (array);
if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
|| TYPE_STRUCTURAL_EQUALITY_P (type))
SET_TYPE_STRUCTURAL_EQUALITY (t);
- else if (TYPE_CANONICAL (basetype) != basetype
+ else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
|| TYPE_CANONICAL (type) != type)
TYPE_CANONICAL (t)
- = build_offset_type (TYPE_CANONICAL (basetype),
+ = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
TYPE_CANONICAL (type));
}
= build_complex_type (TYPE_CANONICAL (component_type));
}
- /* If we are writing Dwarf2 output we need to create a name,
- since complex is a fundamental type. */
- if ((write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
- && ! TYPE_NAME (t))
+ /* We need to create a name, since complex is a fundamental type. */
+ if (! TYPE_NAME (t))
{
const char *name;
if (component_type == char_type_node)
If FOR_TYPE is nonzero, we return a value which, if converted to
type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
- If FOR_TYPE is nonzero, unaligned bit-field references may be changed to the
- narrowest type that can hold the value, even if they don't exactly fit.
- Otherwise, bit-field references are changed to a narrower type
- only if they can be fetched directly from memory in that type.
-
OP must have integer, real or enumeral type. Pointers are not allowed!
There are some cases where the obvious value we could return
}
}
- if (TREE_CODE (op) == COMPONENT_REF
- /* Since type_for_size always gives an integer type. */
- && TREE_CODE (type) != REAL_TYPE
- /* Don't crash if field not laid out yet. */
- && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
- && host_integerp (DECL_SIZE (TREE_OPERAND (op, 1)), 1))
- {
- unsigned int innerprec
- = tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1);
- int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
- || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
- type = lang_hooks.types.type_for_size (innerprec, unsignedp);
-
- /* We can get this structure field in the narrowest type it fits in.
- If FOR_TYPE is 0, do this only for a field that matches the
- narrower type exactly and is aligned for it
- The resulting extension to its nominal type (a fullword type)
- must fit the same conditions as for other extensions. */
-
- if (type != 0
- && INT_CST_LT_UNSIGNED (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (op)))
- && (for_type || ! DECL_BIT_FIELD (TREE_OPERAND (op, 1)))
- && (! uns || final_prec <= innerprec || unsignedp))
- {
- win = build3 (COMPONENT_REF, type, TREE_OPERAND (op, 0),
- TREE_OPERAND (op, 1), NULL_TREE);
- TREE_SIDE_EFFECTS (win) = TREE_SIDE_EFFECTS (op);
- TREE_THIS_VOLATILE (win) = TREE_THIS_VOLATILE (op);
- }
- }
-
return win;
}
\f
if (TREE_CODE (op) == COMPONENT_REF
/* Since type_for_size always gives an integer type. */
&& TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
+ && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
/* Ensure field is laid out already. */
&& DECL_SIZE (TREE_OPERAND (op, 1)) != 0
&& host_integerp (DECL_SIZE (TREE_OPERAND (op, 1)), 1))
precision of the type are returned instead. */
void
-get_type_static_bounds (tree type, mpz_t min, mpz_t max)
+get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
{
if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
&& TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
}
}
+/* auto_var_in_fn_p is called to determine whether VAR is an automatic
+ variable defined in function FN. */
+
+bool
+auto_var_in_fn_p (const_tree var, const_tree fn)
+{
+ return (DECL_P (var) && DECL_CONTEXT (var) == fn
+ && (((TREE_CODE (var) == VAR_DECL || TREE_CODE (var) == PARM_DECL)
+ && ! TREE_STATIC (var))
+ || TREE_CODE (var) == LABEL_DECL
+ || TREE_CODE (var) == RESULT_DECL));
+}
+
/* Subprogram of following function. Called by walk_tree.
Return *TP if it is an automatic variable or parameter of the
*walk_subtrees = 0;
else if (DECL_P (*tp)
- && lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
+ && auto_var_in_fn_p (*tp, fn))
return *tp;
return NULL_TREE;
case INTEGER_TYPE:
case REAL_TYPE:
+ case FIXED_POINT_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
/* Scalar types are variably modified if their end points
determined. */
tree
-get_callee_fndecl (tree call)
+get_callee_fndecl (const_tree call)
{
tree addr;
if (call == error_mark_node)
- return call;
+ return error_mark_node;
/* It's invalid to call this function with anything but a
CALL_EXPR. */
return make_signed_type (size);
}
+/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
+
+static tree
+make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
+{
+ if (satp)
+ {
+ if (size == SHORT_FRACT_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_short_fract_type_node
+ : sat_short_fract_type_node;
+ if (size == FRACT_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
+ if (size == LONG_FRACT_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_long_fract_type_node
+ : sat_long_fract_type_node;
+ if (size == LONG_LONG_FRACT_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_long_long_fract_type_node
+ : sat_long_long_fract_type_node;
+ }
+ else
+ {
+ if (size == SHORT_FRACT_TYPE_SIZE)
+ return unsignedp ? unsigned_short_fract_type_node
+ : short_fract_type_node;
+ if (size == FRACT_TYPE_SIZE)
+ return unsignedp ? unsigned_fract_type_node : fract_type_node;
+ if (size == LONG_FRACT_TYPE_SIZE)
+ return unsignedp ? unsigned_long_fract_type_node
+ : long_fract_type_node;
+ if (size == LONG_LONG_FRACT_TYPE_SIZE)
+ return unsignedp ? unsigned_long_long_fract_type_node
+ : long_long_fract_type_node;
+ }
+
+ return make_fract_type (size, unsignedp, satp);
+}
+
+/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
+
+static tree
+make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
+{
+ if (satp)
+ {
+ if (size == SHORT_ACCUM_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_short_accum_type_node
+ : sat_short_accum_type_node;
+ if (size == ACCUM_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
+ if (size == LONG_ACCUM_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_long_accum_type_node
+ : sat_long_accum_type_node;
+ if (size == LONG_LONG_ACCUM_TYPE_SIZE)
+ return unsignedp ? sat_unsigned_long_long_accum_type_node
+ : sat_long_long_accum_type_node;
+ }
+ else
+ {
+ if (size == SHORT_ACCUM_TYPE_SIZE)
+ return unsignedp ? unsigned_short_accum_type_node
+ : short_accum_type_node;
+ if (size == ACCUM_TYPE_SIZE)
+ return unsignedp ? unsigned_accum_type_node : accum_type_node;
+ if (size == LONG_ACCUM_TYPE_SIZE)
+ return unsignedp ? unsigned_long_accum_type_node
+ : long_accum_type_node;
+ if (size == LONG_LONG_ACCUM_TYPE_SIZE)
+ return unsignedp ? unsigned_long_long_accum_type_node
+ : long_long_accum_type_node;
+ }
+
+ return make_accum_type (size, unsignedp, satp);
+}
+
/* Create nodes for all integer types (and error_mark_node) using the sizes
of C datatypes. The caller should call set_sizetype soon after calling
this function to select one of the types as sizetype. */
complex_double_type_node = build_complex_type (double_type_node);
complex_long_double_type_node = build_complex_type (long_double_type_node);
+/* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
+#define MAKE_FIXED_TYPE_NODE(KIND,WIDTH,SIZE) \
+ sat_ ## WIDTH ## KIND ## _type_node = \
+ make_sat_signed_ ## KIND ## _type (SIZE); \
+ sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
+ make_sat_unsigned_ ## KIND ## _type (SIZE); \
+ WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
+ unsigned_ ## WIDTH ## KIND ## _type_node = \
+ make_unsigned_ ## KIND ## _type (SIZE);
+
+/* Make fixed-point type nodes based on four different widths. */
+#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
+ MAKE_FIXED_TYPE_NODE (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
+ MAKE_FIXED_TYPE_NODE (N1, , N2 ## _TYPE_SIZE) \
+ MAKE_FIXED_TYPE_NODE (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
+ MAKE_FIXED_TYPE_NODE (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
+
+/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
+#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
+ NAME ## _type_node = \
+ make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
+ u ## NAME ## _type_node = \
+ make_or_reuse_unsigned_ ## KIND ## _type \
+ (GET_MODE_BITSIZE (U ## MODE ## mode)); \
+ sat_ ## NAME ## _type_node = \
+ make_or_reuse_sat_signed_ ## KIND ## _type \
+ (GET_MODE_BITSIZE (MODE ## mode)); \
+ sat_u ## NAME ## _type_node = \
+ make_or_reuse_sat_unsigned_ ## KIND ## _type \
+ (GET_MODE_BITSIZE (U ## MODE ## mode));
+
+ /* Fixed-point type and mode nodes. */
+ MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
+ MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
+ MAKE_FIXED_MODE_NODE (fract, qq, QQ)
+ MAKE_FIXED_MODE_NODE (fract, hq, HQ)
+ MAKE_FIXED_MODE_NODE (fract, sq, SQ)
+ MAKE_FIXED_MODE_NODE (fract, dq, DQ)
+ MAKE_FIXED_MODE_NODE (fract, tq, TQ)
+ MAKE_FIXED_MODE_NODE (accum, ha, HA)
+ MAKE_FIXED_MODE_NODE (accum, sa, SA)
+ MAKE_FIXED_MODE_NODE (accum, da, DA)
+ MAKE_FIXED_MODE_NODE (accum, ta, TA)
+
{
tree t = targetm.build_builtin_va_list ();
inner,
TREE_CHAIN (TYPE_ARG_TYPES (type)));
}
+ else if (TREE_CODE (type) == OFFSET_TYPE)
+ {
+ inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
+ outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
+ }
else
return bottom;
- TYPE_READONLY (outer) = TYPE_READONLY (type);
- TYPE_VOLATILE (outer) = TYPE_VOLATILE (type);
-
- return outer;
+ return build_qualified_type (outer, TYPE_QUALS (type));
}
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
{
case MODE_VECTOR_INT:
case MODE_VECTOR_FLOAT:
+ case MODE_VECTOR_FRACT:
+ case MODE_VECTOR_UFRACT:
+ case MODE_VECTOR_ACCUM:
+ case MODE_VECTOR_UACCUM:
nunits = GET_MODE_NUNITS (mode);
break;
return real_zerop (init)
&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
+ case FIXED_CST:
+ return fixed_zerop (init);
+
case COMPLEX_CST:
return integer_zerop (init)
|| (real_zerop (init)
location. */
bool
-needs_to_live_in_memory (tree t)
+needs_to_live_in_memory (const_tree t)
{
if (TREE_CODE (t) == SSA_NAME)
t = SSA_NAME_VAR (t);
are compatible. It is assumed that the parent records are compatible. */
bool
-fields_compatible_p (tree f1, tree f2)
+fields_compatible_p (const_tree f1, const_tree f2)
{
if (!operand_equal_p (DECL_FIELD_BIT_OFFSET (f1),
DECL_FIELD_BIT_OFFSET (f2), OEP_ONLY_CONST))
return orig_field;
}
-/* Return value of a constant X. */
+/* Return value of a constant X and sign-extend it. */
HOST_WIDE_INT
int_cst_value (const_tree x)
{
unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
- bool negative = ((val >> (bits - 1)) & 1) != 0;
- gcc_assert (bits <= HOST_BITS_PER_WIDE_INT);
+ /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
+ gcc_assert (TREE_INT_CST_HIGH (x) == 0
+ || TREE_INT_CST_HIGH (x) == -1);
- if (negative)
- val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
- else
- val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
+ if (bits < HOST_BITS_PER_WIDE_INT)
+ {
+ bool negative = ((val >> (bits - 1)) & 1) != 0;
+ if (negative)
+ val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
+ else
+ val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
+ }
return val;
}
and get the same result, only slower. */
int
-operand_equal_for_phi_arg_p (tree arg0, tree arg1)
+operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
if (arg0 == arg1)
return 1;
#define WALK_SUBTREE(NODE) \
do \
{ \
- result = walk_tree (&(NODE), func, data, pset); \
+ result = walk_tree_1 (&(NODE), func, data, pset, lh); \
if (result) \
return result; \
} \
static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
- struct pointer_set_t *pset)
+ struct pointer_set_t *pset, walk_tree_lh lh)
{
tree result = NULL_TREE;
and to avoid visiting a node more than once. */
tree
-walk_tree (tree *tp, walk_tree_fn func, void *data, struct pointer_set_t *pset)
+walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
+ struct pointer_set_t *pset, walk_tree_lh lh)
{
enum tree_code code;
int walk_subtrees;
return NULL_TREE;
}
- result = lang_hooks.tree_inlining.walk_subtrees (tp, &walk_subtrees, func,
- data, pset);
- if (result || !walk_subtrees)
- return result;
+ if (lh)
+ {
+ result = (*lh) (tp, &walk_subtrees, func, data, pset);
+ if (result || !walk_subtrees)
+ return result;
+ }
switch (code)
{
case IDENTIFIER_NODE:
case INTEGER_CST:
case REAL_CST:
+ case FIXED_CST:
case VECTOR_CST:
case STRING_CST:
case BLOCK:
if (result || !walk_subtrees)
return result;
- result = walk_type_fields (*type_p, func, data, pset);
+ result = walk_type_fields (*type_p, func, data, pset, lh);
if (result)
return result;
else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
|| TREE_CODE (*type_p) == ENUMERAL_TYPE
|| TREE_CODE (*type_p) == INTEGER_TYPE
+ || TREE_CODE (*type_p) == FIXED_POINT_TYPE
|| TREE_CODE (*type_p) == REAL_TYPE)
{
WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
}
/* If this is a type, walk the needed fields in the type. */
else if (TYPE_P (*tp))
- return walk_type_fields (*tp, func, data, pset);
+ return walk_type_fields (*tp, func, data, pset, lh);
break;
}
/* Like walk_tree, but does not walk duplicate nodes more than once. */
tree
-walk_tree_without_duplicates (tree *tp, walk_tree_fn func, void *data)
+walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
+ walk_tree_lh lh)
{
tree result;
struct pointer_set_t *pset;
pset = pointer_set_create ();
- result = walk_tree (tp, func, data, pset);
+ result = walk_tree_1 (tp, func, data, pset, lh);
pointer_set_destroy (pset);
return result;
}
return arglist;
}
+/* Return true if FNTYPE has a variable argument list. */
+
+bool
+stdarg_p (tree fntype)
+{
+ function_args_iterator args_iter;
+ tree n = NULL_TREE, t;
+
+ if (!fntype)
+ return false;
+
+ FOREACH_FUNCTION_ARGS(fntype, t, args_iter)
+ {
+ n = t;
+ }
+
+ return n != NULL_TREE && n != void_type_node;
+}
+
+/* Return true if FNTYPE has a prototype. */
+
+bool
+prototype_p (tree fntype)
+{
+ tree t;
+
+ gcc_assert (fntype != NULL_TREE);
+
+ t = TYPE_ARG_TYPES (fntype);
+ return (t != NULL_TREE);
+}
+
+/* Return the number of arguments that function type FNTYPE has. */
+
+int
+function_args_count (tree fntype)
+{
+ function_args_iterator args_iter;
+ tree t;
+ int num = 0;
+
+ if (fntype)
+ {
+ FOREACH_FUNCTION_ARGS(fntype, t, args_iter)
+ {
+ num++;
+ }
+ }
+
+ return num;
+}
+
+/* If BLOCK is inlined from an __attribute__((__artificial__))
+ routine, return pointer to location from where it has been
+ called. */
+location_t *
+block_nonartificial_location (tree block)
+{
+ location_t *ret = NULL;
+
+ while (block && TREE_CODE (block) == BLOCK
+ && BLOCK_ABSTRACT_ORIGIN (block))
+ {
+ tree ao = BLOCK_ABSTRACT_ORIGIN (block);
+
+ while (TREE_CODE (ao) == BLOCK && BLOCK_ABSTRACT_ORIGIN (ao))
+ ao = BLOCK_ABSTRACT_ORIGIN (ao);
+
+ if (TREE_CODE (ao) == FUNCTION_DECL)
+ {
+ /* If AO is an artificial inline, point RET to the
+ call site locus at which it has been inlined and continue
+ the loop, in case AO's caller is also an artificial
+ inline. */
+ if (DECL_DECLARED_INLINE_P (ao)
+ && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
+ ret = &BLOCK_SOURCE_LOCATION (block);
+ else
+ break;
+ }
+ else if (TREE_CODE (ao) != BLOCK)
+ break;
+
+ block = BLOCK_SUPERCONTEXT (block);
+ }
+ return ret;
+}
+
#include "gt-tree.h"