/* Language-independent node constructors for parse phase of GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
return build_int_cst_wide (type, low, 0);
}
-/* Create an INT_CST node with a LOW value zero or sign extended depending
- on the type. */
+/* Create an INT_CST node with a LOW value in TYPE. The value is sign extended
+ if it is negative. This function is similar to build_int_cst, but
+ the extra bits outside of the type precision are cleared. Constants
+ with these extra bits may confuse the fold so that it detects overflows
+ even in cases when they do not occur, and in general should be avoided.
+ We cannot however make this a default behavior of build_int_cst without
+ more intrusive changes, since there are parts of gcc that rely on the extra
+ precision of the integer constants. */
tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
unsigned HOST_WIDE_INT val = (unsigned HOST_WIDE_INT) low;
+ unsigned HOST_WIDE_INT hi;
unsigned bits;
bool signed_p;
bool negative;
- tree ret;
if (!type)
type = integer_type_node;
bits = TYPE_PRECISION (type);
signed_p = !TYPE_UNSIGNED (type);
- negative = ((val >> (bits - 1)) & 1) != 0;
- if (signed_p && negative)
+ if (bits >= HOST_BITS_PER_WIDE_INT)
+ negative = (low < 0);
+ else
{
- if (bits < HOST_BITS_PER_WIDE_INT)
+ /* If the sign bit is inside precision of LOW, use it to determine
+ the sign of the constant. */
+ negative = ((val >> (bits - 1)) & 1) != 0;
+
+ /* Mask out the bits outside of the precision of the constant. */
+ if (signed_p && negative)
val = val | ((~(unsigned HOST_WIDE_INT) 0) << bits);
- ret = build_int_cst_wide (type, val, ~(unsigned HOST_WIDE_INT) 0);
+ else
+ val = val & ~((~(unsigned HOST_WIDE_INT) 0) << bits);
}
- else
+
+ /* Determine the high bits. */
+ hi = (negative ? ~(unsigned HOST_WIDE_INT) 0 : 0);
+
+ /* For unsigned type we need to mask out the bits outside of the type
+ precision. */
+ if (!signed_p)
{
- if (bits < HOST_BITS_PER_WIDE_INT)
- val = val & ~((~(unsigned HOST_WIDE_INT) 0) << bits);
- ret = build_int_cst_wide (type, val, 0);
+ if (bits <= HOST_BITS_PER_WIDE_INT)
+ hi = 0;
+ else
+ {
+ bits -= HOST_BITS_PER_WIDE_INT;
+ hi = hi & ~((~(unsigned HOST_WIDE_INT) 0) << bits);
+ }
}
- return ret;
+ return build_int_cst_wide (type, val, hi);
}
/* These are the hash table functions for the hash table of INTEGER_CST
return inner;
}
-/* Returns the index of the first non-tree operand for CODE, or the number
- of operands if all are trees. */
-
-int
-first_rtl_op (enum tree_code code)
-{
- switch (code)
- {
- default:
- return TREE_CODE_LENGTH (code);
- }
-}
-
/* Return which tree structure is used by T. */
enum tree_node_structure_enum
break;
}
- switch (first_rtl_op (code))
+ switch (TREE_CODE_LENGTH (code))
{
case 1:
return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
case METHOD_TYPE:
case FILE_TYPE:
case FUNCTION_TYPE:
+ case VECTOR_TYPE:
return false;
case INTEGER_TYPE:
|| CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
case ARRAY_TYPE:
- case SET_TYPE:
- case VECTOR_TYPE:
/* We're already checked the component type (TREE_TYPE), so just check
the index type. */
return type_contains_placeholder_p (TYPE_DOMAIN (type));
case tcc_comparison:
case tcc_expression:
case tcc_reference:
- switch (first_rtl_op (code))
+ switch (TREE_CODE_LENGTH (code))
{
case 0:
return exp;
case tcc_expression:
case tcc_reference:
case tcc_statement:
- switch (first_rtl_op (code))
+ switch (TREE_CODE_LENGTH (code))
{
case 0:
return exp;
{
if (staticp (node))
;
- else if (decl_function_context (node) == current_function_decl)
+ else if (decl_function_context (node) == current_function_decl
+ /* Addresses of thread-local variables are invariant. */
+ || (TREE_CODE (node) == VAR_DECL && DECL_THREAD_LOCAL (node)))
tc = false;
else
ti = tc = false;
TREE_COMPLEXITY (t) = 0;
TREE_OPERAND (t, 0) = node;
TREE_BLOCK (t) = NULL_TREE;
- if (node && !TYPE_P (node) && first_rtl_op (code) != 0)
+ if (node && !TYPE_P (node))
{
TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
TREE_READONLY (t) = TREE_READONLY (node);
#define PROCESS_ARG(N) \
do { \
TREE_OPERAND (t, N) = arg##N; \
- if (arg##N &&!TYPE_P (arg##N) && fro > N) \
+ if (arg##N &&!TYPE_P (arg##N)) \
{ \
if (TREE_SIDE_EFFECTS (arg##N)) \
side_effects = 1; \
{
bool constant, read_only, side_effects, invariant;
tree t;
- int fro;
gcc_assert (TREE_CODE_LENGTH (code) == 2);
result based on those same flags for the arguments. But if the
arguments aren't really even `tree' expressions, we shouldn't be trying
to do this. */
- fro = first_rtl_op (code);
/* Expressions without side effects may be constant if their
arguments are as well. */
{
bool constant, read_only, side_effects, invariant;
tree t;
- int fro;
gcc_assert (TREE_CODE_LENGTH (code) == 3);
t = make_node_stat (code PASS_MEM_STAT);
TREE_TYPE (t) = tt;
- fro = first_rtl_op (code);
-
side_effects = TREE_SIDE_EFFECTS (t);
PROCESS_ARG(0);
{
bool constant, read_only, side_effects, invariant;
tree t;
- int fro;
gcc_assert (TREE_CODE_LENGTH (code) == 4);
t = make_node_stat (code PASS_MEM_STAT);
TREE_TYPE (t) = tt;
- fro = first_rtl_op (code);
-
side_effects = TREE_SIDE_EFFECTS (t);
PROCESS_ARG(0);
return ttype;
}
+
/* Return nonzero if IDENT is a valid name for attribute ATTR,
or zero if not.
`text'. One might then also require attribute lists to be stored in
their canonicalized form. */
-int
-is_attribute_p (const char *attr, tree ident)
+static int
+is_attribute_with_length_p (const char *attr, int attr_len, tree ident)
{
- int ident_len, attr_len;
+ int ident_len;
const char *p;
if (TREE_CODE (ident) != IDENTIFIER_NODE)
return 0;
-
- if (strcmp (attr, IDENTIFIER_POINTER (ident)) == 0)
- return 1;
-
+
p = IDENTIFIER_POINTER (ident);
- ident_len = strlen (p);
- attr_len = strlen (attr);
+ ident_len = IDENTIFIER_LENGTH (ident);
+
+ if (ident_len == attr_len
+ && strcmp (attr, p) == 0)
+ return 1;
/* If ATTR is `__text__', IDENT must be `text'; and vice versa. */
if (attr[0] == '_')
return 0;
}
+/* Return nonzero if IDENT is a valid name for attribute ATTR,
+ or zero if not.
+
+ We try both `text' and `__text__', ATTR may be either one. */
+
+int
+is_attribute_p (const char *attr, tree ident)
+{
+ return is_attribute_with_length_p (attr, strlen (attr), ident);
+}
+
/* Given an attribute name and a list of attributes, return a pointer to the
attribute's list element if the attribute is part of the list, or NULL_TREE
if not found. If the attribute appears more than once, this only
lookup_attribute (const char *attr_name, tree list)
{
tree l;
+ size_t attr_len = strlen (attr_name);
for (l = list; l; l = TREE_CHAIN (l))
{
gcc_assert (TREE_CODE (TREE_PURPOSE (l)) == IDENTIFIER_NODE);
- if (is_attribute_p (attr_name, TREE_PURPOSE (l)))
+ if (is_attribute_with_length_p (attr_name, attr_len, TREE_PURPOSE (l)))
return l;
}
TYPE_ARG_TYPES (b->type)))));
case ARRAY_TYPE:
- case SET_TYPE:
return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
case RECORD_TYPE:
for (; t; t = TREE_CHAIN (t))
val = iterative_hash_expr (TREE_VALUE (t), val);
return val;
+ case FUNCTION_DECL:
+ /* When referring to a built-in FUNCTION_DECL, use the
+ __builtin__ form. Otherwise nodes that compare equal
+ according to operand_equal_p might get different
+ hash codes. */
+ if (DECL_BUILT_IN (t))
+ {
+ val = iterative_hash_pointer (built_in_decls[DECL_FUNCTION_CODE (t)],
+ val);
+ return val;
+ }
+ /* else FALL THROUGH */
default:
class = TREE_CODE_CLASS (code);
if (class == tcc_declaration)
{
- /* Decls we can just compare by pointer. */
+ /* Otherwise, we can just compare decls by pointer. */
val = iterative_hash_pointer (t, val);
}
else
val = iterative_hash_hashval_t (two, val);
}
else
- for (i = first_rtl_op (code) - 1; i >= 0; --i)
+ for (i = TREE_CODE_LENGTH (code) - 1; i >= 0; --i)
val = iterative_hash_expr (TREE_OPERAND (t, i), val);
}
return val;
for (args = NULL_TREE; t != NULL_TREE; t = va_arg (p, tree))
args = tree_cons (NULL_TREE, t, args);
- last = args;
- args = nreverse (args);
- TREE_CHAIN (last) = void_list_node;
+ if (args == NULL_TREE)
+ args = void_list_node;
+ else
+ {
+ last = args;
+ args = nreverse (args);
+ TREE_CHAIN (last) = void_list_node;
+ }
args = build_function_type (return_type, args);
va_end (p);
{
tree type_low_bound = TYPE_MIN_VALUE (type);
tree type_high_bound = TYPE_MAX_VALUE (type);
- int ok_for_low_bound, ok_for_high_bound;
-
- /* Perform some generic filtering first, which may allow making a decision
- even if the bounds are not constant. First, negative integers never fit
- in unsigned types, */
- if ((TYPE_UNSIGNED (type) && tree_int_cst_sgn (c) < 0)
- /* Also, unsigned integers with top bit set never fit signed types. */
- || (! TYPE_UNSIGNED (type)
- && TYPE_UNSIGNED (TREE_TYPE (c)) && tree_int_cst_msb (c)))
- return 0;
+ bool ok_for_low_bound, ok_for_high_bound;
+ tree tmp;
/* If at least one bound of the type is a constant integer, we can check
ourselves and maybe make a decision. If no such decision is possible, but
for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
for "constant known to fit". */
- ok_for_low_bound = -1;
- ok_for_high_bound = -1;
-
/* Check if C >= type_low_bound. */
if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
{
- ok_for_low_bound = ! tree_int_cst_lt (c, type_low_bound);
- if (! ok_for_low_bound)
+ if (tree_int_cst_lt (c, type_low_bound))
return 0;
+ ok_for_low_bound = true;
}
+ else
+ ok_for_low_bound = false;
/* Check if c <= type_high_bound. */
if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
{
- ok_for_high_bound = ! tree_int_cst_lt (type_high_bound, c);
- if (! ok_for_high_bound)
+ if (tree_int_cst_lt (type_high_bound, c))
return 0;
+ ok_for_high_bound = true;
}
+ else
+ ok_for_high_bound = false;
/* If the constant fits both bounds, the result is known. */
- if (ok_for_low_bound == 1 && ok_for_high_bound == 1)
+ if (ok_for_low_bound && ok_for_high_bound)
return 1;
+ /* Perform some generic filtering which may allow making a decision
+ even if the bounds are not constant. First, negative integers
+ never fit in unsigned types.  */
+ if (TYPE_UNSIGNED (type) && tree_int_cst_sgn (c) < 0)
+ return 0;
+
+ /* Second, narrower types always fit in wider ones. */
+ if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
+ return 1;
+
+ /* Third, unsigned integers with top bit set never fit signed types. */
+ if (! TYPE_UNSIGNED (type)
+ && TYPE_UNSIGNED (TREE_TYPE (c))
+ && tree_int_cst_msb (c))
+ return 0;
+
/* If we haven't been able to decide at this point, there nothing more we
can check ourselves here. Look at the base type if we have one. */
- else if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != 0)
+ if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != 0)
return int_fits_type_p (c, TREE_TYPE (type));
/* Or to force_fit_type, if nothing else. */
- else
- {
- c = copy_node (c);
- TREE_TYPE (c) = type;
- c = force_fit_type (c, -1, false, false);
- return !TREE_OVERFLOW (c);
- }
+ tmp = copy_node (c);
+ TREE_TYPE (tmp) = type;
+ tmp = force_fit_type (tmp, -1, false, false);
+ return TREE_INT_CST_HIGH (tmp) == TREE_INT_CST_HIGH (c)
+ && TREE_INT_CST_LOW (tmp) == TREE_INT_CST_LOW (c);
}
/* Subprogram of following function. Called by walk_tree.
case POINTER_TYPE:
case REFERENCE_TYPE:
case ARRAY_TYPE:
- case SET_TYPE:
case VECTOR_TYPE:
if (variably_modified_type_p (TREE_TYPE (type), fn))
return true;
}
}
+/* A subroutine of build_common_builtin_nodes. Define a builtin function. */
+
+static void
+local_define_builtin (const char *name, tree type, enum built_in_function code,
+ const char *library_name, int ecf_flags)
+{
+ tree decl;
+
+ decl = lang_hooks.builtin_function (name, type, code, BUILT_IN_NORMAL,
+ library_name, NULL_TREE);
+ if (ecf_flags & ECF_CONST)
+ TREE_READONLY (decl) = 1;
+ if (ecf_flags & ECF_PURE)
+ DECL_IS_PURE (decl) = 1;
+ if (ecf_flags & ECF_NORETURN)
+ TREE_THIS_VOLATILE (decl) = 1;
+ if (ecf_flags & ECF_NOTHROW)
+ TREE_NOTHROW (decl) = 1;
+ if (ecf_flags & ECF_MALLOC)
+ DECL_IS_MALLOC (decl) = 1;
+
+ built_in_decls[code] = decl;
+ implicit_built_in_decls[code] = decl;
+}
+
+/* Call this function after instantiating all builtins that the language
+ front end cares about. This will build the rest of the builtins that
+ are relied upon by the tree optimizers and the middle-end. */
+
+void
+build_common_builtin_nodes (void)
+{
+ tree tmp, ftype;
+
+ if (built_in_decls[BUILT_IN_MEMCPY] == NULL
+ || built_in_decls[BUILT_IN_MEMMOVE] == NULL)
+ {
+ tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
+ tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
+ ftype = build_function_type (ptr_type_node, tmp);
+
+ if (built_in_decls[BUILT_IN_MEMCPY] == NULL)
+ local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
+ "memcpy", ECF_NOTHROW);
+ if (built_in_decls[BUILT_IN_MEMMOVE] == NULL)
+ local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
+ "memmove", ECF_NOTHROW);
+ }
+
+ if (built_in_decls[BUILT_IN_MEMCMP] == NULL)
+ {
+ tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
+ tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp);
+ tmp = tree_cons (NULL_TREE, const_ptr_type_node, tmp);
+ ftype = build_function_type (ptr_type_node, tmp);
+ local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
+ "memcmp", ECF_PURE | ECF_NOTHROW);
+ }
+
+ if (built_in_decls[BUILT_IN_MEMSET] == NULL)
+ {
+ tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
+ tmp = tree_cons (NULL_TREE, integer_type_node, tmp);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
+ ftype = build_function_type (ptr_type_node, tmp);
+ local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
+ "memset", ECF_NOTHROW);
+ }
+
+ if (built_in_decls[BUILT_IN_ALLOCA] == NULL)
+ {
+ tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
+ ftype = build_function_type (ptr_type_node, tmp);
+ local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
+ "alloca", ECF_NOTHROW | ECF_MALLOC);
+ }
+
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
+ ftype = build_function_type (void_type_node, tmp);
+ local_define_builtin ("__builtin_init_trampoline", ftype,
+ BUILT_IN_INIT_TRAMPOLINE,
+ "__builtin_init_trampoline", ECF_NOTHROW);
+
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ ftype = build_function_type (ptr_type_node, tmp);
+ local_define_builtin ("__builtin_adjust_trampoline", ftype,
+ BUILT_IN_ADJUST_TRAMPOLINE,
+ "__builtin_adjust_trampoline",
+ ECF_CONST | ECF_NOTHROW);
+
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ tmp = tree_cons (NULL_TREE, ptr_type_node, tmp);
+ ftype = build_function_type (void_type_node, tmp);
+ local_define_builtin ("__builtin_nonlocal_goto", ftype,
+ BUILT_IN_NONLOCAL_GOTO,
+ "__builtin_nonlocal_goto",
+ ECF_NORETURN | ECF_NOTHROW);
+
+ ftype = build_function_type (ptr_type_node, void_list_node);
+ local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
+ "__builtin_stack_save", ECF_NOTHROW);
+
+ tmp = tree_cons (NULL_TREE, ptr_type_node, void_list_node);
+ ftype = build_function_type (void_type_node, tmp);
+ local_define_builtin ("__builtin_stack_restore", ftype,
+ BUILT_IN_STACK_RESTORE,
+ "__builtin_stack_restore", ECF_NOTHROW);
+
+ ftype = build_function_type (void_type_node, void_list_node);
+ local_define_builtin ("__builtin_profile_func_enter", ftype,
+ BUILT_IN_PROFILE_FUNC_ENTER, "profile_func_enter", 0);
+ local_define_builtin ("__builtin_profile_func_exit", ftype,
+ BUILT_IN_PROFILE_FUNC_EXIT, "profile_func_exit", 0);
+
+ /* Complex multiplication and division. These are handled as builtins
+ rather than optabs because emit_library_call_value doesn't support
+ complex. Further, we can do slightly better with folding these
+ beasties if the real and complex parts of the arguments are separate. */
+ {
+ enum machine_mode mode;
+
+ for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
+ {
+ char mode_name_buf[4], *q;
+ const char *p;
+ enum built_in_function mcode, dcode;
+ tree type, inner_type;
+
+ type = lang_hooks.types.type_for_mode (mode, 0);
+ if (type == NULL)
+ continue;
+ inner_type = TREE_TYPE (type);
+
+ tmp = tree_cons (NULL_TREE, inner_type, void_list_node);
+ tmp = tree_cons (NULL_TREE, inner_type, tmp);
+ tmp = tree_cons (NULL_TREE, inner_type, tmp);
+ tmp = tree_cons (NULL_TREE, inner_type, tmp);
+ ftype = build_function_type (type, tmp);
+
+ mcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
+ dcode = BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
+
+ for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
+ *q = TOLOWER (*p);
+ *q = '\0';
+
+ built_in_names[mcode] = concat ("__mul", mode_name_buf, "3", NULL);
+ local_define_builtin (built_in_names[mcode], ftype, mcode,
+ built_in_names[mcode], ECF_CONST | ECF_NOTHROW);
+
+ built_in_names[dcode] = concat ("__div", mode_name_buf, "3", NULL);
+ local_define_builtin (built_in_names[dcode], ftype, dcode,
+ built_in_names[dcode], ECF_CONST | ECF_NOTHROW);
+ }
+ }
+}
+
/* HACK. GROSS. This is absolutely disgusting. I wish there was a
better way.
if (elt == NULL_TREE)
return true;
- /* A set is empty only if it has no elements. */
- if (TREE_CODE (TREE_TYPE (init)) == SET_TYPE)
- return false;
-
for (; elt ; elt = TREE_CHAIN (elt))
if (! initializer_zerop (TREE_VALUE (elt)))
return false;
build_int_cst_wide (inner, lo, hi));
}
+/* Return nonzero if two operands that are suitable for PHI nodes are
+ necessarily equal. Specifically, both ARG0 and ARG1 must be either
+ SSA_NAME or invariant. Note that this is strictly an optimization.
+ That is, callers of this function can directly call operand_equal_p
+ and get the same result, only slower. */
+
+int
+operand_equal_for_phi_arg_p (tree arg0, tree arg1)
+{
+ if (arg0 == arg1)
+ return 1;
+ if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
+ return 0;
+ return operand_equal_p (arg0, arg1, 0);
+}
+
+/* Returns the number of zeros at the end of the binary representation of X.
+
+ ??? Use ffs if available? */
+
+tree
+num_ending_zeros (tree x)
+{
+ unsigned HOST_WIDE_INT fr, nfr;
+ unsigned num, abits;
+ tree type = TREE_TYPE (x);
+
+ if (TREE_INT_CST_LOW (x) == 0)
+ {
+ num = HOST_BITS_PER_WIDE_INT;
+ fr = TREE_INT_CST_HIGH (x);
+ }
+ else
+ {
+ num = 0;
+ fr = TREE_INT_CST_LOW (x);
+ }
+
+ for (abits = HOST_BITS_PER_WIDE_INT / 2; abits; abits /= 2)
+ {
+ nfr = fr >> abits;
+ if (nfr << abits == fr)
+ {
+ num += abits;
+ fr = nfr;
+ }
+ }
+
+ if (num > TYPE_PRECISION (type))
+ num = TYPE_PRECISION (type);
+
+ return build_int_cst_type (type, num);
+}
+
#include "gt-tree.h"