GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/*@@ This file should be rewritten to use an arbitrary precision
@@ representation for "struct tree_int_cst" and "struct tree_real_cst".
int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
- unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
+ unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
unsigned HOST_WIDE_INT low0 = l1;
HOST_WIDE_INT high0 = h1;
overflow. */
bool
-may_negate_without_overflow_p (tree t)
+may_negate_without_overflow_p (const_tree t)
{
unsigned HOST_WIDE_INT val;
unsigned int prec;
{
tem = strip_float_extensions (t);
if (tem != t && negate_expr_p (tem))
- return negate_expr (tem);
+ return fold_convert (type, negate_expr (tem));
}
break;
If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
tree
-int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
+int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
unsigned HOST_WIDE_INT int1l, int2l;
HOST_WIDE_INT int1h, int2h;
return build_vector (type, list);
}
+/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
+
+bool
+fold_convertible_p (const_tree type, const_tree arg)
+{
+ tree orig = TREE_TYPE (arg);
+
+ /* Trivially convertible when the types are pointer-identical.  */
+ if (type == orig)
+ return true;
+
+ /* Never claim convertibility for erroneous trees.  */
+ if (TREE_CODE (arg) == ERROR_MARK
+ || TREE_CODE (type) == ERROR_MARK
+ || TREE_CODE (orig) == ERROR_MARK)
+ return false;
+
+ /* Variants of the same main type differ only in qualifiers.  */
+ if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
+ return true;
+
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
+ case POINTER_TYPE: case REFERENCE_TYPE:
+ case OFFSET_TYPE:
+ /* Any integral-class or pointer-class source converts via NOP;
+ a vector source only if it occupies exactly the same size.  */
+ if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+ || TREE_CODE (orig) == OFFSET_TYPE)
+ return true;
+ return (TREE_CODE (orig) == VECTOR_TYPE
+ && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
+
+ default:
+ /* Otherwise require the same tree code on both sides.  */
+ return TREE_CODE (type) == TREE_CODE (orig);
+ }
+}
+
+
/* Convert expression ARG to type TYPE. Used by the middle-end for
simple conversions in preference to calling the front-end's convert. */
|| TREE_CODE (orig) == ERROR_MARK)
return error_mark_node;
- if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
- || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
- TYPE_MAIN_VARIANT (orig)))
+ if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
return fold_build1 (NOP_EXPR, type, arg);
switch (TREE_CODE (type))
to ensure that global memory is unchanged in between. */
int
-operand_equal_p (tree arg0, tree arg1, unsigned int flags)
+operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
/* If either is ERROR_MARK, they aren't equal. */
if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
/* Now see if all the arguments are the same. */
{
- call_expr_arg_iterator iter0, iter1;
- tree a0, a1;
- for (a0 = first_call_expr_arg (arg0, &iter0),
- a1 = first_call_expr_arg (arg1, &iter1);
+ const_call_expr_arg_iterator iter0, iter1;
+ const_tree a0, a1;
+ for (a0 = first_const_call_expr_arg (arg0, &iter0),
+ a1 = first_const_call_expr_arg (arg1, &iter1);
a0 && a1;
- a0 = next_call_expr_arg (&iter0),
- a1 = next_call_expr_arg (&iter1))
+ a0 = next_const_call_expr_arg (&iter0),
+ a1 = next_const_call_expr_arg (&iter1))
if (! operand_equal_p (a0, a1, flags))
return 0;
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
- type, arg1, arg2));
+ type,
+ fold_convert (type, arg1),
+ arg2));
break;
case LE_EXPR:
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
- type, arg1, arg2));
+ type,
+ fold_convert (type, arg1),
+ arg2));
break;
case GT_EXPR:
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
- type, arg1, arg2));
+ type,
+ fold_convert (type, arg1),
+ arg2));
break;
case GE_EXPR:
build_int_cst (type, 1), 0),
OEP_ONLY_CONST))
return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
- type, arg1, arg2));
+ type,
+ fold_convert (type, arg1),
+ arg2));
break;
case NE_EXPR:
break;
evaluate the operands in reverse order. */
bool
-tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
+tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
STRIP_SIGN_NOPS (arg0);
STRIP_SIGN_NOPS (arg1);
arg00 = TREE_OPERAND (arg0, 0);
arg01 = TREE_OPERAND (arg0, 1);
}
+ else if (TREE_CODE (arg0) == INTEGER_CST)
+ {
+ arg00 = build_one_cst (type);
+ arg01 = arg0;
+ }
else
{
arg00 = arg0;
arg10 = TREE_OPERAND (arg1, 0);
arg11 = TREE_OPERAND (arg1, 1);
}
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ {
+ arg10 = build_one_cst (type);
+ arg11 = arg1;
+ }
else
{
arg10 = arg1;
upon failure. */
static int
-native_encode_int (tree expr, unsigned char *ptr, int len)
+native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
tree type = TREE_TYPE (expr);
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
upon failure. */
static int
-native_encode_real (tree expr, unsigned char *ptr, int len)
+native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
tree type = TREE_TYPE (expr);
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
upon failure. */
static int
-native_encode_complex (tree expr, unsigned char *ptr, int len)
+native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
int rsize, isize;
tree part;
upon failure. */
static int
-native_encode_vector (tree expr, unsigned char *ptr, int len)
+native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
int i, size, offset, count;
tree itype, elem, elements;
placed in the buffer, or zero upon failure. */
int
-native_encode_expr (tree expr, unsigned char *ptr, int len)
+native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
switch (TREE_CODE (expr))
{
If the buffer cannot be interpreted, return NULL_TREE. */
static tree
-native_interpret_int (tree type, unsigned char *ptr, int len)
+native_interpret_int (tree type, const unsigned char *ptr, int len)
{
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
int byte, offset, word, words;
If the buffer cannot be interpreted, return NULL_TREE. */
static tree
-native_interpret_real (tree type, unsigned char *ptr, int len)
+native_interpret_real (tree type, const unsigned char *ptr, int len)
{
enum machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
If the buffer cannot be interpreted, return NULL_TREE. */
static tree
-native_interpret_complex (tree type, unsigned char *ptr, int len)
+native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
tree etype, rpart, ipart;
int size;
If the buffer cannot be interpreted, return NULL_TREE. */
static tree
-native_interpret_vector (tree type, unsigned char *ptr, int len)
+native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
tree etype, elem, elements;
int i, size, count;
return NULL_TREE. */
tree
-native_interpret_expr (tree type, unsigned char *ptr, int len)
+native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
switch (TREE_CODE (type))
{
return native_interpret_expr (type, buffer, len);
}
+/* Build an expression for the address of T. Folds away INDIRECT_REF
+ to avoid confusing the gimplify process. When IN_FOLD is true
+ avoid modifications of T. */
+
+static tree
+build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
+{
+ /* The size of the object is not relevant when talking about its address. */
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Note: doesn't apply to ALIGN_INDIRECT_REF */
+ if (TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
+ {
+ /* &*p folds to p itself, with a NOP cast if the pointer type
+ does not already match the requested PTRTYPE.  */
+ t = TREE_OPERAND (t, 0);
+
+ if (TREE_TYPE (t) != ptrtype)
+ t = build1 (NOP_EXPR, ptrtype, t);
+ }
+ else if (!in_fold)
+ {
+ /* Outside of fold, taking the address of a decl-based object
+ must mark the underlying decl addressable: walk through any
+ component references down to the base object first.  */
+ tree base = t;
+
+ while (handled_component_p (base))
+ base = TREE_OPERAND (base, 0);
+
+ if (DECL_P (base))
+ TREE_ADDRESSABLE (base) = 1;
+
+ t = build1 (ADDR_EXPR, ptrtype, t);
+ }
+ else
+ /* Inside fold, build the ADDR_EXPR without touching T's flags.  */
+ t = build1 (ADDR_EXPR, ptrtype, t);
+
+ return t;
+}
+
+/* Build an expression for the address of T with type PTRTYPE. This
+ function modifies the input parameter 'T' by sometimes setting the
+ TREE_ADDRESSABLE flag. */
+
+tree
+build_fold_addr_expr_with_type (tree t, tree ptrtype)
+{
+ /* in_fold == false: mutation of T's base decl is permitted here.  */
+ return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
+}
+
+/* Build an expression for the address of T. This function modifies
+ the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
+ flag. When called from fold functions, use fold_addr_expr instead. */
+
+tree
+build_fold_addr_expr (tree t)
+{
+ /* Derive the pointer type from T itself; in_fold == false allows
+ marking the base decl addressable.  */
+ return build_fold_addr_expr_with_type_1 (t,
+ build_pointer_type (TREE_TYPE (t)),
+ false);
+}
+
+/* Same as build_fold_addr_expr, builds an expression for the address
+ of T, but avoids touching the input node 't'. Fold functions
+ should use this version. */
+
+static tree
+fold_addr_expr (tree t)
+{
+ tree ptrtype = build_pointer_type (TREE_TYPE (t));
+
+ /* in_fold == true: never set flags on T or its base decl.  */
+ return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
+}
/* Fold a unary expression of code CODE and type TYPE with operand
OP0. Return the folded expression if folding is successful.
if (! offset && bitpos == 0
&& TYPE_MAIN_VARIANT (TREE_TYPE (type))
== TYPE_MAIN_VARIANT (TREE_TYPE (base)))
- return fold_convert (type, build_fold_addr_expr (base));
+ return fold_convert (type, fold_addr_expr (base));
}
if ((TREE_CODE (op0) == MODIFY_EXPR
tree op0 = TREE_OPERAND (cref0, 0);
tree op1 = TREE_OPERAND (cref1, 0);
return fold_build2 (code, type,
- build_fold_addr_expr (op0),
- build_fold_addr_expr (op1));
+ fold_addr_expr (op0),
+ fold_addr_expr (op1));
}
}
return fold_build2 (MINUS_EXPR, type,
fold_convert (type, arg1),
fold_convert (type, TREE_OPERAND (arg0, 0)));
- /* Convert ~A + 1 to -A. */
- if (INTEGRAL_TYPE_P (type)
- && TREE_CODE (arg0) == BIT_NOT_EXPR
- && integer_onep (arg1))
- return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+
+ if (INTEGRAL_TYPE_P (type))
+ {
+ /* Convert ~A + 1 to -A. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && integer_onep (arg1))
+ return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+
+ /* ~X + X is -1. */
+ if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && !TYPE_OVERFLOW_TRAPS (type))
+ {
+ tree tem = TREE_OPERAND (arg0, 0);
+
+ STRIP_NOPS (tem);
+ if (operand_equal_p (tem, arg1, 0))
+ {
+ t1 = build_int_cst_type (type, -1);
+ return omit_one_operand (type, t1, arg1);
+ }
+ }
+
+ /* X + ~X is -1. */
+ if (TREE_CODE (arg1) == BIT_NOT_EXPR
+ && !TYPE_OVERFLOW_TRAPS (type))
+ {
+ tree tem = TREE_OPERAND (arg1, 0);
+
+ STRIP_NOPS (tem);
+ if (operand_equal_p (arg0, tem, 0))
+ {
+ t1 = build_int_cst_type (type, -1);
+ return omit_one_operand (type, t1, arg0);
+ }
+ }
+ }
/* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
same or one. */
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
- /* ~X + X is -1. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && !TYPE_OVERFLOW_TRAPS (type))
- {
- t1 = build_int_cst_type (type, -1);
- return omit_one_operand (type, t1, arg1);
- }
-
- /* X + ~X is -1. */
- if (TREE_CODE (arg1) == BIT_NOT_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
- && !TYPE_OVERFLOW_TRAPS (type))
- {
- t1 = build_int_cst_type (type, -1);
- return omit_one_operand (type, t1, arg0);
- }
-
/* If we are adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
if (integer_zerop (arg1))
return non_lvalue (fold_convert (type, arg0));
if (integer_all_onesp (arg1))
- return fold_build1 (BIT_NOT_EXPR, type, arg0);
+ return fold_build1 (BIT_NOT_EXPR, type, op0);
if (operand_equal_p (arg0, arg1, 0))
return omit_one_operand (type, integer_zero_node, arg0);
"when distributing negation across "
"division"),
WARN_STRICT_OVERFLOW_MISC);
- return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
+ return fold_build2 (code, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
negate_expr (arg1));
}
if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
&& TREE_CODE_CLASS (code) != tcc_declaration
- && code != TREE_LIST)
+ && code != TREE_LIST
+ && code != SSA_NAME)
fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
switch (TREE_CODE_CLASS (code))
{
}
}
+/* Helper function for outputting the checksum of a tree T. When
+ debugging with gdb, you can "define mynext" to be "next" followed
+ by "call debug_fold_checksum (op0)", then just trace down till the
+ outputs differ. */
+
+void
+debug_fold_checksum (tree t)
+{
+ int i;
+ unsigned char checksum[16];
+ struct md5_ctx ctx;
+ /* Pointer-identity hash table handed to fold_checksum_tree —
+ presumably to avoid revisiting shared nodes; confirm against
+ fold_checksum_tree's contract.  */
+ htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
+
+ md5_init_ctx (&ctx);
+ fold_checksum_tree (t, &ctx, ht);
+ md5_finish_ctx (&ctx, checksum);
+ htab_empty (ht);
+
+ /* Print the 16 MD5 digest bytes as decimal values on one line.  */
+ for (i = 0; i < 16; i++)
+ fprintf (stderr, "%d ", checksum[i]);
+
+ fprintf (stderr, "\n");
+}
+
+
#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
&& (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
== MODE_INT)
&& (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
- return fold_convert (TREE_TYPE (exp),
- build_int_cst (NULL_TREE,
- (TREE_STRING_POINTER (string)
- [TREE_INT_CST_LOW (index)])));
+ return build_int_cst_type (TREE_TYPE (exp),
+ (TREE_STRING_POINTER (string)
+ [TREE_INT_CST_LOW (index)]));
}
return NULL;
}
return constant_boolean_node (result, type);
}
-/* Build an expression for the a clean point containing EXPR with type TYPE.
- Don't build a cleanup point expression for EXPR which don't have side
- effects. */
+/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
+ indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
+ itself. */
tree
fold_build_cleanup_point_expr (tree type, tree expr)
return build1 (CLEANUP_POINT_EXPR, type, expr);
}
-/* Build an expression for the address of T. Folds away INDIRECT_REF to
- avoid confusing the gimplify process. */
-
-tree
-build_fold_addr_expr_with_type (tree t, tree ptrtype)
-{
- /* The size of the object is not relevant when talking about its address. */
- if (TREE_CODE (t) == WITH_SIZE_EXPR)
- t = TREE_OPERAND (t, 0);
-
- /* Note: doesn't apply to ALIGN_INDIRECT_REF */
- if (TREE_CODE (t) == INDIRECT_REF
- || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
- {
- t = TREE_OPERAND (t, 0);
- if (TREE_TYPE (t) != ptrtype)
- t = build1 (NOP_EXPR, ptrtype, t);
- }
- else
- {
- tree base = t;
-
- while (handled_component_p (base))
- base = TREE_OPERAND (base, 0);
- if (DECL_P (base))
- TREE_ADDRESSABLE (base) = 1;
-
- t = build1 (ADDR_EXPR, ptrtype, t);
- }
-
- return t;
-}
-
-tree
-build_fold_addr_expr (tree t)
-{
- return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
-}
-
/* Given a pointer value OP0 and a type TYPE, return a simplified version
of an indirection through OP0, or NULL_TREE if no simplification is
possible. */
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
poffset, &mode, &unsignedp, &volatilep,
false);
- core = build_fold_addr_expr (core);
+ core = fold_addr_expr (core);
}
else
{