diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 1057d26..5af6f0d 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -134,7 +134,6 @@ static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
-static bool fold_real_zero_addition_p (const_tree, const_tree, int);
 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
@@ -1492,7 +1491,7 @@ split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
       || TREE_CODE (in) == FIXED_CST)
     *litp = in;
   else if (TREE_CODE (in) == code
-          || (! FLOAT_TYPE_P (TREE_TYPE (in))
+          || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
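Purely illustrative, not part of the patch: with the split_tree change above, a floating-point chain can be split and re-associated like an integer one when -fassociative-math is in effect. The function name below is hypothetical.

/* Hypothetical example; with -O2 -fassociative-math the two literals
   may now be combined into a single addend.  */
double
reassoc (double a)
{
  return (a + 1.0) + 2.0;   /* may fold to a + 3.0 */
}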
@@ -2122,8 +2121,22 @@ fold_convert_const_int_from_int (tree type, const_tree arg1)
   t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
-                               converting a pointer  */
-                            !POINTER_TYPE_P (TREE_TYPE (arg1)),
+                               converting from a pointer,  */
+                            !POINTER_TYPE_P (TREE_TYPE (arg1))
+                            /* or to a sizetype with same signedness
+                               and the precision is unchanged.
+                               ???  sizetype is always sign-extended,
+                               but its signedness depends on the
+                               frontend.  Thus we see spurious overflows
+                               here if we do not check this.  */
+                            && !((TYPE_PRECISION (TREE_TYPE (arg1))
+                                  == TYPE_PRECISION (type))
+                                 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
+                                     == TYPE_UNSIGNED (type))
+                                 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
+                                      && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
+                                     || (TREE_CODE (type) == INTEGER_TYPE
+                                         && TYPE_IS_SIZETYPE (type)))),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
@@ -2462,8 +2475,15 @@ fold_convertible_p (const_tree type, const_tree arg)
       return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
 
-    default:
+    case REAL_TYPE:
+    case FIXED_POINT_TYPE:
+    case COMPLEX_TYPE:
+    case VECTOR_TYPE:
+    case VOID_TYPE:
       return TREE_CODE (type) == TREE_CODE (orig);
+
+    default:
+      return false;
     }
 }
 
@@ -6095,6 +6115,9 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
             }
           break;
         }
+      /* If the constant is negative, we cannot simplify this.  */
+      if (tree_int_cst_sgn (c) == -1)
+        break;
       /* FALLTHROUGH */
     case NEGATE_EXPR:
       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
@@ -6409,7 +6432,7 @@ fold_binary_op_with_conditional_arg (enum tree_code code,
    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
 
-static bool
+bool
 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
 {
   if (!real_zerop (addend))
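Illustrative only: a small standalone program showing why X + 0.0 cannot be folded to X in the default rounding mode while X - 0.0 can, which is exactly what fold_real_zero_addition_p checks.

#include <stdio.h>

int
main (void)
{
  double x = -0.0;
  /* -0.0 + 0.0 is +0.0, so x + 0.0 is not equivalent to x;
     -0.0 - 0.0 is -0.0, so x - 0.0 is.  */
  printf ("%g %g\n", x + 0.0, x - 0.0);   /* prints: 0 -0 */
  return 0;
}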
@@ -7028,12 +7051,14 @@ fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
     return NULL_TREE;
 
-  arg1_unw = get_unwidened (arg1, shorter_type);
+  arg1_unw = get_unwidened (arg1, NULL_TREE);
 
   /* If possible, express the comparison in the shorter mode.  */
   if ((code == EQ_EXPR || code == NE_EXPR
        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
       && (TREE_TYPE (arg1_unw) == shorter_type
+         || (TYPE_PRECISION (shorter_type)
+             >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
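Illustrative only, assuming 8-bit char, 16-bit short and 32-bit int: a comparison whose operands were both widened by the usual arithmetic conversions is the kind of tree this hunk now lets fold carry out in the narrower type. The function is hypothetical.

/* Both operands are promoted to int; since the unwidened right-hand
   side (8 bits) fits in the 16-bit type of the left-hand side, the
   comparison may now be expressed directly in unsigned short.  */
int
cmp (unsigned short s, unsigned char c)
{
  return s == c;
}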
@@ -8009,6 +8034,14 @@ fold_unary (enum tree_code code, tree type, tree op0)
 
   switch (code)
     {
+    case PAREN_EXPR:
+      /* Re-association barriers around constants and other re-association
+        barriers can be removed.  */
+      if (CONSTANT_CLASS_P (op0)
+         || TREE_CODE (op0) == PAREN_EXPR)
+       return fold_convert (type, op0);
+      return NULL_TREE;
+
     case NOP_EXPR:
     case FLOAT_EXPR:
     case CONVERT_EXPR:
@@ -8244,7 +8277,12 @@ fold_unary (enum tree_code code, tree type, tree op0)
     case VIEW_CONVERT_EXPR:
       if (TREE_TYPE (op0) == type)
        return op0;
-      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
+      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR
+         || (TREE_CODE (op0) == NOP_EXPR
+             && INTEGRAL_TYPE_P (TREE_TYPE (op0))
+             && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
+             && TYPE_PRECISION (TREE_TYPE (op0))
+                == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
       return fold_view_convert_expr (type, op0);
 
@@ -8309,7 +8347,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
       if (TREE_CODE (arg0) == INTEGER_CST)
         return fold_not_const (arg0, type);
       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
-       return TREE_OPERAND (op0, 0);
+       return fold_convert (type, TREE_OPERAND (arg0, 0));
       /* Convert ~ (-A) to A - 1.  */
       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2 (MINUS_EXPR, type,
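Illustrative source-level counterparts of the two BIT_NOT_EXPR folds touched here (hypothetical functions):

int
f1 (int a)
{
  return ~~a;     /* folds back to a, now converted to the result type */
}

int
f2 (int a)
{
  return ~(-a);   /* ~x == -x - 1, so ~(-a) folds to a - 1 */
}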
@@ -9855,13 +9893,18 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
         is a rotate of A by B bits.  */
       {
        enum tree_code code0, code1;
+       tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
-           && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+           && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
+               TYPE_UNSIGNED (rtype))
+           /* Only create rotates in complete modes.  Other cases are not
+              expanded properly.  */
+           && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;
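Illustrative only, assuming a 32-bit unsigned int: the classic source-level rotate idiom this code recognizes. The added check restricts the fold to types whose precision fills their machine mode, since partial-mode rotates are not expanded correctly.

unsigned int
rotl8 (unsigned int x)
{
  /* Recognized as a single rotate-left by 8 because unsigned int's
     precision equals the precision of its mode.  */
  return (x << 8) | (x >> 24);
}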
@@ -10567,8 +10610,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
-         unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
-         int width = TYPE_PRECISION (type);
+         unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
+         int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
@@ -10596,16 +10639,35 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);
 
-         /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
+         /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
+            unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
+            mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
-         if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
+         hi2 &= mhi;
+         lo2 &= mlo;
+         hi3 = hi1 & ~hi2;
+         lo3 = lo1 & ~lo2;
+         for (w = BITS_PER_UNIT;
+              w <= width && w <= HOST_BITS_PER_WIDE_INT;
+              w <<= 1)
+           {
+             unsigned HOST_WIDE_INT mask
+               = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
+             if (((lo1 | lo2) & mask) == mask
+                 && (lo1 & ~mask) == 0 && hi1 == 0)
+               {
+                 hi3 = 0;
+                 lo3 = mask;
+                 break;
+               }
+           }
+         if (hi3 != hi1 || lo3 != lo1)
            return fold_build2 (BIT_IOR_EXPR, type,
                                fold_build2 (BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
-                                                                lo1 & ~lo2,
-                                                                hi1 & ~hi2)),
+                                                                lo3, hi3)),
                                arg1);
        }
 
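A worked example of the new C1 adjustment, purely illustrative:

unsigned int
g (unsigned char c)
{
  unsigned int x = c;
  /* For (x & 0xf0) | 0x0f, C1 & ~C2 is still 0xf0, but C1 | C2 covers
     the complete byte mask, so C1 is widened to 0xff instead; x & 0xff
     can then combine with the zero extension of c and disappear.  */
  return (x & 0xf0) | 0x0f;
}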
@@ -10970,6 +11032,100 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
            return build_int_cst (type, residue & low);
        }
 
+      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
+             (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
+        if the new mask might be further optimized.  */
+      if ((TREE_CODE (arg0) == LSHIFT_EXPR
+          || TREE_CODE (arg0) == RSHIFT_EXPR)
+         && host_integerp (TREE_OPERAND (arg0, 1), 1)
+         && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
+         && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
+            < TYPE_PRECISION (TREE_TYPE (arg0))
+         && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
+         && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
+       {
+         unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
+         unsigned HOST_WIDE_INT mask
+           = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
+         unsigned HOST_WIDE_INT newmask, zerobits = 0;
+         tree shift_type = TREE_TYPE (arg0);
+
+         if (TREE_CODE (arg0) == LSHIFT_EXPR)
+           zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
+         else if (TREE_CODE (arg0) == RSHIFT_EXPR
+                  && TYPE_PRECISION (TREE_TYPE (arg0))
+                     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
+           {
+             unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
+             tree arg00 = TREE_OPERAND (arg0, 0);
+             /* See if more bits can be proven as zero because of
+                zero extension.  */
+             if (TREE_CODE (arg00) == NOP_EXPR
+                 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
+               {
+                 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
+                 if (TYPE_PRECISION (inner_type)
+                     == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
+                     && TYPE_PRECISION (inner_type) < prec)
+                   {
+                     prec = TYPE_PRECISION (inner_type);
+                     /* See if we can shorten the right shift.  */
+                     if (shiftc < prec)
+                       shift_type = inner_type;
+                   }
+               }
+             zerobits = ~(unsigned HOST_WIDE_INT) 0;
+             zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
+             zerobits <<= prec - shiftc;
+             /* For arithmetic shift if sign bit could be set, zerobits
+                can contain actually sign bits, so no transformation is
+                possible, unless MASK masks them all away.  In that
+                case the shift needs to be converted into logical shift.  */
+             if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
+                 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
+               {
+                 if ((mask & zerobits) == 0)
+                   shift_type = unsigned_type_for (TREE_TYPE (arg0));
+                 else
+                   zerobits = 0;
+               }
+           }
+
+         /* ((X << 16) & 0xff00) is (X, 0).  */
+         if ((mask & zerobits) == mask)
+           return omit_one_operand (type, build_int_cst (type, 0), arg0);
+
+         newmask = mask | zerobits;
+         if (newmask != mask && (newmask & (newmask + 1)) == 0)
+           {
+             unsigned int prec;
+
+             /* Only do the transformation if NEWMASK is some integer
+                mode's mask.  */
+             for (prec = BITS_PER_UNIT;
+                  prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
+               if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
+                 break;
+             if (prec < HOST_BITS_PER_WIDE_INT
+                 || newmask == ~(unsigned HOST_WIDE_INT) 0)
+               {
+                 if (shift_type != TREE_TYPE (arg0))
+                   {
+                     tem = fold_build2 (TREE_CODE (arg0), shift_type,
+                                        fold_convert (shift_type,
+                                                      TREE_OPERAND (arg0, 0)),
+                                        TREE_OPERAND (arg0, 1));
+                     tem = fold_convert (type, tem);
+                   }
+                 else
+                   tem = op0;
+                 return fold_build2 (BIT_AND_EXPR, type, tem,
+                                     build_int_cst_type (TREE_TYPE (op1),
+                                                         newmask));
+               }
+           }
+       }
+
       goto associate;
 
     case RDIV_EXPR:
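Illustrative shapes of the new shift-and-mask folds above, assuming a 32-bit unsigned int (hypothetical functions):

unsigned int
h1 (unsigned int x)
{
  /* The low 8 bits of x << 8 are known to be zero, so the mask may be
     widened to all ones and the AND dropped entirely.  */
  return (x << 8) & 0xffffff00;
}

unsigned int
h2 (unsigned int x)
{
  /* (x << 16) & 0xff00: every bit kept by the mask is known zero,
     so the whole expression folds to 0.  */
  return (x << 16) & 0xff00;
}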
@@ -11492,7 +11648,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
       if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
-                                   GET_MODE_BITSIZE (TYPE_MODE (type)));
+                                   TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold_build2 (RROTATE_EXPR, type, op0, tem);
        }
@@ -11511,8 +11667,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
                            fold_build2 (code, type,
                                         TREE_OPERAND (arg0, 1), arg1));
 
-      /* Two consecutive rotates adding up to the width of the mode can
-        be ignored.  */
+      /* Two consecutive rotates adding up to the precision of the
+        type can be ignored.  */
       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
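Illustrative only: the arithmetic behind the "two consecutive rotates" rule, assuming a 32-bit unsigned int.

unsigned int
double_rot (unsigned int x)
{
  unsigned int t = (x >> 8) | (x << 24);   /* rotate right by 8 */
  /* A further rotate right by 24 brings the total to 32, the type's
     precision, so the pair may fold back to plain x.  */
  return (t >> 24) | (t << 8);
}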
@@ -11520,9 +11676,28 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
-             == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
+             == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);
 
+      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
+             (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
+        if the latter can be further optimized.  */
+      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
+         && TREE_CODE (arg0) == BIT_AND_EXPR
+         && TREE_CODE (arg1) == INTEGER_CST
+         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+       {
+         tree mask = fold_build2 (code, type,
+                                  fold_convert (type, TREE_OPERAND (arg0, 1)),
+                                  arg1);
+         tree shift = fold_build2 (code, type,
+                                   fold_convert (type, TREE_OPERAND (arg0, 0)),
+                                   arg1);
+         tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
+         if (tem)
+           return tem;
+       }
+
       return NULL_TREE;
 
     case MIN_EXPR:
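Illustrative shape of the (X & C2) << C1 fold added above, assuming a 32-bit unsigned int:

unsigned int
shift_mask (unsigned int x)
{
  /* (x & 0xff) << 8 may be rewritten as (x << 8) & 0xff00; the shift's
     known-zero low bits then let the earlier BIT_AND_EXPR fold widen
     the mask to the half-word mask 0xffff.  */
  return (x & 0xff) << 8;
}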