Daily bump.
[pf3gnuchains/gcc-fork.git] gcc/fold-const.c
index 5af6f0d..8c0cb1d 100644
@@ -1,6 +1,6 @@
 /* Fold a constant sub-tree into a single node for C-compiler
    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -58,12 +58,14 @@ along with GCC; see the file COPYING3.  If not see
 #include "rtl.h"
 #include "expr.h"
 #include "tm_p.h"
+#include "target.h"
 #include "toplev.h"
 #include "intl.h"
 #include "ggc.h"
 #include "hashtab.h"
 #include "langhooks.h"
 #include "md5.h"
+#include "gimple.h"
 
 /* Nonzero if we are folding constants inside an initializer; zero
    otherwise.  */
@@ -103,13 +105,12 @@ static enum comparison_code comparison_to_compcode (enum tree_code);
 static enum tree_code compcode_to_comparison (enum comparison_code);
 static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
-static int truth_value_p (enum tree_code);
 static int operand_equal_for_comparison_p (tree, tree, tree);
 static int twoval_comparison_p (tree, tree *, tree *, int *);
 static tree eval_subst (tree, tree, tree, tree, tree);
 static tree pedantic_omit_one_operand (tree, tree, tree);
 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
-static tree make_bit_field_ref (tree, tree, int, int, int);
+static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
@@ -836,7 +837,7 @@ div_and_round_double (enum tree_code code, int uns,
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);
 
-       /* If (2 * abs (lrem) >= abs (lden)) */
+       /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);
 
@@ -844,7 +845,7 @@ div_and_round_double (enum tree_code code, int uns,
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
-               && (labs_den < ltwice)))
+               && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
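(Illustration, not part of the patch: the rounding rule this hunk corrects, written out in plain C.  With "<=" the tie case 2*|rem| == |den| also adjusts the quotient, giving round-to-nearest division with ties away from zero.  The helper name round_div is invented for this sketch.)

#include <stdio.h>
#include <stdlib.h>

/* Round-to-nearest division, ties away from zero.  */
static long
round_div (long num, long den)
{
  long quo = num / den;                      /* C division truncates */
  long rem = num % den;
  if (2 * labs (rem) >= labs (den))          /* ">=" includes the tie case */
    quo += (num < 0) == (den < 0) ? 1 : -1;  /* bump away from zero */
  return quo;
}

int
main (void)
{
  /* 3.5 -> 4, -3.5 -> -4, 2.5 -> 3 */
  printf ("%ld %ld %ld\n", round_div (7, 2), round_div (-7, 2), round_div (5, 2));
  return 0;
}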
@@ -951,7 +952,7 @@ fold_defer_overflow_warnings (void)
    deferred code.  */
 
 void
-fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
+fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
 {
   const char *warnmsg;
   location_t locus;
@@ -973,7 +974,7 @@ fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
   if (!issue || warnmsg == NULL)
     return;
 
-  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
+  if (gimple_no_warning_p (stmt))
     return;
 
   /* Use the smallest code level when deciding to issue the
@@ -984,10 +985,10 @@ fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
   if (!issue_strict_overflow_warning (code))
     return;
 
-  if (stmt == NULL_TREE || !expr_has_location (stmt))
+  if (stmt == NULL)
     locus = input_location;
   else
-    locus = expr_location (stmt);
+    locus = gimple_location (stmt);
   warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
 }
 
@@ -997,7 +998,7 @@ fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
 void
 fold_undefer_and_ignore_overflow_warnings (void)
 {
-  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
+  fold_undefer_overflow_warnings (false, NULL, 0);
 }
 
 /* Whether we are deferring overflow warnings.  */
@@ -1014,7 +1015,6 @@ fold_deferring_overflow_warnings_p (void)
 static void
 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
 {
-  gcc_assert (!flag_wrapv && !flag_trapv);
   if (fold_deferring_overflow_warnings > 0)
     {
       if (fold_deferred_overflow_warning == NULL
@@ -1887,8 +1887,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
       if ((flag_rounding_math
-          || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
-              && !flag_unsafe_math_optimizations))
+          || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;
 
@@ -2399,7 +2398,8 @@ fold_convert_const (enum tree_code code, tree type, tree arg1)
   if (TREE_TYPE (arg1) == type)
     return arg1;
 
-  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
+      || TREE_CODE (type) == OFFSET_TYPE)
     {
       if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
@@ -2641,7 +2641,7 @@ fold_convert (tree type, tree arg)
 
     case VOID_TYPE:
       tem = fold_ignored_result (arg);
-      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
+      if (TREE_CODE (tem) == MODIFY_EXPR)
        return tem;
       return fold_build1 (NOP_EXPR, type, tem);
 
@@ -2684,7 +2684,6 @@ maybe_lvalue_p (const_tree x)
   case WITH_CLEANUP_EXPR:
   case COMPOUND_EXPR:
   case MODIFY_EXPR:
-  case GIMPLE_MODIFY_STMT:
   case TARGET_EXPR:
   case COND_EXPR:
   case BIND_EXPR:
@@ -2984,17 +2983,6 @@ combine_comparisons (enum tree_code code, enum tree_code lcode,
     return fold_build2 (compcode_to_comparison (compcode),
                        truth_type, ll_arg, lr_arg);
 }
-
-/* Return nonzero if CODE is a tree code that represents a truth value.  */
-
-static int
-truth_value_p (enum tree_code code)
-{
-  return (TREE_CODE_CLASS (code) == tcc_comparison
-         || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
-         || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
-         || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
-}
 \f
 /* Return nonzero if two operands (typically of the same tree node)
    are necessarily equal.  If either argument has side-effects this
@@ -3029,10 +3017,18 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
     return 0;
 
+  /* Check equality of integer constants before bailing out due to
+     precision differences.  */
+  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+    return tree_int_cst_equal (arg0, arg1);
+
   /* If both types don't have the same signedness, then we can't consider
      them equal.  We must check this before the STRIP_NOPS calls
-     because they may change the signedness of the arguments.  */
-  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
+     because they may change the signedness of the arguments.  As pointers
+     strictly don't have a signedness, require either two pointers or
+     two non-pointers as well.  */
+  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
+      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
     return 0;
 
   /* If both types don't have the same precision, then it is not safe
@@ -3163,8 +3159,7 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
       /* Two conversions are equal only if signedness and modes match.  */
       switch (TREE_CODE (arg0))
         {
-        case NOP_EXPR:
-        case CONVERT_EXPR:
+       CASE_CONVERT:
         case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
@@ -3254,6 +3249,9 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));
 
+       case COND_EXPR:
+         return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+         
        default:
          return 0;
        }
@@ -3383,17 +3381,17 @@ static int
 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
 {
   enum tree_code code = TREE_CODE (arg);
-  enum tree_code_class class = TREE_CODE_CLASS (code);
+  enum tree_code_class tclass = TREE_CODE_CLASS (code);
 
   /* We can handle some of the tcc_expression cases here.  */
-  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
-    class = tcc_unary;
-  else if (class == tcc_expression
+  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
+    tclass = tcc_unary;
+  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
-    class = tcc_binary;
+    tclass = tcc_binary;
 
-  else if (class == tcc_expression && code == SAVE_EXPR
+  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
     {
       /* If we've already found a CVAL1 or CVAL2, this expression is
@@ -3401,11 +3399,11 @@ twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
       if (*cval1 || *cval2)
        return 0;
 
-      class = tcc_unary;
+      tclass = tcc_unary;
       *save_p = 1;
     }
 
-  switch (class)
+  switch (tclass)
     {
     case tcc_unary:
       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
@@ -3476,16 +3474,16 @@ eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
 {
   tree type = TREE_TYPE (arg);
   enum tree_code code = TREE_CODE (arg);
-  enum tree_code_class class = TREE_CODE_CLASS (code);
+  enum tree_code_class tclass = TREE_CODE_CLASS (code);
 
   /* We can handle some of the tcc_expression cases here.  */
-  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
-    class = tcc_unary;
-  else if (class == tcc_expression
+  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
+    tclass = tcc_unary;
+  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
-    class = tcc_binary;
+    tclass = tcc_binary;
 
-  switch (class)
+  switch (tclass)
     {
     case tcc_unary:
       return fold_build1 (code, type,
@@ -3811,6 +3809,9 @@ distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
   else
     return 0;
 
+  common = fold_convert (type, common);
+  left = fold_convert (type, left);
+  right = fold_convert (type, right);
   return fold_build2 (TREE_CODE (arg0), type, common,
                      fold_build2 (code, type, left, right));
 }
@@ -3859,10 +3860,10 @@ distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
 
 static tree
-make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
-                   int unsignedp)
+make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
+                   HOST_WIDE_INT bitpos, int unsignedp)
 {
-  tree result;
+  tree result, bftype;
 
   if (bitpos == 0)
     {
@@ -3874,10 +3875,16 @@ make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
        return fold_convert (type, inner);
     }
 
-  result = build3 (BIT_FIELD_REF, type, inner,
+  bftype = type;
+  if (TYPE_PRECISION (bftype) != bitsize
+      || TYPE_UNSIGNED (bftype) == !unsignedp)
+    bftype = build_nonstandard_integer_type (bitsize, 0);
+
+  result = build3 (BIT_FIELD_REF, bftype, inner,
                   size_int (bitsize), bitsize_int (bitpos));
 
-  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
+  if (bftype != type)
+    result = fold_convert (type, result);
 
   return result;
 }
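(Illustration, not part of the patch: what a BIT_FIELD_REF of BITSIZE bits at BITPOS means on a 32-bit word, written out in plain C.  The hunk's new bftype handling is about building the reference with a type whose precision and signedness match the field exactly; the helper name bit_field_ref below is invented for this sketch.)

#include <inttypes.h>
#include <stdio.h>

/* Extract BITSIZE bits at BITPOS from WORD, zero- or sign-extending
   according to UNSIGNEDP.  */
static int32_t
bit_field_ref (uint32_t word, int bitsize, int bitpos, int unsignedp)
{
  uint32_t mask = bitsize < 32 ? (1u << bitsize) - 1u : ~0u;
  uint32_t field = (word >> bitpos) & mask;
  if (!unsignedp && bitsize < 32 && (field & (1u << (bitsize - 1))))
    field |= ~mask;                        /* sign-extend */
  return (int32_t) field;
}

int
main (void)
{
  printf ("%" PRId32 " %" PRId32 "\n",
          bit_field_ref (0xF0u, 4, 4, 1),  /* 15, zero-extended */
          bit_field_ref (0xF0u, 4, 4, 0)); /* -1, sign-extended */
  return 0;
}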
@@ -4095,8 +4102,7 @@ decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
   /* We are interested in the bare arrangement of bits, so strip everything
      that doesn't affect the machine mode.  However, record the type of the
      outermost expression if it may matter below.  */
-  if (TREE_CODE (exp) == NOP_EXPR
-      || TREE_CODE (exp) == CONVERT_EXPR
+  if (CONVERT_EXPR_P (exp)
       || TREE_CODE (exp) == NON_LVALUE_EXPR)
     outer_type = TREE_TYPE (exp);
   STRIP_NOPS (exp);
@@ -4528,7 +4534,7 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
          exp = arg0;
          continue;
 
-       case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
+       CASE_CONVERT: case NON_LVALUE_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;
 
@@ -4714,7 +4720,12 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high)
        {
          if (TYPE_UNSIGNED (etype))
            {
-             etype = signed_type_for (etype);
+             tree signed_etype = signed_type_for (etype);
+             if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
+               etype
+                 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
+             else
+               etype = signed_etype;
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
@@ -5072,9 +5083,10 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
 
      Note that all these transformations are correct if A is
      NaN, since the two alternatives (A and -A) are also NaNs.  */
-  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
-       ? real_zerop (arg01)
-       : integer_zerop (arg01))
+  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
+         ? real_zerop (arg01)
+         : integer_zerop (arg01))
       && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
@@ -5127,7 +5139,8 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
      both transformations are correct when A is NaN: A != 0
      is then true, and A == 0 is false.  */
 
-  if (integer_zerop (arg01) && integer_zerop (arg2))
+  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+      && integer_zerop (arg01) && integer_zerop (arg2))
     {
       if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
@@ -5161,7 +5174,8 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
      a number and A is not.  The conditions in the original
      expressions will be false, so all four give B.  The min()
      and max() versions would give a NaN instead.  */
-  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
+  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+      && operand_equal_for_comparison_p (arg01, arg2, arg00)
       /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
       && (in_gimple_form
@@ -5317,7 +5331,9 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
 
 \f
 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
-#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
+#define LOGICAL_OP_NON_SHORT_CIRCUIT \
+  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
+               false) >= 2)
 #endif
 
 /* EXP is some logical combination of boolean tests.  See if we can
@@ -5494,7 +5510,7 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
   tree l_const, r_const;
   tree lntype, rntype, result;
-  int first_bit, end_bit;
+  HOST_WIDE_INT first_bit, end_bit;
   int volatilep;
   tree orig_lhs = lhs, orig_rhs = rhs;
   enum tree_code orig_code = code;
@@ -5565,7 +5581,8 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
      that can be merged.  Avoid doing this if the RHS is a floating-point
      comparison since those can trap.  */
 
-  if (BRANCH_COST >= 2
+  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
+                  false) >= 2
       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
       && simple_operand_p (rl_arg)
       && simple_operand_p (rr_arg))
@@ -5574,7 +5591,8 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
       if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
-         && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
+         && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
+         && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2 (NE_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
@@ -5584,7 +5602,8 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
       if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
-         && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
+         && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
+         && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2 (EQ_EXPR, truth_type,
                       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                               ll_arg, rl_arg),
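(Illustration, not part of the patch: the identity behind the two merges above.  For integer operands, x != 0 || y != 0 tests the same thing as (x | y) != 0, and x == 0 && y == 0 the same as (x | y) == 0; the new INTEGRAL_TYPE_P check keeps the fold away from operand types, such as floating point, that have no BIT_IOR_EXPR.)

#include <stdio.h>

int
main (void)
{
  int x, y;
  for (x = -1; x <= 1; x++)
    for (y = -1; y <= 1; y++)
      if ((x != 0 || y != 0) != ((x | y) != 0)
          || (x == 0 && y == 0) != ((x | y) == 0))
        printf ("mismatch at %d %d\n", x, y);
  printf ("done\n");   /* no mismatches are printed */
  return 0;
}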
@@ -5885,7 +5904,7 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
 {
   tree arg0 = op0;
   enum tree_code op_code;
-  tree comp_const = op1;
+  tree comp_const;
   tree minmax_const;
   int consts_equal, consts_lt;
   tree inner;
@@ -5894,6 +5913,7 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
 
   op_code = TREE_CODE (arg0);
   minmax_const = TREE_OPERAND (arg0, 1);
+  comp_const = fold_convert (TREE_TYPE (arg0), op1);
   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
   inner = TREE_OPERAND (arg0, 0);
@@ -6058,24 +6078,24 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                            fold_convert (ctype, c), 0);
       break;
 
-    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
+    CASE_CONVERT: case NON_LVALUE_EXPR:
       /* If op0 is an expression ...  */
       if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
-         /* ... and is unsigned, and its type is smaller than ctype,
-            then we cannot pass through as widening.  */
-         && ((TYPE_UNSIGNED (TREE_TYPE (op0))
+         /* ... and has wrapping overflow, and its type is smaller
+            than ctype, then we cannot pass through as widening.  */
+         && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
-              && (GET_MODE_SIZE (TYPE_MODE (ctype))
-                  > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
+              && (TYPE_PRECISION (ctype)
+                  > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
-             || (GET_MODE_SIZE (TYPE_MODE (type))
-                 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
+             || (TYPE_PRECISION (type)
+                 < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
@@ -6259,9 +6279,20 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
         (C * 8) % 4 since we know that's zero.  */
       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
+         /* If the multiplication can overflow we cannot optimize this.
+            ???  Until we can properly mark individual operations as
+            not overflowing we need to treat sizetype special here as
+            stor-layout relies on this opimization to make
+            stor-layout relies on this optimization to make
+         && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
+             || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
+                 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
-       return omit_one_operand (type, integer_zero_node, op0);
+       {
+         *strict_overflow_p = true;
+         return omit_one_operand (type, integer_zero_node, op0);
+       }
 
       /* ... fall through ...  */
 
@@ -6286,8 +6317,13 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
       /* If these are the same operation types, we can associate them
         assuming no overflow.  */
       if (tcode == code
-         && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
-                                    fold_convert (ctype, c), 0))
+         && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
+                                        fold_convert (ctype, c), 1))
+         && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
+                                              TREE_INT_CST_HIGH (t1),
+                                              (TYPE_UNSIGNED (ctype)
+                                               && tcode != MULT_EXPR) ? -1 : 1,
+                                              TREE_OVERFLOW (t1)))
          && !TREE_OVERFLOW (t1))
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
 
@@ -6992,7 +7028,7 @@ tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
   if (TREE_CONSTANT (arg0))
     return 1;
 
-  if (optimize_size)
+  if (optimize_function_for_size_p (cfun))
     return 0;
 
   if (reorder && flag_evaluation_order
@@ -7057,8 +7093,10 @@ fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
   if ((code == EQ_EXPR || code == NE_EXPR
        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
       && (TREE_TYPE (arg1_unw) == shorter_type
-         || (TYPE_PRECISION (shorter_type)
-             >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+         || ((TYPE_PRECISION (shorter_type)
+              >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+             && (TYPE_UNSIGNED (shorter_type)
+                 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
@@ -7125,8 +7163,7 @@ fold_sign_changed_comparison (enum tree_code code, tree type,
   tree arg0_inner;
   tree inner_type, outer_type;
 
-  if (TREE_CODE (arg0) != NOP_EXPR
-      && TREE_CODE (arg0) != CONVERT_EXPR)
+  if (!CONVERT_EXPR_P (arg0))
     return NULL_TREE;
 
   outer_type = TREE_TYPE (arg0);
@@ -7145,13 +7182,18 @@ fold_sign_changed_comparison (enum tree_code code, tree type,
   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
     return NULL_TREE;
 
+  /* If the conversion is from an integral subtype to its basetype
+     leave it alone.  */
+  if (TREE_TYPE (inner_type) == outer_type)
+    return NULL_TREE;
+
   if (TREE_CODE (arg1) != INTEGER_CST
-      && !((TREE_CODE (arg1) == NOP_EXPR
-           || TREE_CODE (arg1) == CONVERT_EXPR)
+      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
     return NULL_TREE;
 
-  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
       && code != NE_EXPR
       && code != EQ_EXPR)
     return NULL_TREE;
@@ -7243,7 +7285,7 @@ try_move_mult_to_index (tree addr, tree op1)
          else
            {
              /* Try if delta is a multiple of step.  */
-             tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
+             tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
@@ -7529,7 +7571,7 @@ native_encode_real (const_tree expr, unsigned char *ptr, int len)
 
   if (total_bytes > len)
     return 0;
-  words = 32 / UNITS_PER_WORD;
+  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
 
   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
 
@@ -7623,6 +7665,37 @@ native_encode_vector (const_tree expr, unsigned char *ptr, int len)
 }
 
 
+/* Subroutine of native_encode_expr.  Encode the STRING_CST
+   specified by EXPR into the buffer PTR of length LEN bytes.
+   Return the number of bytes placed in the buffer, or zero
+   upon failure.  */
+
+static int
+native_encode_string (const_tree expr, unsigned char *ptr, int len)
+{
+  tree type = TREE_TYPE (expr);
+  HOST_WIDE_INT total_bytes;
+
+  if (TREE_CODE (type) != ARRAY_TYPE
+      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
+      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
+      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
+    return 0;
+  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
+  if (total_bytes > len)
+    return 0;
+  if (TREE_STRING_LENGTH (expr) < total_bytes)
+    {
+      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
+      memset (ptr + TREE_STRING_LENGTH (expr), 0,
+             total_bytes - TREE_STRING_LENGTH (expr));
+    }
+  else
+    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
+  return total_bytes;
+}
+
+
 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
    buffer PTR of length LEN bytes.  Return the number of bytes
@@ -7645,6 +7718,9 @@ native_encode_expr (const_tree expr, unsigned char *ptr, int len)
     case VECTOR_CST:
       return native_encode_vector (expr, ptr, len);
 
+    case STRING_CST:
+      return native_encode_string (expr, ptr, len);
+
     default:
       return 0;
     }
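(Illustration, not part of the patch: the byte-level effect of the new native_encode_string for a string constant whose array type is longer than the literal, e.g. char buf[8] = "abc"; the trailing bytes are zero-filled.  The helper encode_string and its flat size parameters are invented for this sketch.)

#include <stdio.h>
#include <string.h>

/* Copy STR_LEN bytes of STR into PTR and zero-fill up to TOTAL_BYTES,
   failing if TOTAL_BYTES does not fit into the LEN-byte buffer.  */
static int
encode_string (const char *str, size_t str_len,
               unsigned char *ptr, size_t total_bytes, size_t len)
{
  if (total_bytes > len)
    return 0;
  if (str_len < total_bytes)
    {
      memcpy (ptr, str, str_len);
      memset (ptr + str_len, 0, total_bytes - str_len);
    }
  else
    memcpy (ptr, str, total_bytes);
  return (int) total_bytes;
}

int
main (void)
{
  unsigned char buf[8];
  int i, n = encode_string ("abc", 4 /* includes the NUL */, buf,
                            sizeof buf, sizeof buf);
  for (i = 0; i < n; i++)
    printf ("%02x ", buf[i]);
  printf ("\n");                  /* 61 62 63 00 00 00 00 00 */
  return 0;
}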
@@ -7719,7 +7795,7 @@ native_interpret_real (tree type, const unsigned char *ptr, int len)
   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
   if (total_bytes > len || total_bytes > 24)
     return NULL_TREE;
-  words = 32 / UNITS_PER_WORD;
+  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
 
   memset (tmp, 0, sizeof (tmp));
   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
@@ -7943,7 +8019,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
   arg0 = op0;
   if (arg0)
     {
-      if (code == NOP_EXPR || code == CONVERT_EXPR
+      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
@@ -7992,7 +8068,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */
 
-         if ((code == NOP_EXPR || code == CONVERT_EXPR
+         if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
@@ -8042,9 +8118,8 @@ fold_unary (enum tree_code code, tree type, tree op0)
        return fold_convert (type, op0);
       return NULL_TREE;
 
-    case NOP_EXPR:
+    CASE_CONVERT:
     case FLOAT_EXPR:
-    case CONVERT_EXPR:
     case FIX_TRUNC_EXPR:
       if (TREE_TYPE (op0) == type)
        return op0;
@@ -8056,8 +8131,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
                            TREE_OPERAND (op0, 1));
 
       /* Handle cases of two conversions in a row.  */
-      if (TREE_CODE (op0) == NOP_EXPR
-         || TREE_CODE (op0) == CONVERT_EXPR)
+      if (CONVERT_EXPR_P (op0))
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
@@ -8125,9 +8199,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
-              initial and intermediate types differ.
-            - the initial type is a pointer to an array and the final type
-              not.  */
+              initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
@@ -8139,10 +8211,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
-                   && TYPE_MODE (type) == TYPE_MODE (inter_type))
-             && ! (inside_ptr && final_ptr
-                   && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
-                   && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
+                   && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1 (code, type, TREE_OPERAND (op0, 0));
        }
 
@@ -8169,17 +8238,16 @@ fold_unary (enum tree_code code, tree type, tree op0)
            return fold_convert (type, fold_addr_expr (base));
         }
 
-      if ((TREE_CODE (op0) == MODIFY_EXPR
-          || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
-         && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
+      if (TREE_CODE (op0) == MODIFY_EXPR
+         && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
-         && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
+         && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
-              (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
+              (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
-         tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
+         tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
@@ -8189,11 +8257,15 @@ fold_unary (enum tree_code code, tree type, tree op0)
 
       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constants (if x has signed type, the sign bit cannot be set
-        in c).  This folds extension into the BIT_AND_EXPR.  */
-      if (INTEGRAL_TYPE_P (type)
-         && TREE_CODE (type) != BOOLEAN_TYPE
+        in c).  This folds extension into the BIT_AND_EXPR.
+        ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
+        very likely don't have maximal range for their precision and this
+        transformation effectively doesn't preserve non-maximal ranges.  */
+      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
-         && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
+         && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
+         /* Not if the conversion is to the sub-type.  */
+         && TREE_TYPE (type) != TREE_TYPE (op0))
        {
          tree and = op0;
          tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
@@ -8257,8 +8329,7 @@ fold_unary (enum tree_code code, tree type, tree op0)
       if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
-         && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
-             || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
+         && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
@@ -8267,6 +8338,31 @@ fold_unary (enum tree_code code, tree type, tree op0)
            return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
        }
 
+      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
+        type of X and Y (integer types only).  */
+      if (INTEGRAL_TYPE_P (type)
+         && TREE_CODE (op0) == MULT_EXPR
+         && INTEGRAL_TYPE_P (TREE_TYPE (op0))
+         && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
+       {
+         /* Be careful not to introduce new overflows.  */
+         tree mult_type;
+          if (TYPE_OVERFLOW_WRAPS (type))
+           mult_type = type;
+         else
+           mult_type = unsigned_type_for (type);
+
+         if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
+           {
+             tem = fold_build2 (MULT_EXPR, mult_type,
+                                fold_convert (mult_type,
+                                              TREE_OPERAND (op0, 0)),
+                                fold_convert (mult_type,
+                                              TREE_OPERAND (op0, 1)));
+             return fold_convert (type, tem);
+           }
+       }
+
       tem = fold_convert_const (code, type, op0);
       return tem ? tem : NULL_TREE;
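(Illustration, not part of the patch: the identity behind the new (T1)(X * Y) -> (T1)X * (T1)Y fold.  Narrowing after the multiply keeps only the low bits, and a wrapping multiply done in the narrower unsigned type produces the same low bits; that is why mult_type falls back to unsigned_type_for (type) when T1's overflow is not defined to wrap.)

#include <inttypes.h>
#include <stdio.h>

int
main (void)
{
  uint64_t x = 0x123456789abcdefULL, y = 0xfedcba9876543210ULL;
  uint32_t wide_then_narrow = (uint32_t) (x * y);          /* (T1)(X * Y)   */
  uint32_t narrow_then_mul  = (uint32_t) x * (uint32_t) y; /* (T1)X * (T1)Y */
  printf ("%" PRIu32 " %" PRIu32 "\n", wide_then_narrow, narrow_then_mul);
  return 0;
}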
 
@@ -8277,13 +8373,32 @@ fold_unary (enum tree_code code, tree type, tree op0)
     case VIEW_CONVERT_EXPR:
       if (TREE_TYPE (op0) == type)
        return op0;
-      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR
-         || (TREE_CODE (op0) == NOP_EXPR
-             && INTEGRAL_TYPE_P (TREE_TYPE (op0))
-             && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
-             && TYPE_PRECISION (TREE_TYPE (op0))
-                == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
+      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
+       return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+
+      /* For integral conversions with the same precision or pointer
+        conversions use a NOP_EXPR instead.  */
+      if ((INTEGRAL_TYPE_P (type)
+          || POINTER_TYPE_P (type))
+         && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
+             || POINTER_TYPE_P (TREE_TYPE (op0)))
+         && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
+         /* Do not muck with VIEW_CONVERT_EXPRs that convert from
+            a sub-type to its base type as generated by the Ada FE.  */
+         && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
+              && TREE_TYPE (TREE_TYPE (op0))))
+       return fold_convert (type, op0);
+
+      /* Strip inner integral conversions that do not change the precision.  */
+      if (CONVERT_EXPR_P (op0)
+         && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
+             || POINTER_TYPE_P (TREE_TYPE (op0)))
+         && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
+             || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
+         && (TYPE_PRECISION (TREE_TYPE (op0))
+             == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
+
       return fold_view_convert_expr (type, op0);
 
     case NEGATE_EXPR:
@@ -8564,9 +8679,14 @@ maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
   int sgn0;
   bool swap = false;
 
-  /* Match A +- CST code arg1 and CST code arg1.  */
-  if (!(((code0 == MINUS_EXPR
-          || code0 == PLUS_EXPR)
+  /* Match A +- CST code arg1 and CST code arg1.  We can change the
+     first form only if overflow is undefined.  */
+  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
+        /* In principle pointers also have undefined overflow behavior,
+           but that causes problems elsewhere.  */
+        && !POINTER_TYPE_P (TREE_TYPE (arg0))
+        && (code0 == MINUS_EXPR
+            || code0 == PLUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
     return NULL_TREE;
@@ -8627,9 +8747,21 @@ maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
       *strict_overflow_p = true;
     }
 
-  /* Now build the constant reduced in magnitude.  */
+  /* Now build the constant reduced in magnitude.  But not if that
+     would produce one outside of its type's range.  */
+  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
+      && ((sgn0 == 1
+          && TYPE_MIN_VALUE (TREE_TYPE (cst0))
+          && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
+         || (sgn0 == -1
+             && TYPE_MAX_VALUE (TREE_TYPE (cst0))
+             && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
+    /* We cannot swap the comparison here as that would cause us to
+       endlessly recurse.  */
+    return NULL_TREE;
+
   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
-                      cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
+                      cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
   if (code0 != INTEGER_CST)
     t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
 
@@ -8655,12 +8787,6 @@ maybe_canonicalize_comparison (enum tree_code code, tree type,
   const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");
 
-  /* In principle pointers also have undefined overflow behavior,
-     but that causes problems elsewhere.  */
-  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
-      || POINTER_TYPE_P (TREE_TYPE (arg0)))
-    return NULL_TREE;
-
   /* Try canonicalization by simplifying arg0.  */
   strict_overflow_p = false;
   t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
@@ -8683,6 +8809,62 @@ maybe_canonicalize_comparison (enum tree_code code, tree type,
   return t;
 }
 
+/* Return whether BASE + OFFSET + BITPOS may wrap around the address
+   space.  This is used to avoid issuing overflow warnings for
+   expressions like &p->x which can not wrap.  */
+
+static bool
+pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
+{
+  unsigned HOST_WIDE_INT offset_low, total_low;
+  HOST_WIDE_INT size, offset_high, total_high;
+
+  if (!POINTER_TYPE_P (TREE_TYPE (base)))
+    return true;
+
+  if (bitpos < 0)
+    return true;
+
+  if (offset == NULL_TREE)
+    {
+      offset_low = 0;
+      offset_high = 0;
+    }
+  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
+    return true;
+  else
+    {
+      offset_low = TREE_INT_CST_LOW (offset);
+      offset_high = TREE_INT_CST_HIGH (offset);
+    }
+
+  if (add_double_with_sign (offset_low, offset_high,
+                           bitpos / BITS_PER_UNIT, 0,
+                           &total_low, &total_high,
+                           true))
+    return true;
+
+  if (total_high != 0)
+    return true;
+
+  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
+  if (size <= 0)
+    return true;
+
+  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
+     array.  */
+  if (TREE_CODE (base) == ADDR_EXPR)
+    {
+      HOST_WIDE_INT base_size;
+
+      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
+      if (base_size > 0 && size < base_size)
+       size = base_size;
+    }
+
+  return total_low > (unsigned HOST_WIDE_INT) size;
+}
+
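(Illustration, not part of the patch: the idea behind pointer_may_wrap_p in plain C.  An address of the form BASE + OFFSET + BITPOS can only be assumed not to wrap when the constant byte offset stays inside the object BASE points to; the real code does the addition with double-word arithmetic via add_double_with_sign.  The names below are invented for this sketch.)

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* May BASE + BYTE_OFFSET wrap around the address space?  Answer
   conservatively: "yes" whenever the object size is unknown or the
   offset is negative or past the end of the object.  */
static bool
pointer_may_wrap (long long object_size, long long byte_offset)
{
  if (object_size <= 0 || byte_offset < 0)
    return true;
  return byte_offset > object_size;
}

struct S { char pad[100]; int x; };

int
main (void)
{
  /* &p->x for a struct S *p stays inside the object, so it cannot wrap.  */
  printf ("%d\n", pointer_may_wrap ((long long) sizeof (struct S),
                                    (long long) offsetof (struct S, x)));
  return 0;
}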
 /* Subroutine of fold_binary.  This routine performs all of the
    transformations that are common to the equality/inequality
    operators (EQ_EXPR and NE_EXPR) and the ordering operators
@@ -8787,11 +8969,12 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
       enum machine_mode mode;
       int volatilep, unsignedp;
-      bool indirect_base0 = false;
+      bool indirect_base0 = false, indirect_base1 = false;
 
       /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
-        off the base object if possible.  */
+        off the base object if possible.  indirect_baseN will be true
+        if baseN is not an address but refers to the object itself.  */
       base0 = arg0;
       if (TREE_CODE (arg0) == ADDR_EXPR)
        {
@@ -8815,32 +8998,41 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
-         /* We have to make sure to have an indirect/non-indirect base1
-            just the same as we did for base0.  */
-         if (TREE_CODE (base1) == INDIRECT_REF
-             && !indirect_base0)
+         if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
-         else if (!indirect_base0)
-           base1 = NULL_TREE;
+         else
+           indirect_base1 = true;
        }
       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          offset1 = TREE_OPERAND (arg1, 1);
        }
-      else if (indirect_base0)
-       base1 = NULL_TREE;
 
       /* If we have equivalent bases we might be able to simplify.  */
-      if (base0 && base1
+      if (indirect_base0 == indirect_base1
          && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
-         if (offset0 == offset1
-             || (offset0 && offset1
-                 && operand_equal_p (offset0, offset1, 0)))
+         if ((offset0 == offset1
+              || (offset0 && offset1
+                  && operand_equal_p (offset0, offset1, 0)))
+             && (code == EQ_EXPR
+                 || code == NE_EXPR
+                 || POINTER_TYPE_OVERFLOW_UNDEFINED))
+               
            {
+             if (code != EQ_EXPR
+                 && code != NE_EXPR
+                 && bitpos0 != bitpos1
+                 && (pointer_may_wrap_p (base0, offset0, bitpos0)
+                     || pointer_may_wrap_p (base1, offset1, bitpos1)))
+               fold_overflow_warning (("assuming pointer wraparound does not "
+                                       "occur when comparing P +- C1 with "
+                                       "P +- C2"),
+                                      WARN_STRICT_OVERFLOW_CONDITIONAL);
+
              switch (code)
                {
                case EQ_EXPR:
@@ -8865,7 +9057,9 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
             because pointer arithmetic is restricted to retain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
-         else if (bitpos0 == bitpos1)
+         else if (bitpos0 == bitpos1
+                  && ((code == EQ_EXPR || code == NE_EXPR)
+                      || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              tree signed_size_type_node;
              signed_size_type_node = signed_type_for (size_type_node);
@@ -8884,9 +9078,60 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
              else
                offset1 = fold_convert (signed_size_type_node, offset1);
 
+             if (code != EQ_EXPR
+                 && code != NE_EXPR
+                 && (pointer_may_wrap_p (base0, offset0, bitpos0)
+                     || pointer_may_wrap_p (base1, offset1, bitpos1)))
+               fold_overflow_warning (("assuming pointer wraparound does not "
+                                       "occur when comparing P +- C1 with "
+                                       "P +- C2"),
+                                      WARN_STRICT_OVERFLOW_COMPARISON);
+
              return fold_build2 (code, type, offset0, offset1);
            }
        }
+      /* For non-equal bases we can simplify if they are addresses
+        of local binding decls or constants.  */
+      else if (indirect_base0 && indirect_base1
+              /* We know that !operand_equal_p (base0, base1, 0)
+                 because the if condition was false.  But make
+                 sure two decls are not the same.  */
+              && base0 != base1
+              && TREE_CODE (arg0) == ADDR_EXPR
+              && TREE_CODE (arg1) == ADDR_EXPR
+              && (((TREE_CODE (base0) == VAR_DECL
+                    || TREE_CODE (base0) == PARM_DECL)
+                   && (targetm.binds_local_p (base0)
+                       || CONSTANT_CLASS_P (base1)))
+                  || CONSTANT_CLASS_P (base0))
+              && (((TREE_CODE (base1) == VAR_DECL
+                    || TREE_CODE (base1) == PARM_DECL)
+                   && (targetm.binds_local_p (base1)
+                       || CONSTANT_CLASS_P (base0)))
+                  || CONSTANT_CLASS_P (base1)))
+       {
+         if (code == EQ_EXPR)
+           return omit_two_operands (type, boolean_false_node, arg0, arg1);
+         else if (code == NE_EXPR)
+           return omit_two_operands (type, boolean_true_node, arg0, arg1);
+       }
+      /* For equal offsets we can simplify to a comparison of the
+        base addresses.  */
+      else if (bitpos0 == bitpos1
+              && (indirect_base0
+                  ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
+              && (indirect_base1
+                  ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
+              && ((offset0 == offset1)
+                  || (offset0 && offset1
+                      && operand_equal_p (offset0, offset1, 0))))
+       {
+         if (indirect_base0)
+           base0 = fold_addr_expr (base0);
+         if (indirect_base1)
+           base1 = fold_addr_expr (base1);
+         return fold_build2 (code, type, base0, base1);
+       }
     }
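(Illustration, not part of the patch: the new non-equal-bases case in source terms.  Two distinct local objects can never share an address, so once both bases are known to bind locally or are constants, &x == &y folds to false and &x != &y to true.)

#include <stdio.h>

int
main (void)
{
  int x = 0, y = 0;
  /* Distinct objects: both comparisons fold to constants.  */
  printf ("%d %d\n", &x == &y, &x != &y);   /* 0 1 */
  return 0;
}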
 
   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
@@ -9073,8 +9318,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
     }
 
   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
-      && (TREE_CODE (arg0) == NOP_EXPR
-         || TREE_CODE (arg0) == CONVERT_EXPR))
+      && CONVERT_EXPR_P (arg0))
     {
       /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
@@ -9233,27 +9477,6 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
        }
     }
 
-  /* Fold a comparison of the address of COMPONENT_REFs with the same
-     type and component to a comparison of the address of the base
-     object.  In short, &x->a OP &y->a to x OP y and
-     &x->a OP &y.a to x OP &y  */
-  if (TREE_CODE (arg0) == ADDR_EXPR
-      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
-      && TREE_CODE (arg1) == ADDR_EXPR
-      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
-    {
-      tree cref0 = TREE_OPERAND (arg0, 0);
-      tree cref1 = TREE_OPERAND (arg1, 0);
-      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
-       {
-         tree op0 = TREE_OPERAND (cref0, 0);
-         tree op1 = TREE_OPERAND (cref1, 0);
-         return fold_build2 (code, type,
-                             fold_addr_expr (op0),
-                             fold_addr_expr (op1));
-       }
-    }
-
   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
      into a single range test.  */
   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
@@ -9371,7 +9594,7 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
            }
        }
 
-      if (DECL_P (expr))
+      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
        return DECL_ALIGN_UNIT (expr);
     }
   else if (code == POINTER_PLUS_EXPR)
@@ -9434,8 +9657,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
   tree t1 = NULL_TREE;
   bool strict_overflow_p;
 
-  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
-              || IS_GIMPLE_STMT_CODE_CLASS (kind))
+  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);
@@ -9447,7 +9669,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
      safe for every expression, except for a comparison expression
      because its signedness is derived from its operands.  So, in
      the latter case, only strip conversions that don't change the
-     signedness.
+     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
+     preserved.
 
      Note that this is done as an internal manipulation within the
      constant folder, in order to find the simplest representation
@@ -9455,7 +9678,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
      cases, the appropriate type conversions should be put back in
      the tree that will get out of the constant folder.  */
 
-  if (kind == tcc_comparison)
+  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
     {
       STRIP_SIGN_NOPS (arg0);
       STRIP_SIGN_NOPS (arg1);
@@ -9662,7 +9885,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
-           return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+           return fold_build1 (NEGATE_EXPR, type,
+                               fold_convert (type, TREE_OPERAND (arg0, 0)));
 
          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
@@ -9987,7 +10211,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
 
          /* With undefined overflow we can only associate constants
             with one variable.  */
-         if ((POINTER_TYPE_P (type)
+         if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
               || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
              && var0 && var1)
            {
@@ -10340,6 +10564,17 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
            return fold_build2 (LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));
 
+         /* (A + A) * C -> A * 2 * C  */
+         if (TREE_CODE (arg0) == PLUS_EXPR
+             && TREE_CODE (arg1) == INTEGER_CST
+             && operand_equal_p (TREE_OPERAND (arg0, 0),
+                                 TREE_OPERAND (arg0, 1), 0))
+           return fold_build2 (MULT_EXPR, type,
+                               omit_one_operand (type, TREE_OPERAND (arg0, 0),
+                                                 TREE_OPERAND (arg0, 1)),
+                               fold_build2 (MULT_EXPR, type,
+                                            build_int_cst (type, 2) , arg1));
+
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
@@ -10563,7 +10798,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
                }
 
              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
-             if (! optimize_size
+             if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
@@ -10706,8 +10941,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
        {
          return fold_build1 (BIT_NOT_EXPR, type,
                              build2 (BIT_AND_EXPR, type,
-                                     TREE_OPERAND (arg0, 0),
-                                     TREE_OPERAND (arg1, 0)));
+                                     fold_convert (type,
+                                                   TREE_OPERAND (arg0, 0)),
+                                     fold_convert (type,
+                                                   TREE_OPERAND (arg1, 0))));
        }
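(Illustration, not part of the patch: the De Morgan identity this fold rebuilds, ~x | ~y == ~(x & y); the change itself only converts both operands to the result type before constructing the BIT_AND_EXPR.)

#include <stdio.h>

int
main (void)
{
  unsigned x = 0xF0F0u, y = 0xFF00u;
  printf ("%#x %#x\n", ~x | ~y, ~(x & y));   /* the same value twice */
  return 0;
}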
 
       /* See if this can be simplified into a rotate first.  If that
@@ -10891,14 +11128,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
-         tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
-         tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
-                                  TREE_OPERAND (arg0, 0), tmp1);
-         tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
-                                  TREE_OPERAND (arg0, 1), tmp1);
+         tree tmp1 = fold_convert (type, arg1);
+         tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
+         tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
+         tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
+         tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
          return fold_convert (type,
-                              fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
-                                           tmp2, tmp3));
+                              fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
        }
 
       /* (X | Y) & Y is (X, Y).  */
@@ -11561,7 +11797,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
-                                   "when simplifying modulos"),
+                                   "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert (type, tem);
        }
@@ -11576,7 +11812,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
 
     case RSHIFT_EXPR:
       /* Optimize -1 >> x for arithmetic right shifts.  */
-      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
+      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
+         && tree_expr_nonnegative_p (arg1))
        return omit_one_operand (type, arg0, arg1);
       /* ... fall through ...  */
 
@@ -12601,29 +12838,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
            }
        }
 
-      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
-        This transformation affects the cases which are handled in later
-        optimizations involving comparisons with non-negative constants.  */
-      if (TREE_CODE (arg1) == INTEGER_CST
-         && TREE_CODE (arg0) != INTEGER_CST
-         && tree_int_cst_sgn (arg1) > 0)
-       {
-         if (code == GE_EXPR)
-           {
-             arg1 = const_binop (MINUS_EXPR, arg1,
-                                 build_int_cst (TREE_TYPE (arg1), 1), 0);
-             return fold_build2 (GT_EXPR, type, arg0,
-                                 fold_convert (TREE_TYPE (arg0), arg1));
-           }
-         if (code == LT_EXPR)
-           {
-             arg1 = const_binop (MINUS_EXPR, arg1,
-                                 build_int_cst (TREE_TYPE (arg1), 1), 0);
-             return fold_build2 (LE_EXPR, type, arg0,
-                                 fold_convert (TREE_TYPE (arg0), arg1));
-           }
-       }
-
       /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
       {
@@ -12631,7 +12845,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
        unsigned int width = TYPE_PRECISION (arg1_type);
 
        if (TREE_CODE (arg1) == INTEGER_CST
-           && !TREE_OVERFLOW (arg1)
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
@@ -12847,8 +13060,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
 
       if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
-         && (TREE_CODE (arg1) == NOP_EXPR
-             || TREE_CODE (arg1) == CONVERT_EXPR)
+         && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        return
@@ -13271,9 +13483,7 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
     case BIT_FIELD_REF:
       if ((TREE_CODE (arg0) == VECTOR_CST
           || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
-         && type == TREE_TYPE (TREE_TYPE (arg0))
-         && host_integerp (arg1, 1)
-         && host_integerp (op2, 1))
+         && type == TREE_TYPE (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
@@ -13304,6 +13514,13 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
                return fold_convert (type, integer_zero_node);
            }
        }
+
+      /* A bit-field-ref that referenced the full argument can be stripped.  */
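+      /* For example, BIT_FIELD_REF <x, 32, 0> of a 32-bit integer x is
+        just x converted to TYPE.  */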
+      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+         && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
+         && integer_zerop (op2))
+       return fold_convert (type, arg0);
+
       return NULL_TREE;
 
     default:
@@ -13348,8 +13565,7 @@ fold (tree expr)
       return expr;
     }
 
-  if (IS_EXPR_CODE_CLASS (kind)
-      || IS_GIMPLE_STMT_CODE_CLASS (kind))
+  if (IS_EXPR_CODE_CLASS (kind))
     {
       tree type = TREE_TYPE (t);
       tree op0, op1, op2;
@@ -13378,6 +13594,45 @@ fold (tree expr)
 
   switch (code)
     {
+    case ARRAY_REF:
+      {
+       tree op0 = TREE_OPERAND (t, 0);
+       tree op1 = TREE_OPERAND (t, 1);
+
+       if (TREE_CODE (op1) == INTEGER_CST
+           && TREE_CODE (op0) == CONSTRUCTOR
+           && ! type_contains_placeholder_p (TREE_TYPE (op0)))
+         {
+           VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
+           unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
+           unsigned HOST_WIDE_INT begin = 0;
+
+           /* Find a matching index by means of a binary search.  */
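+           /* This relies on the elements being sorted by index; each
+              index is expected to be an INTEGER_CST or a RANGE_EXPR
+              with constant bounds.  */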
+           while (begin != end)
+             {
+               unsigned HOST_WIDE_INT middle = (begin + end) / 2;
+               tree index = VEC_index (constructor_elt, elts, middle)->index;
+
+               if (TREE_CODE (index) == INTEGER_CST
+                   && tree_int_cst_lt (index, op1))
+                 begin = middle + 1;
+               else if (TREE_CODE (index) == INTEGER_CST
+                        && tree_int_cst_lt (op1, index))
+                 end = middle;
+               else if (TREE_CODE (index) == RANGE_EXPR
+                        && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
+                 begin = middle + 1;
+               else if (TREE_CODE (index) == RANGE_EXPR
+                        && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
+                 end = middle;
+               else
+                 return VEC_index (constructor_elt, elts, middle)->value;
+             }
+         }
+
+       return t;
+      }
+
     case CONST_DECL:
       return fold (DECL_INITIAL (t));
 
@@ -13452,7 +13707,7 @@ fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
 {
   const void **slot;
   enum tree_code code;
-  struct tree_function_decl buf;
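+  /* Scratch node for the stripped-down copies made below; union
+     tree_node is large enough to hold any kind of tree node.  */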
+  union tree_node buf;
   int i, len;
   
 recursive_label:
@@ -13476,9 +13731,11 @@ recursive_label:
       expr = (tree) &buf;
     }
   else if (TREE_CODE_CLASS (code) == tcc_type
-          && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
+          && (TYPE_POINTER_TO (expr)
+              || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
-              || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
+              || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
+              || TYPE_NEXT_VARIANT (expr)))
     {
       /* Allow these fields to be modified.  */
       tree tmp;
@@ -13487,6 +13744,7 @@ recursive_label:
       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
       TYPE_POINTER_TO (tmp) = NULL;
       TYPE_REFERENCE_TO (tmp) = NULL;
+      TYPE_NEXT_VARIANT (tmp) = NULL;
       if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
@@ -14025,106 +14283,184 @@ multiple_of_p (tree type, const_tree top, const_tree bottom)
     }
 }
 
-/* Return true if `t' is known to be non-negative.  If the return
+/* Return true if an expression with code CODE and type TYPE is known to
+   be non-negative based only on CODE and TYPE.  */
+
+static bool
+tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
+{
+  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
+      && truth_value_p (code))
+    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
+       have a signed:1 type (where the values are -1 and 0).  */
+    return true;
+  return false;
+}
+
+/* Return true if (CODE OP0) is known to be non-negative.  If the return
    value is based on the assumption that signed overflow is undefined,
    set *STRICT_OVERFLOW_P to true; otherwise, don't change
    *STRICT_OVERFLOW_P.  */
 
 bool
-tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
+tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
+                               bool *strict_overflow_p)
 {
-  if (t == error_mark_node)
-    return false;
-
-  if (TYPE_UNSIGNED (TREE_TYPE (t)))
+  if (TYPE_UNSIGNED (type))
     return true;
 
-  switch (TREE_CODE (t))
+  switch (code)
     {
-    case SSA_NAME:
-      /* Query VRP to see if it has recorded any information about
-        the range of this object.  */
-      return ssa_name_nonnegative_p (t);
-
     case ABS_EXPR:
       /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
-      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
+      if (!INTEGRAL_TYPE_P (type))
        return true;
-      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
+      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
       break;
 
-    case INTEGER_CST:
-      return tree_int_cst_sgn (t) >= 0;
+    case NON_LVALUE_EXPR:
+    case FLOAT_EXPR:
+    case FIX_TRUNC_EXPR:
+      return tree_expr_nonnegative_warnv_p (op0,
+                                           strict_overflow_p);
 
-    case REAL_CST:
-      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
+    case NOP_EXPR:
+      {
+       tree inner_type = TREE_TYPE (op0);
+       tree outer_type = type;
 
-    case FIXED_CST:
-      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
+       if (TREE_CODE (outer_type) == REAL_TYPE)
+         {
+           if (TREE_CODE (inner_type) == REAL_TYPE)
+             return tree_expr_nonnegative_warnv_p (op0,
+                                                   strict_overflow_p);
+           if (TREE_CODE (inner_type) == INTEGER_TYPE)
+             {
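+               /* An unsigned integer converted to a real type can
+                  never be negative.  */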
+               if (TYPE_UNSIGNED (inner_type))
+                 return true;
+               return tree_expr_nonnegative_warnv_p (op0,
+                                                     strict_overflow_p);
+             }
+         }
+       else if (TREE_CODE (outer_type) == INTEGER_TYPE)
+         {
+           if (TREE_CODE (inner_type) == REAL_TYPE)
+             return tree_expr_nonnegative_warnv_p (op0,
+                                                   strict_overflow_p);
+           if (TREE_CODE (inner_type) == INTEGER_TYPE)
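+             /* A strictly widening conversion from an unsigned type
+                cannot produce a negative value.  */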
+             return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
+                     && TYPE_UNSIGNED (inner_type);
+         }
+      }
+      break;
+
+    default:
+      return tree_simple_nonnegative_warnv_p (code, type);
+    }
+
+  /* We don't know the sign, so be conservative and return false.  */
+  return false;
+}
+
+/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
+   value is based on the assumption that signed overflow is undefined,
+   set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+bool
+tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
+                                     tree op1, bool *strict_overflow_p)
+{
+  if (TYPE_UNSIGNED (type))
+    return true;
 
+  switch (code)
+    {
     case POINTER_PLUS_EXPR:
     case PLUS_EXPR:
-      if (FLOAT_TYPE_P (TREE_TYPE (t)))
-       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+      if (FLOAT_TYPE_P (type))
+       return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
-               && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+               && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));
 
       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
-      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
-         && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
-         && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
+      if (TREE_CODE (type) == INTEGER_TYPE
+         && TREE_CODE (op0) == NOP_EXPR
+         && TREE_CODE (op1) == NOP_EXPR)
        {
-         tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
-         tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
+         tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
+         tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
-             return prec < TYPE_PRECISION (TREE_TYPE (t));
+             return prec < TYPE_PRECISION (type);
            }
        }
       break;
 
     case MULT_EXPR:
-      if (FLOAT_TYPE_P (TREE_TYPE (t)))
+      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
-         if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
+         if (operand_equal_p (op0, op1, 0))
            return true;
-         return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+         return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
-                 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+                 && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }
 
       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
-      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
-         && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
-         && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
+      if (TREE_CODE (type) == INTEGER_TYPE
+         && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
+         && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
-         tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
-         tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
-         if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
-             && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
-           return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
-                  < TYPE_PRECISION (TREE_TYPE (t));
+         tree inner0 = (TREE_CODE (op0) == NOP_EXPR) 
+           ? TREE_TYPE (TREE_OPERAND (op0, 0))
+           : TREE_TYPE (op0);
+         tree inner1 = (TREE_CODE (op1) == NOP_EXPR) 
+           ? TREE_TYPE (TREE_OPERAND (op1, 0))
+           : TREE_TYPE (op1);
+
+         bool unsigned0 = TYPE_UNSIGNED (inner0);
+         bool unsigned1 = TYPE_UNSIGNED (inner1);
+
+         if (TREE_CODE (op0) == INTEGER_CST)
+           unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
+
+         if (TREE_CODE (op1) == INTEGER_CST)
+           unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
+
+         if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
+             && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
+           {
+             unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
+               ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
+               : TYPE_PRECISION (inner0);
+
+             unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
+               ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
+               : TYPE_PRECISION (inner1);
+
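+             /* For instance, (int) (unsigned char) x * 100 needs at
+                most 8 + 7 == 15 value bits, so with a 32-bit int the
+                product is known to be non-negative.  */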
+             return precision0 + precision1 < TYPE_PRECISION (type);
+           }
        }
       return false;
 
     case BIT_AND_EXPR:
     case MAX_EXPR:
-      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
-             || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+             || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));
 
     case BIT_IOR_EXPR:
@@ -14135,68 +14471,205 @@ tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
     case CEIL_DIV_EXPR:
     case FLOOR_DIV_EXPR:
     case ROUND_DIV_EXPR:
-      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
-             && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+             && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));
 
     case TRUNC_MOD_EXPR:
     case CEIL_MOD_EXPR:
     case FLOOR_MOD_EXPR:
     case ROUND_MOD_EXPR:
-    case SAVE_EXPR:
-    case NON_LVALUE_EXPR:
-    case FLOAT_EXPR:
-    case FIX_TRUNC_EXPR:
-      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
+    default:
+      return tree_simple_nonnegative_warnv_p (code, type);
+    }
 
-    case COMPOUND_EXPR:
-    case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
-      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
-                                           strict_overflow_p);
+  /* We don't know the sign, so be conservative and return false.  */
+  return false;
+}
 
-    case BIND_EXPR:
-      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
-                                           strict_overflow_p);
+/* Return true if T is known to be non-negative.  If the return
+   value is based on the assumption that signed overflow is undefined,
+   set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+bool
+tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
+{
+  if (TYPE_UNSIGNED (TREE_TYPE (t)))
+    return true;
+
+  switch (TREE_CODE (t))
+    {
+    case INTEGER_CST:
+      return tree_int_cst_sgn (t) >= 0;
+
+    case REAL_CST:
+      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
+
+    case FIXED_CST:
+      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
 
     case COND_EXPR:
       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));
+    default:
+      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
+                                                  TREE_TYPE (t));
+    }
+  /* We don't know sign of `t', so be conservative and return false.  */
+  return false;
+}
 
-    case NOP_EXPR:
-      {
-       tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
-       tree outer_type = TREE_TYPE (t);
-
-       if (TREE_CODE (outer_type) == REAL_TYPE)
+/* Return true if a call to FNDECL, with arguments ARG0 and ARG1 and
+   result type TYPE, is known to be non-negative.  If the return
+   value is based on the assumption that signed overflow is undefined,
+   set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+bool
+tree_call_nonnegative_warnv_p (tree type, tree fndecl,
+                              tree arg0, tree arg1, bool *strict_overflow_p)
+{
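+  /* Only calls to normal built-in functions are recognized here; any
+     other call falls through to tree_simple_nonnegative_warnv_p at the
+     end of this function.  */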
+  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+    switch (DECL_FUNCTION_CODE (fndecl))
+      {
+       CASE_FLT_FN (BUILT_IN_ACOS):
+       CASE_FLT_FN (BUILT_IN_ACOSH):
+       CASE_FLT_FN (BUILT_IN_CABS):
+       CASE_FLT_FN (BUILT_IN_COSH):
+       CASE_FLT_FN (BUILT_IN_ERFC):
+       CASE_FLT_FN (BUILT_IN_EXP):
+       CASE_FLT_FN (BUILT_IN_EXP10):
+       CASE_FLT_FN (BUILT_IN_EXP2):
+       CASE_FLT_FN (BUILT_IN_FABS):
+       CASE_FLT_FN (BUILT_IN_FDIM):
+       CASE_FLT_FN (BUILT_IN_HYPOT):
+       CASE_FLT_FN (BUILT_IN_POW10):
+       CASE_INT_FN (BUILT_IN_FFS):
+       CASE_INT_FN (BUILT_IN_PARITY):
+       CASE_INT_FN (BUILT_IN_POPCOUNT):
+      case BUILT_IN_BSWAP32:
+      case BUILT_IN_BSWAP64:
+       /* Always true.  */
+       return true;
+
+       CASE_FLT_FN (BUILT_IN_SQRT):
+       /* sqrt(-0.0) is -0.0.  */
+       if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
+         return true;
+       return tree_expr_nonnegative_warnv_p (arg0,
+                                             strict_overflow_p);
+
+       CASE_FLT_FN (BUILT_IN_ASINH):
+       CASE_FLT_FN (BUILT_IN_ATAN):
+       CASE_FLT_FN (BUILT_IN_ATANH):
+       CASE_FLT_FN (BUILT_IN_CBRT):
+       CASE_FLT_FN (BUILT_IN_CEIL):
+       CASE_FLT_FN (BUILT_IN_ERF):
+       CASE_FLT_FN (BUILT_IN_EXPM1):
+       CASE_FLT_FN (BUILT_IN_FLOOR):
+       CASE_FLT_FN (BUILT_IN_FMOD):
+       CASE_FLT_FN (BUILT_IN_FREXP):
+       CASE_FLT_FN (BUILT_IN_LCEIL):
+       CASE_FLT_FN (BUILT_IN_LDEXP):
+       CASE_FLT_FN (BUILT_IN_LFLOOR):
+       CASE_FLT_FN (BUILT_IN_LLCEIL):
+       CASE_FLT_FN (BUILT_IN_LLFLOOR):
+       CASE_FLT_FN (BUILT_IN_LLRINT):
+       CASE_FLT_FN (BUILT_IN_LLROUND):
+       CASE_FLT_FN (BUILT_IN_LRINT):
+       CASE_FLT_FN (BUILT_IN_LROUND):
+       CASE_FLT_FN (BUILT_IN_MODF):
+       CASE_FLT_FN (BUILT_IN_NEARBYINT):
+       CASE_FLT_FN (BUILT_IN_RINT):
+       CASE_FLT_FN (BUILT_IN_ROUND):
+       CASE_FLT_FN (BUILT_IN_SCALB):
+       CASE_FLT_FN (BUILT_IN_SCALBLN):
+       CASE_FLT_FN (BUILT_IN_SCALBN):
+       CASE_FLT_FN (BUILT_IN_SIGNBIT):
+       CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
+       CASE_FLT_FN (BUILT_IN_SINH):
+       CASE_FLT_FN (BUILT_IN_TANH):
+       CASE_FLT_FN (BUILT_IN_TRUNC):
+       /* True if the 1st argument is nonnegative.  */
+       return tree_expr_nonnegative_warnv_p (arg0,
+                                             strict_overflow_p);
+
+       CASE_FLT_FN (BUILT_IN_FMAX):
+       /* True if the 1st OR 2nd arguments are nonnegative.  */
+       return (tree_expr_nonnegative_warnv_p (arg0,
+                                              strict_overflow_p)
+               || (tree_expr_nonnegative_warnv_p (arg1,
+                                                  strict_overflow_p)));
+
+       CASE_FLT_FN (BUILT_IN_FMIN):
+       /* True if the 1st AND 2nd arguments are nonnegative.  */
+       return (tree_expr_nonnegative_warnv_p (arg0,
+                                              strict_overflow_p)
+               && (tree_expr_nonnegative_warnv_p (arg1,
+                                                  strict_overflow_p)));
+
+       CASE_FLT_FN (BUILT_IN_COPYSIGN):
+       /* True if the 2nd argument is nonnegative.  */
+       return tree_expr_nonnegative_warnv_p (arg1,
+                                             strict_overflow_p);
+
+       CASE_FLT_FN (BUILT_IN_POWI):
+       /* True if the 1st argument is nonnegative or the second
+          argument is an even integer.  */
+       if (TREE_CODE (arg1) == INTEGER_CST
+           && (TREE_INT_CST_LOW (arg1) & 1) == 0)
+         return true;
+       return tree_expr_nonnegative_warnv_p (arg0,
+                                             strict_overflow_p);
+
+       CASE_FLT_FN (BUILT_IN_POW):
+       /* True if the 1st argument is nonnegative or the second
+          argument is an even integer valued real.  */
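+       /* For example, pow (x, 2.0) never yields a negative value.  */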
+       if (TREE_CODE (arg1) == REAL_CST)
          {
-           if (TREE_CODE (inner_type) == REAL_TYPE)
-             return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
-                                                   strict_overflow_p);
-           if (TREE_CODE (inner_type) == INTEGER_TYPE)
+           REAL_VALUE_TYPE c;
+           HOST_WIDE_INT n;
+
+           c = TREE_REAL_CST (arg1);
+           n = real_to_integer (&c);
+           if ((n & 1) == 0)
              {
-               if (TYPE_UNSIGNED (inner_type))
+               REAL_VALUE_TYPE cint;
+               real_from_integer (&cint, VOIDmode, n,
+                                  n < 0 ? -1 : 0, 0);
+               if (real_identical (&c, &cint))
                  return true;
-               return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
-                                                     strict_overflow_p);
              }
          }
-       else if (TREE_CODE (outer_type) == INTEGER_TYPE)
-         {
-           if (TREE_CODE (inner_type) == REAL_TYPE)
-             return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
-                                                   strict_overflow_p);
-           if (TREE_CODE (inner_type) == INTEGER_TYPE)
-             return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
-                     && TYPE_UNSIGNED (inner_type);
-         }
+       return tree_expr_nonnegative_warnv_p (arg0,
+                                             strict_overflow_p);
+
+      default:
+       break;
       }
-      break;
+  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
+                                         type);
+}
+
+/* Return true if T is known to be non-negative.  If the return
+   value is based on the assumption that signed overflow is undefined,
+   set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+bool
+tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
+{
+  enum tree_code code = TREE_CODE (t);
+  if (TYPE_UNSIGNED (TREE_TYPE (t)))
+    return true;
 
+  switch (code)
+    {
     case TARGET_EXPR:
       {
        tree temp = TARGET_EXPR_SLOT (t);
@@ -14221,10 +14694,9 @@ tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
            else
              break;
          }
-       if ((TREE_CODE (t) == MODIFY_EXPR
-            || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-           && GENERIC_TREE_OPERAND (t, 0) == temp)
-         return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+       if (TREE_CODE (t) == MODIFY_EXPR
+           && TREE_OPERAND (t, 0) == temp)
+         return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);
 
        return false;
@@ -14232,145 +14704,103 @@ tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
 
     case CALL_EXPR:
       {
-       tree fndecl = get_callee_fndecl (t);
-       if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
-         switch (DECL_FUNCTION_CODE (fndecl))
-           {
-           CASE_FLT_FN (BUILT_IN_ACOS):
-           CASE_FLT_FN (BUILT_IN_ACOSH):
-           CASE_FLT_FN (BUILT_IN_CABS):
-           CASE_FLT_FN (BUILT_IN_COSH):
-           CASE_FLT_FN (BUILT_IN_ERFC):
-           CASE_FLT_FN (BUILT_IN_EXP):
-           CASE_FLT_FN (BUILT_IN_EXP10):
-           CASE_FLT_FN (BUILT_IN_EXP2):
-           CASE_FLT_FN (BUILT_IN_FABS):
-           CASE_FLT_FN (BUILT_IN_FDIM):
-           CASE_FLT_FN (BUILT_IN_HYPOT):
-           CASE_FLT_FN (BUILT_IN_POW10):
-           CASE_INT_FN (BUILT_IN_FFS):
-           CASE_INT_FN (BUILT_IN_PARITY):
-           CASE_INT_FN (BUILT_IN_POPCOUNT):
-           case BUILT_IN_BSWAP32:
-           case BUILT_IN_BSWAP64:
-             /* Always true.  */
-             return true;
-
-           CASE_FLT_FN (BUILT_IN_SQRT):
-             /* sqrt(-0.0) is -0.0.  */
-             if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
-               return true;
-             return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                   strict_overflow_p);
+       tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
+       tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
+
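+       /* The built-in handlers only ever look at the first two
+          arguments.  */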
+       return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
+                                             get_callee_fndecl (t),
+                                             arg0,
+                                             arg1,
+                                             strict_overflow_p);
+      }
+    case COMPOUND_EXPR:
+    case MODIFY_EXPR:
+      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+                                           strict_overflow_p);
+    case BIND_EXPR:
+      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
+                                           strict_overflow_p);
+    case SAVE_EXPR:
+      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+                                           strict_overflow_p);
 
-           CASE_FLT_FN (BUILT_IN_ASINH):
-           CASE_FLT_FN (BUILT_IN_ATAN):
-           CASE_FLT_FN (BUILT_IN_ATANH):
-           CASE_FLT_FN (BUILT_IN_CBRT):
-           CASE_FLT_FN (BUILT_IN_CEIL):
-           CASE_FLT_FN (BUILT_IN_ERF):
-           CASE_FLT_FN (BUILT_IN_EXPM1):
-           CASE_FLT_FN (BUILT_IN_FLOOR):
-           CASE_FLT_FN (BUILT_IN_FMOD):
-           CASE_FLT_FN (BUILT_IN_FREXP):
-           CASE_FLT_FN (BUILT_IN_LCEIL):
-           CASE_FLT_FN (BUILT_IN_LDEXP):
-           CASE_FLT_FN (BUILT_IN_LFLOOR):
-           CASE_FLT_FN (BUILT_IN_LLCEIL):
-           CASE_FLT_FN (BUILT_IN_LLFLOOR):
-           CASE_FLT_FN (BUILT_IN_LLRINT):
-           CASE_FLT_FN (BUILT_IN_LLROUND):
-           CASE_FLT_FN (BUILT_IN_LRINT):
-           CASE_FLT_FN (BUILT_IN_LROUND):
-           CASE_FLT_FN (BUILT_IN_MODF):
-           CASE_FLT_FN (BUILT_IN_NEARBYINT):
-           CASE_FLT_FN (BUILT_IN_RINT):
-           CASE_FLT_FN (BUILT_IN_ROUND):
-           CASE_FLT_FN (BUILT_IN_SCALB):
-           CASE_FLT_FN (BUILT_IN_SCALBLN):
-           CASE_FLT_FN (BUILT_IN_SCALBN):
-           CASE_FLT_FN (BUILT_IN_SIGNBIT):
-           CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
-           CASE_FLT_FN (BUILT_IN_SINH):
-           CASE_FLT_FN (BUILT_IN_TANH):
-           CASE_FLT_FN (BUILT_IN_TRUNC):
-             /* True if the 1st argument is nonnegative.  */
-             return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                   strict_overflow_p);
+    default:
+      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
+                                                  TREE_TYPE (t));
+    }
 
-           CASE_FLT_FN (BUILT_IN_FMAX):
-             /* True if the 1st OR 2nd arguments are nonnegative.  */
-             return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                    strict_overflow_p)
-                     || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
-                                                        strict_overflow_p)));
-
-           CASE_FLT_FN (BUILT_IN_FMIN):
-             /* True if the 1st AND 2nd arguments are nonnegative.  */
-             return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                    strict_overflow_p)
-                     && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
-                                                        strict_overflow_p)));
-
-           CASE_FLT_FN (BUILT_IN_COPYSIGN):
-             /* True if the 2nd argument is nonnegative.  */
-             return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
-                                                   strict_overflow_p);
+  /* We don't know sign of `t', so be conservative and return false.  */
+  return false;
+}
 
-           CASE_FLT_FN (BUILT_IN_POWI):
-             /* True if the 1st argument is nonnegative or the second
-                argument is an even integer.  */
-             if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
-               {
-                 tree arg1 = CALL_EXPR_ARG (t, 1);
-                 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
-                   return true;
-               }
-             return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                   strict_overflow_p);
+/* Return true if T is known to be non-negative.  If the return
+   value is based on the assumption that signed overflow is undefined,
+   set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
 
-           CASE_FLT_FN (BUILT_IN_POW):
-             /* True if the 1st argument is nonnegative or the second
-                argument is an even integer valued real.  */
-             if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
-               {
-                 REAL_VALUE_TYPE c;
-                 HOST_WIDE_INT n;
+bool
+tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
+{
+  enum tree_code code;
+  if (t == error_mark_node)
+    return false;
 
-                 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
-                 n = real_to_integer (&c);
-                 if ((n & 1) == 0)
-                   {
-                     REAL_VALUE_TYPE cint;
-                     real_from_integer (&cint, VOIDmode, n,
-                                        n < 0 ? -1 : 0, 0);
-                     if (real_identical (&c, &cint))
-                       return true;
-                   }
-               }
-             return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                   strict_overflow_p);
+  code = TREE_CODE (t);
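+  /* Dispatch on the tree code class first; expression codes that do
+     not fall into one of these classes are handled by the second
+     switch below.  */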
+  switch (TREE_CODE_CLASS (code))
+    {
+    case tcc_binary:
+    case tcc_comparison:
+      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
+                                             TREE_TYPE (t),
+                                             TREE_OPERAND (t, 0),
+                                             TREE_OPERAND (t, 1),
+                                             strict_overflow_p);
 
-           default:
-             break;
-           }
-      }
+    case tcc_unary:
+      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
+                                            TREE_TYPE (t),
+                                            TREE_OPERAND (t, 0),
+                                            strict_overflow_p);
 
-      /* ... fall through ...  */
+    case tcc_constant:
+    case tcc_declaration:
+    case tcc_reference:
+      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
 
     default:
-      {
-       tree type = TREE_TYPE (t);
-       if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
-           && truth_value_p (TREE_CODE (t)))
-         /* Truth values evaluate to 0 or 1, which is nonnegative unless we
-             have a signed:1 type (where the value is -1 and 0).  */
-         return true;
-      }
+      break;
     }
 
-  /* We don't know sign of `t', so be conservative and return false.  */
-  return false;
+  switch (code)
+    {
+    case TRUTH_AND_EXPR:
+    case TRUTH_OR_EXPR:
+    case TRUTH_XOR_EXPR:
+      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
+                                             TREE_TYPE (t),
+                                             TREE_OPERAND (t, 0),
+                                             TREE_OPERAND (t, 1),
+                                             strict_overflow_p);
+    case TRUTH_NOT_EXPR:
+      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
+                                            TREE_TYPE (t),
+                                            TREE_OPERAND (t, 0),
+                                            strict_overflow_p);
+
+    case COND_EXPR:
+    case CONSTRUCTOR:
+    case OBJ_TYPE_REF:
+    case ASSERT_EXPR:
+    case ADDR_EXPR:
+    case WITH_SIZE_EXPR:
+    case EXC_PTR_EXPR:
+    case SSA_NAME:
+    case FILTER_EXPR:
+      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
+
+    default:
+      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
+    }
 }
 
 /* Return true if `t' is known to be non-negative.  Handle warnings
@@ -14391,7 +14821,8 @@ tree_expr_nonnegative_p (tree t)
   return ret;
 }
 
-/* Return true when T is an address and is known to be nonzero.
+
+/* Return true when the unary expression (CODE OP0) is known to be nonzero.
    For floating point we further ensure that T is not denormal.
    Similar logic is present in nonzero_address in rtlanal.h.
 
@@ -14400,29 +14831,54 @@ tree_expr_nonnegative_p (tree t)
    change *STRICT_OVERFLOW_P.  */
 
 bool
-tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
+tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
+                                bool *strict_overflow_p)
 {
-  tree type = TREE_TYPE (t);
-  bool sub_strict_overflow_p;
+  switch (code)
+    {
+    case ABS_EXPR:
+      return tree_expr_nonzero_warnv_p (op0,
+                                       strict_overflow_p);
 
-  /* Doing something useful for floating point would need more work.  */
-  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
-    return false;
+    case NOP_EXPR:
+      {
+       tree inner_type = TREE_TYPE (op0);
+       tree outer_type = type;
 
-  switch (TREE_CODE (t))
-    {
-    case SSA_NAME:
-      /* Query VRP to see if it has recorded any information about
-        the range of this object.  */
-      return ssa_name_nonzero_p (t);
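+       /* A conversion that does not narrow the value cannot turn a
+          nonzero value into zero.  */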
+       return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
+               && tree_expr_nonzero_warnv_p (op0,
+                                             strict_overflow_p));
+      }
+      break;
 
-    case ABS_EXPR:
-      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+    case NON_LVALUE_EXPR:
+      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);
 
-    case INTEGER_CST:
-      return !integer_zerop (t);
+    default:
+      break;
+  }
+
+  return false;
+}
+
+/* Return true when the binary expression (CODE OP0 OP1) is known to be nonzero.
+   For floating point we further ensure that T is not denormal.
+   Similar logic is present in nonzero_address in rtlanal.h.
+
+   If the return value is based on the assumption that signed overflow
+   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
+   change *STRICT_OVERFLOW_P.  */
 
+bool
+tree_binary_nonzero_warnv_p (enum tree_code code,
+                            tree type,
+                            tree op0,
+                            tree op1, bool *strict_overflow_p)
+{
+  bool sub_strict_overflow_p;
+  switch (code)
+    {
     case POINTER_PLUS_EXPR:
     case PLUS_EXPR:
       if (TYPE_OVERFLOW_UNDEFINED (type))
@@ -14430,18 +14886,18 @@ tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
-         if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+         if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
-             || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
+             || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
-         return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+         return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
-                 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+                 || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
       break;
@@ -14449,9 +14905,9 @@ tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
     case MULT_EXPR:
       if (TYPE_OVERFLOW_UNDEFINED (type))
        {
-         if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+         if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
-             && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+             && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
@@ -14460,18 +14916,78 @@ tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
        }
       break;
 
-    case NOP_EXPR:
-      {
-       tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
-       tree outer_type = TREE_TYPE (t);
+    case MIN_EXPR:
+      sub_strict_overflow_p = false;
+      if (tree_expr_nonzero_warnv_p (op0,
+                                    &sub_strict_overflow_p)
+         && tree_expr_nonzero_warnv_p (op1,
+                                       &sub_strict_overflow_p))
+       {
+         if (sub_strict_overflow_p)
+           *strict_overflow_p = true;
+       }
+      break;
 
-       return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
-               && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
-                                             strict_overflow_p));
-      }
+    case MAX_EXPR:
+      sub_strict_overflow_p = false;
+      if (tree_expr_nonzero_warnv_p (op0,
+                                    &sub_strict_overflow_p))
+       {
+         if (sub_strict_overflow_p)
+           *strict_overflow_p = true;
+
+         /* When both operands are nonzero, then MAX must be too.  */
+         if (tree_expr_nonzero_warnv_p (op1,
+                                        strict_overflow_p))
+           return true;
+
+         /* MAX where operand 0 is positive is positive.  */
+         return tree_expr_nonnegative_warnv_p (op0,
+                                              strict_overflow_p);
+       }
+      /* MAX where operand 1 is positive is positive.  */
+      else if (tree_expr_nonzero_warnv_p (op1,
+                                         &sub_strict_overflow_p)
+              && tree_expr_nonnegative_warnv_p (op1,
+                                                &sub_strict_overflow_p))
+       {
+         if (sub_strict_overflow_p)
+           *strict_overflow_p = true;
+         return true;
+       }
+      break;
+
+    case BIT_IOR_EXPR:
+      return (tree_expr_nonzero_warnv_p (op1,
+                                        strict_overflow_p)
+             || tree_expr_nonzero_warnv_p (op0,
+                                           strict_overflow_p));
+
+    default:
       break;
+  }
+
+  return false;
+}
+
+/* Return true when T is an address and is known to be nonzero.
+   For floating point we further ensure that T is not denormal.
+   Similar logic is present in nonzero_address in rtlanal.h.
+
+   If the return value is based on the assumption that signed overflow
+   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
+   change *STRICT_OVERFLOW_P.  */
+
+bool
+tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
+{
+  bool sub_strict_overflow_p;
+  switch (TREE_CODE (t))
+    {
+    case INTEGER_CST:
+      return !integer_zerop (t);
 
-   case ADDR_EXPR:
+    case ADDR_EXPR:
       {
        tree base = get_base_address (TREE_OPERAND (t, 0));
 
@@ -14502,65 +15018,86 @@ tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
        }
       break;
 
-    case MIN_EXPR:
-      sub_strict_overflow_p = false;
-      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
-                                    &sub_strict_overflow_p)
-         && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
-                                       &sub_strict_overflow_p))
-       {
-         if (sub_strict_overflow_p)
-           *strict_overflow_p = true;
-       }
+    default:
       break;
+    }
+  return false;
+}
 
-    case MAX_EXPR:
-      sub_strict_overflow_p = false;
-      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
-                                    &sub_strict_overflow_p))
-       {
-         if (sub_strict_overflow_p)
-           *strict_overflow_p = true;
+/* Return true when T is an address and is known to be nonzero.
+   For floating point we further ensure that T is not denormal.
+   Similar logic is present in nonzero_address in rtlanal.h.
 
-         /* When both operands are nonzero, then MAX must be too.  */
-         if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
-                                        strict_overflow_p))
-           return true;
+   If the return value is based on the assumption that signed overflow
+   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
+   change *STRICT_OVERFLOW_P.  */
 
-         /* MAX where operand 0 is positive is positive.  */
-         return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
+bool
+tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
+{
+  tree type = TREE_TYPE (t);
+  enum tree_code code;
+
+  /* Doing something useful for floating point would need more work.  */
+  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
+    return false;
+
+  code = TREE_CODE (t);
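+  /* As in tree_expr_nonnegative_warnv_p, dispatch on the tree code
+     class first and handle the remaining codes individually below.  */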
+  switch (TREE_CODE_CLASS (code))
+    {
+    case tcc_unary:
+      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
+                                             strict_overflow_p);
+    case tcc_binary:
+    case tcc_comparison:
+      return tree_binary_nonzero_warnv_p (code, type,
+                                              TREE_OPERAND (t, 0),
+                                              TREE_OPERAND (t, 1),
                                               strict_overflow_p);
-       }
-      /* MAX where operand 1 is positive is positive.  */
-      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
-                                         &sub_strict_overflow_p)
-              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
-                                                &sub_strict_overflow_p))
-       {
-         if (sub_strict_overflow_p)
-           *strict_overflow_p = true;
-         return true;
-       }
+    case tcc_constant:
+    case tcc_declaration:
+    case tcc_reference:
+      return tree_single_nonzero_warnv_p (t, strict_overflow_p);
+
+    default:
       break;
+    }
+
+  switch (code)
+    {
+    case TRUTH_NOT_EXPR:
+      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
+                                             strict_overflow_p);
+
+    case TRUTH_AND_EXPR:
+    case TRUTH_OR_EXPR:
+    case TRUTH_XOR_EXPR:
+      return tree_binary_nonzero_warnv_p (code, type,
+                                              TREE_OPERAND (t, 0),
+                                              TREE_OPERAND (t, 1),
+                                              strict_overflow_p);
+
+    case COND_EXPR:
+    case CONSTRUCTOR:
+    case OBJ_TYPE_REF:
+    case ASSERT_EXPR:
+    case ADDR_EXPR:
+    case WITH_SIZE_EXPR:
+    case EXC_PTR_EXPR:
+    case SSA_NAME:
+    case FILTER_EXPR:
+      return tree_single_nonzero_warnv_p (t, strict_overflow_p);
 
     case COMPOUND_EXPR:
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
     case BIND_EXPR:
-      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);
 
     case SAVE_EXPR:
-    case NON_LVALUE_EXPR:
       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);
 
-    case BIT_IOR_EXPR:
-      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
-                                       strict_overflow_p)
-             || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
-                                           strict_overflow_p));
-
     case CALL_EXPR:
       return alloca_call_p (t);
 
@@ -14646,7 +15183,7 @@ fold_read_from_constant_string (tree exp)
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
-            +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
+            +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));
 
@@ -15000,6 +15537,34 @@ fold_indirect_ref_1 (tree type, tree op0)
        }
     }
 
+  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
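+  /* For instance, with V a vector of four 32-bit ints, ((int *) &V)[1]
+     becomes BIT_FIELD_REF <V, 32, 32>.  */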
+  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
+      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
+    { 
+      tree op00 = TREE_OPERAND (sub, 0);
+      tree op01 = TREE_OPERAND (sub, 1);
+      tree op00type;
+      
+      STRIP_NOPS (op00);
+      op00type = TREE_TYPE (op00);
+      if (TREE_CODE (op00) == ADDR_EXPR
+          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
+          && type == TREE_TYPE (TREE_TYPE (op00type)))
+       { 
+         HOST_WIDE_INT offset = tree_low_cst (op01, 0);
+         tree part_width = TYPE_SIZE (type);
+         unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
+         unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+         tree index = bitsize_int (indexi);
+
+         if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
+           return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
+                               part_width, index);
+        
+       }
+    }
+
   /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)