OSDN Git Service

* tree-ssa-ccp.c (insert_clobber_before_stack_restore): Recurse on copy
[pf3gnuchains/gcc-fork.git] / gcc / tree-ssa-ccp.c
index cf4321a..ac544d3 100644 (file)
@@ -1,6 +1,6 @@
 /* Conditional constant propagation pass for the GNU compiler.
    Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
-   2010 Free Software Foundation, Inc.
+   2010, 2011, 2012, 2013 Free Software Foundation, Inc.
    Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
    Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
 
@@ -131,8 +131,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "target.h"
 #include "diagnostic-core.h"
-#include "toplev.h"
 #include "dbgcnt.h"
+#include "gimple-fold.h"
+#include "params.h"
 
 
 /* Possible lattice values.  */
@@ -408,7 +409,8 @@ valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
 
   /* Now both lattice values are CONSTANT.  */
 
-  /* Allow transitioning from &x to &x & ~3.  */
+  /* Allow transitioning from PHI <&x, not executable> == &x
+     to PHI <&x, &y> == common alignment.  */
   if (TREE_CODE (old_val.value) != INTEGER_CST
       && TREE_CODE (new_val.value) == INTEGER_CST)
     return true;
@@ -505,74 +507,25 @@ value_to_double_int (prop_value_t val)
 static prop_value_t
 get_value_from_alignment (tree expr)
 {
+  tree type = TREE_TYPE (expr);
   prop_value_t val;
-  HOST_WIDE_INT bitsize, bitpos;
-  tree base, offset;
-  enum machine_mode mode;
-  int align;
+  unsigned HOST_WIDE_INT bitpos;
+  unsigned int align;
 
   gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
 
-  base = get_inner_reference (TREE_OPERAND (expr, 0),
-                             &bitsize, &bitpos, &offset,
-                             &mode, &align, &align, false);
-  if (TREE_CODE (base) == MEM_REF)
-    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
-                          TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
-  else if (base
-          && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
-               > BITS_PER_UNIT))
-    {
-      val.lattice_val = CONSTANT;
-      /* We assume pointers are zero-extended.  */
-      val.mask = double_int_and_not
-                  (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
-                   uhwi_to_double_int (align / BITS_PER_UNIT - 1));
-      val.value = build_int_cst (TREE_TYPE (expr), 0);
-    }
+  align = get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitpos);
+  val.mask
+    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
+                         ? double_int_mask (TYPE_PRECISION (type))
+                         : double_int_minus_one,
+                         uhwi_to_double_int (align / BITS_PER_UNIT - 1));
+  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
+  if (val.lattice_val == CONSTANT)
+    val.value
+      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
   else
-    {
-      val.lattice_val = VARYING;
-      val.mask = double_int_minus_one;
-      val.value = NULL_TREE;
-    }
-  if (bitpos != 0)
-    {
-      double_int value, mask;
-      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
-                        TREE_TYPE (expr), value_to_double_int (val), val.mask,
-                        TREE_TYPE (expr),
-                        shwi_to_double_int (bitpos / BITS_PER_UNIT),
-                        double_int_zero);
-      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
-      val.mask = mask;
-      if (val.lattice_val == CONSTANT)
-       val.value = double_int_to_tree (TREE_TYPE (expr), value);
-      else
-       val.value = NULL_TREE;
-    }
-  /* ???  We should handle i * 4 and more complex expressions from
-     the offset, possibly by just expanding get_value_for_expr.  */
-  if (offset != NULL_TREE)
-    {
-      double_int value, mask;
-      prop_value_t oval = get_value_for_expr (offset, true);
-      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
-                        TREE_TYPE (expr), value_to_double_int (val), val.mask,
-                        TREE_TYPE (expr), value_to_double_int (oval),
-                        oval.mask);
-      val.mask = mask;
-      if (double_int_minus_one_p (mask))
-       {
-         val.lattice_val = VARYING;
-         val.value = NULL_TREE;
-       }
-      else
-       {
-         val.lattice_val = CONSTANT;
-         val.value = double_int_to_tree (TREE_TYPE (expr), value);
-       }
-    }
+    val.value = NULL_TREE;
 
   return val;
 }
@@ -700,14 +653,20 @@ likely_value (gimple stmt)
             the undefined operand may be promoted.  */
          return UNDEFINED;
 
+       case ADDR_EXPR:
+         /* If any part of an address is UNDEFINED, like the index
+            of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
+         return UNDEFINED;
+
        default:
          ;
        }
     }
   /* If there was an UNDEFINED operand but the result may be not UNDEFINED
-     fall back to VARYING even if there were CONSTANT operands.  */
+     fall back to CONSTANT.  During iteration UNDEFINED may still drop
+     to CONSTANT.  */
   if (has_undefined_operand)
-    return VARYING;
+    return CONSTANT;
 
   /* We do not consider virtual operands here -- load from read-only
      memory may have only VARYING virtual operands, but still be
@@ -1092,209 +1051,6 @@ ccp_fold (gimple stmt)
   location_t loc = gimple_location (stmt);
   switch (gimple_code (stmt))
     {
-    case GIMPLE_ASSIGN:
-      {
-        enum tree_code subcode = gimple_assign_rhs_code (stmt);
-
-        switch (get_gimple_rhs_class (subcode))
-          {
-          case GIMPLE_SINGLE_RHS:
-            {
-              tree rhs = gimple_assign_rhs1 (stmt);
-              enum tree_code_class kind = TREE_CODE_CLASS (subcode);
-
-              if (TREE_CODE (rhs) == SSA_NAME)
-                {
-                  /* If the RHS is an SSA_NAME, return its known constant value,
-                     if any.  */
-                  return get_constant_value (rhs);
-                }
-             /* Handle propagating invariant addresses into address operations.
-                The folding we do here matches that in tree-ssa-forwprop.c.  */
-             else if (TREE_CODE (rhs) == ADDR_EXPR)
-               {
-                 tree *base;
-                 base = &TREE_OPERAND (rhs, 0);
-                 while (handled_component_p (*base))
-                   base = &TREE_OPERAND (*base, 0);
-                 if (TREE_CODE (*base) == MEM_REF
-                     && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
-                   {
-                     tree val = get_constant_value (TREE_OPERAND (*base, 0));
-                     if (val
-                         && TREE_CODE (val) == ADDR_EXPR)
-                       {
-                         tree ret, save = *base;
-                         tree new_base;
-                         new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
-                                                 unshare_expr (val),
-                                                 TREE_OPERAND (*base, 1));
-                         /* We need to return a new tree, not modify the IL
-                            or share parts of it.  So play some tricks to
-                            avoid manually building it.  */
-                         *base = new_base;
-                         ret = unshare_expr (rhs);
-                         recompute_tree_invariant_for_addr_expr (ret);
-                         *base = save;
-                         return ret;
-                       }
-                   }
-               }
-             else if (TREE_CODE (rhs) == CONSTRUCTOR
-                      && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
-                      && (CONSTRUCTOR_NELTS (rhs)
-                          == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
-               {
-                 unsigned i;
-                 tree val, list;
-
-                 list = NULL_TREE;
-                 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
-                   {
-                     val = valueize_op (val);
-                     if (TREE_CODE (val) == INTEGER_CST
-                         || TREE_CODE (val) == REAL_CST
-                         || TREE_CODE (val) == FIXED_CST)
-                       list = tree_cons (NULL_TREE, val, list);
-                     else
-                       return NULL_TREE;
-                   }
-
-                 return build_vector (TREE_TYPE (rhs), nreverse (list));
-               }
-
-              if (kind == tcc_reference)
-               {
-                 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
-                      || TREE_CODE (rhs) == REALPART_EXPR
-                      || TREE_CODE (rhs) == IMAGPART_EXPR)
-                     && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
-                   {
-                     tree val = get_constant_value (TREE_OPERAND (rhs, 0));
-                     if (val)
-                       return fold_unary_loc (EXPR_LOCATION (rhs),
-                                              TREE_CODE (rhs),
-                                              TREE_TYPE (rhs), val);
-                   }
-                 else if (TREE_CODE (rhs) == MEM_REF
-                          && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
-                   {
-                     tree val = get_constant_value (TREE_OPERAND (rhs, 0));
-                     if (val
-                         && TREE_CODE (val) == ADDR_EXPR)
-                       {
-                         tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
-                                                 unshare_expr (val),
-                                                 TREE_OPERAND (rhs, 1));
-                         if (tem)
-                           rhs = tem;
-                       }
-                   }
-                 return fold_const_aggregate_ref (rhs);
-               }
-              else if (kind == tcc_declaration)
-                return get_symbol_constant_value (rhs);
-              return rhs;
-            }
-
-          case GIMPLE_UNARY_RHS:
-            {
-              /* Handle unary operators that can appear in GIMPLE form.
-                 Note that we know the single operand must be a constant,
-                 so this should almost always return a simplified RHS.  */
-              tree lhs = gimple_assign_lhs (stmt);
-              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
-
-             /* Conversions are useless for CCP purposes if they are
-                value-preserving.  Thus the restrictions that
-                useless_type_conversion_p places for pointer type conversions
-                do not apply here.  Substitution later will only substitute to
-                allowed places.  */
-             if (CONVERT_EXPR_CODE_P (subcode)
-                 && POINTER_TYPE_P (TREE_TYPE (lhs))
-                 && POINTER_TYPE_P (TREE_TYPE (op0)))
-               {
-                 tree tem;
-                 /* Try to re-construct array references on-the-fly.  */
-                 if (!useless_type_conversion_p (TREE_TYPE (lhs),
-                                                 TREE_TYPE (op0))
-                     && ((tem = maybe_fold_offset_to_address
-                          (loc,
-                           op0, integer_zero_node, TREE_TYPE (lhs)))
-                         != NULL_TREE))
-                   return tem;
-                 return op0;
-               }
-
-              return
-               fold_unary_ignore_overflow_loc (loc, subcode,
-                                               gimple_expr_type (stmt), op0);
-            }
-
-          case GIMPLE_BINARY_RHS:
-            {
-              /* Handle binary operators that can appear in GIMPLE form.  */
-              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
-              tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
-
-             /* Translate &x + CST into an invariant form suitable for
-                further propagation.  */
-             if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
-                 && TREE_CODE (op0) == ADDR_EXPR
-                 && TREE_CODE (op1) == INTEGER_CST)
-               {
-                 tree off = fold_convert (ptr_type_node, op1);
-                 return build_fold_addr_expr
-                          (fold_build2 (MEM_REF,
-                                        TREE_TYPE (TREE_TYPE (op0)),
-                                        unshare_expr (op0), off));
-               }
-
-              return fold_binary_loc (loc, subcode,
-                                     gimple_expr_type (stmt), op0, op1);
-            }
-
-          case GIMPLE_TERNARY_RHS:
-            {
-              /* Handle ternary operators that can appear in GIMPLE form.  */
-              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
-              tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
-              tree op2 = valueize_op (gimple_assign_rhs3 (stmt));
-
-              return fold_ternary_loc (loc, subcode,
-                                      gimple_expr_type (stmt), op0, op1, op2);
-            }
-
-          default:
-            gcc_unreachable ();
-          }
-      }
-      break;
-
-    case GIMPLE_CALL:
-      {
-       tree fn = valueize_op (gimple_call_fn (stmt));
-       if (TREE_CODE (fn) == ADDR_EXPR
-           && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
-           && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
-         {
-           tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
-           tree call, retval;
-           unsigned i;
-           for (i = 0; i < gimple_call_num_args (stmt); ++i)
-             args[i] = valueize_op (gimple_call_arg (stmt, i));
-           call = build_call_array_loc (loc,
-                                        gimple_call_return_type (stmt),
-                                        fn, gimple_call_num_args (stmt), args);
-           retval = fold_call_expr (EXPR_LOCATION (call), call, false);
-           if (retval)
-             /* fold_call_expr wraps the result inside a NOP_EXPR.  */
-             STRIP_NOPS (retval);
-           return retval;
-         }
-       return NULL_TREE;
-      }
-
     case GIMPLE_COND:
       {
         /* Handle comparison operators that can appear in GIMPLE form.  */
@@ -1310,263 +1066,13 @@ ccp_fold (gimple stmt)
         return valueize_op (gimple_switch_index (stmt));
       }
 
-    default:
-      gcc_unreachable ();
-    }
-}
-
-/* See if we can find constructor defining value of BASE.
-
-   As a special case, return error_mark_node when constructor
-   is not explicitly available, but it is known to be zero
-   such as 'static const int a;'.  */
-static tree
-get_base_constructor (tree base, tree *offset)
-{
-  *offset = NULL;
-  if (TREE_CODE (base) == MEM_REF)
-    {
-      if (!integer_zerop (TREE_OPERAND (base, 1)))
-        *offset = TREE_OPERAND (base, 1);
-
-      base = get_constant_value (TREE_OPERAND (base, 0));
-      if (!base || TREE_CODE (base) != ADDR_EXPR)
-        return NULL_TREE;
-      base = TREE_OPERAND (base, 0);
-    }
-
-  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
-     DECL_INITIAL.  If BASE is a nested reference into another
-     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
-     the inner reference.  */
-  switch (TREE_CODE (base))
-    {
-    case VAR_DECL:
-      if (!const_value_known_p (base))
-       return NULL_TREE;
-
-      /* Fallthru.  */
-    case CONST_DECL:
-      if (!DECL_INITIAL (base)
-         && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
-        return error_mark_node;
-      return DECL_INITIAL (base);
-      
-      break;
-
-    case ARRAY_REF:
-    case COMPONENT_REF:
-      return fold_const_aggregate_ref (base);
-      break;
-
-    case STRING_CST:
-    case CONSTRUCTOR:
-      return base;
-      break;
-
-    default:
-      return NULL_TREE;
-    }
-}
-
-/* Return the tree representing the element referenced by T if T is an
-   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
-   NULL_TREE otherwise.  */
-
-tree
-fold_const_aggregate_ref (tree t)
-{
-  tree ctor, idx, field;
-  unsigned HOST_WIDE_INT cnt;
-  tree cfield, cval;
-  tree tem;
-
-  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
-    return get_symbol_constant_value (t);
-
-  tem = fold_read_from_constant_string (t);
-  if (tem)
-    return tem;
-
-  switch (TREE_CODE (t))
-    {
-    case ARRAY_REF:
-      ctor = get_base_constructor (TREE_OPERAND (t, 0), &idx);
-
-      if (idx)
-       return NULL_TREE;
-
-      if (ctor == error_mark_node)
-       return build_zero_cst (TREE_TYPE (t));
-
-      if (ctor == NULL_TREE
-         || (TREE_CODE (ctor) != CONSTRUCTOR
-             && TREE_CODE (ctor) != STRING_CST))
-       return NULL_TREE;
-
-      /* Get the index.  If we have an SSA_NAME, try to resolve it
-        with the current lattice value for the SSA_NAME.  */
-      idx = TREE_OPERAND (t, 1);
-      switch (TREE_CODE (idx))
-       {
-       case SSA_NAME:
-         if ((tem = get_constant_value (idx))
-             && TREE_CODE (tem) == INTEGER_CST)
-           idx = tem;
-         else
-           return NULL_TREE;
-         break;
-
-       case INTEGER_CST:
-         break;
-
-       default:
-         return NULL_TREE;
-       }
-
-      /* Fold read from constant string.  */
-      if (TREE_CODE (ctor) == STRING_CST)
-       {
-         tree low_bound = array_ref_low_bound (t);
-         double_int low_bound_cst;
-         double_int index_cst;
-         double_int length_cst;
-         bool signed_p = TYPE_UNSIGNED (TREE_TYPE (idx));
-
-         if (TREE_CODE (idx) != INTEGER_CST
-             || !INTEGRAL_TYPE_P (TREE_TYPE (t))
-             || TREE_CODE (low_bound) != INTEGER_CST)
-           return NULL_TREE;
-         low_bound_cst = tree_to_double_int (low_bound);
-         index_cst = tree_to_double_int (idx);
-         length_cst = uhwi_to_double_int (TREE_STRING_LENGTH (ctor));
-         index_cst = double_int_sub (index_cst, low_bound_cst);
-         if ((TYPE_MODE (TREE_TYPE (t))
-              == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
-             && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
-                 == MODE_INT)
-             && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
-             && double_int_cmp (index_cst, length_cst, signed_p) < 0)
-           return build_int_cst_type (TREE_TYPE (t),
-                                      (TREE_STRING_POINTER (ctor)
-                                       [double_int_to_uhwi (index_cst)]));
-         return NULL_TREE;
-       }
-
-      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
-      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
-       if (tree_int_cst_equal (cfield, idx))
-         return canonicalize_constructor_val (cval);
-      break;
-
-    case COMPONENT_REF:
-      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
-        DECL_INITIAL.  If BASE is a nested reference into another
-        ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
-        the inner reference.  */
-      ctor = get_base_constructor (TREE_OPERAND (t, 0), &idx);
-
-      if (idx)
-       return NULL_TREE;
-
-      if (ctor == error_mark_node)
-       return build_zero_cst (TREE_TYPE (t));
-
-      if (ctor == NULL_TREE
-         || TREE_CODE (ctor) != CONSTRUCTOR)
-       return NULL_TREE;
-
-      field = TREE_OPERAND (t, 1);
-
-      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
-       if (cfield == field
-           /* FIXME: Handle bit-fields.  */
-           && ! DECL_BIT_FIELD (cfield))
-         return canonicalize_constructor_val (cval);
-      break;
-
-    case REALPART_EXPR:
-    case IMAGPART_EXPR:
-      {
-       tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
-       if (c && TREE_CODE (c) == COMPLEX_CST)
-         return fold_build1_loc (EXPR_LOCATION (t),
-                             TREE_CODE (t), TREE_TYPE (t), c);
-       break;
-      }
-
-    case MEM_REF:
-      ctor = get_base_constructor (t, &idx);
-
-      if (ctor == error_mark_node)
-       return build_zero_cst (TREE_TYPE (t));
-
-      if (ctor && !AGGREGATE_TYPE_P (TREE_TYPE (ctor))
-         && !idx)
-       {
-         if (ctor
-             && !useless_type_conversion_p
-                   (TREE_TYPE (t), TREE_TYPE (ctor)))
-           ctor = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), ctor);
-         return ctor;
-       }
-
-      if (!idx)
-       idx = integer_zero_node;
-
-      if (ctor == NULL_TREE
-         || (TREE_CODE (ctor) != CONSTRUCTOR
-             && TREE_CODE (ctor) != STRING_CST))
-       return NULL_TREE;
-
-      /* Fold read from constant string.  */
-      if (TREE_CODE (ctor) == STRING_CST)
-       {
-         if (INTEGRAL_TYPE_P (TREE_TYPE (t))
-             && (TYPE_MODE (TREE_TYPE (t))
-                 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
-             && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
-                 == MODE_INT)
-             && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
-             && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
-           return build_int_cst_type (TREE_TYPE (t),
-                                      (TREE_STRING_POINTER (ctor)
-                                       [TREE_INT_CST_LOW (idx)]));
-         return NULL_TREE;
-       }
-
-      /* ???  Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
-      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
-         && (TYPE_MODE (TREE_TYPE (t))
-             == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
-         && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
-         && integer_zerop
-              (int_const_binop
-                 (TRUNC_MOD_EXPR, idx,
-                  size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
-       {
-         idx = int_const_binop (TRUNC_DIV_EXPR, idx,
-                                size_int (GET_MODE_SIZE
-                                            (TYPE_MODE (TREE_TYPE (t)))), 0);
-         FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
-           if (tree_int_cst_equal (cfield, idx))
-             {
-               cval = canonicalize_constructor_val (cval);
-               if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
-                 return cval;
-               else if (CONSTANT_CLASS_P (cval))
-                 return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
-               else
-                 return NULL_TREE;
-             }
-       }
-      break;
+    case GIMPLE_ASSIGN:
+    case GIMPLE_CALL:
+      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
 
     default:
-      break;
+      gcc_unreachable ();
     }
-
-  return NULL_TREE;
 }
 
 /* Apply the operation CODE in type TYPE to the value, mask pair
@@ -1698,6 +1204,13 @@ bit_value_binop_1 (enum tree_code code, tree type,
            }
          else if (shift < 0)
            {
+             /* ???  We can have sizetype related inconsistencies in
+                the IL.  */
+             if ((TREE_CODE (r1type) == INTEGER_TYPE
+                  && (TYPE_IS_SIZETYPE (r1type)
+                      ? 0 : TYPE_UNSIGNED (r1type))) != uns)
+               break;
+
              shift = -shift;
              *mask = double_int_rshift (r1mask, shift,
                                         TYPE_PRECISION (type), !uns);
@@ -1808,6 +1321,14 @@ bit_value_binop_1 (enum tree_code code, tree type,
        if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
          break;
 
+       /* For comparisons the signedness is in the comparison operands.  */
+       uns = (TREE_CODE (r1type) == INTEGER_TYPE
+              && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
+       /* ???  We can have sizetype related inconsistencies in the IL.  */
+       if ((TREE_CODE (r2type) == INTEGER_TYPE
+            && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
+         break;
+
        /* If we know the most significant bits we know the values
           value ranges by means of treating varying bits as zero
           or one.  Do a cross comparison of the max/min pairs.  */
@@ -1854,6 +1375,10 @@ bit_value_unop (enum tree_code code, tree type, tree rhs)
   prop_value_t rval = get_value_for_expr (rhs, true);
   double_int value, mask;
   prop_value_t val;
+
+  if (rval.lattice_val == UNDEFINED)
+    return rval;
+
   gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
              || double_int_minus_one_p (rval.mask));
@@ -1885,6 +1410,16 @@ bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
   prop_value_t r2val = get_value_for_expr (rhs2, true);
   double_int value, mask;
   prop_value_t val;
+
+  if (r1val.lattice_val == UNDEFINED
+      || r2val.lattice_val == UNDEFINED)
+    {
+      val.lattice_val = VARYING;
+      val.value = NULL_TREE;
+      val.mask = double_int_minus_one;
+      return val;
+    }
+
   gcc_assert ((r1val.lattice_val == CONSTANT
               && TREE_CODE (r1val.value) == INTEGER_CST)
              || double_int_minus_one_p (r1val.mask));
@@ -1910,6 +1445,64 @@ bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
   return val;
 }
 
+/* Return the propagation value when applying __builtin_assume_aligned to
+   its arguments.  */
+
+static prop_value_t
+bit_value_assume_aligned (gimple stmt)
+{
+  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
+  tree type = TREE_TYPE (ptr);
+  unsigned HOST_WIDE_INT aligni, misaligni = 0;
+  prop_value_t ptrval = get_value_for_expr (ptr, true);
+  prop_value_t alignval;
+  double_int value, mask;
+  prop_value_t val;
+  if (ptrval.lattice_val == UNDEFINED)
+    return ptrval;
+  gcc_assert ((ptrval.lattice_val == CONSTANT
+              && TREE_CODE (ptrval.value) == INTEGER_CST)
+             || double_int_minus_one_p (ptrval.mask));
+  align = gimple_call_arg (stmt, 1);
+  if (!host_integerp (align, 1))
+    return ptrval;
+  aligni = tree_low_cst (align, 1);
+  if (aligni <= 1
+      || (aligni & (aligni - 1)) != 0)
+    return ptrval;
+  if (gimple_call_num_args (stmt) > 2)
+    {
+      misalign = gimple_call_arg (stmt, 2);
+      if (!host_integerp (misalign, 1))
+       return ptrval;
+      misaligni = tree_low_cst (misalign, 1);
+      if (misaligni >= aligni)
+       return ptrval;
+    }
+  align = build_int_cst_type (type, -aligni);
+  alignval = get_value_for_expr (align, true);
+  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
+                    type, value_to_double_int (ptrval), ptrval.mask,
+                    type, value_to_double_int (alignval), alignval.mask);
+  if (!double_int_minus_one_p (mask))
+    {
+      val.lattice_val = CONSTANT;
+      val.mask = mask;
+      gcc_assert ((mask.low & (aligni - 1)) == 0);
+      gcc_assert ((value.low & (aligni - 1)) == 0);
+      value.low |= misaligni;
+      /* ???  Delay building trees here.  */
+      val.value = double_int_to_tree (type, value);
+    }
+  else
+    {
+      val.lattice_val = VARYING;
+      val.value = NULL_TREE;
+      val.mask = double_int_minus_one;
+    }
+  return val;
+}
+
 /* Evaluate statement STMT.
    Valid only for assignments, calls, conditionals, and switches. */
 
@@ -1920,6 +1513,7 @@ evaluate_stmt (gimple stmt)
   tree simplified = NULL_TREE;
   ccp_lattice_t likelyvalue = likely_value (stmt);
   bool is_constant = false;
+  unsigned int align;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -1990,11 +1584,10 @@ evaluate_stmt (gimple stmt)
 
   /* Resort to simplification for bitwise tracking.  */
   if (flag_tree_bit_ccp
-      && likelyvalue == CONSTANT
+      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
       && !is_constant)
     {
       enum gimple_code code = gimple_code (stmt);
-      tree fndecl;
       val.lattice_val = VARYING;
       val.value = NULL_TREE;
       val.mask = double_int_minus_one;
@@ -2022,9 +1615,10 @@ evaluate_stmt (gimple stmt)
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                {
+                 tree lhs = gimple_assign_lhs (stmt);
                  tree rhs2 = gimple_assign_rhs2 (stmt);
                  val = bit_value_binop (subcode,
-                                        TREE_TYPE (rhs1), rhs1, rhs2);
+                                        TREE_TYPE (lhs), rhs1, rhs2);
                }
              break;
 
@@ -2040,15 +1634,16 @@ evaluate_stmt (gimple stmt)
              || POINTER_TYPE_P (TREE_TYPE (rhs1)))
            val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
        }
-      else if (code == GIMPLE_CALL
-              && (fndecl = gimple_call_fndecl (stmt))
-              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+      else if (gimple_call_builtin_class_p (stmt, BUILT_IN_NORMAL))
        {
+         tree fndecl = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_REALLOC:
            case BUILT_IN_CALLOC:
+           case BUILT_IN_STRDUP:
+           case BUILT_IN_STRNDUP:
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
@@ -2057,13 +1652,35 @@ evaluate_stmt (gimple stmt)
              break;
 
            case BUILT_IN_ALLOCA:
+           case BUILT_IN_ALLOCA_WITH_ALIGN:
+             align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
+                      ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
+                      : BIGGEST_ALIGNMENT);
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
-                          (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
+                          (~(((HOST_WIDE_INT) align)
                              / BITS_PER_UNIT - 1));
              break;
 
+           /* These builtins return their first argument, unmodified.  */
+           case BUILT_IN_MEMCPY:
+           case BUILT_IN_MEMMOVE:
+           case BUILT_IN_MEMSET:
+           case BUILT_IN_STRCPY:
+           case BUILT_IN_STRNCPY:
+           case BUILT_IN_MEMCPY_CHK:
+           case BUILT_IN_MEMMOVE_CHK:
+           case BUILT_IN_MEMSET_CHK:
+           case BUILT_IN_STRCPY_CHK:
+           case BUILT_IN_STRNCPY_CHK:
+             val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
+             break;
+
+           case BUILT_IN_ASSUME_ALIGNED:
+             val = bit_value_assume_aligned (stmt);
+             break;
+
            default:;
            }
        }
@@ -2092,6 +1709,157 @@ evaluate_stmt (gimple stmt)
   return val;
 }
 
+/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
+   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */
+
+static void
+insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
+{
+  gimple stmt, clobber_stmt;
+  tree clobber;
+  imm_use_iterator iter;
+  gimple_stmt_iterator i;
+  gimple *slot;
+
+  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
+    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
+      {
+       clobber = build_constructor (TREE_TYPE (var), NULL);
+       TREE_THIS_VOLATILE (clobber) = 1;
+       clobber_stmt = gimple_build_assign (var, clobber);
+
+       i = gsi_for_stmt (stmt);
+       gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
+      }
+    else if (gimple_code (stmt) == GIMPLE_PHI)
+      {
+       if (*visited == NULL)
+         *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);
+
+       slot = (gimple *)htab_find_slot (*visited, stmt, INSERT);
+       if (*slot != NULL)
+         continue;
+
+       *slot = stmt;
+       insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
+                                            visited);
+      }
+    else if (gimple_assign_ssa_name_copy_p (stmt))
+      insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
+                                          visited);
+    else
+      gcc_assert (is_gimple_debug (stmt));
+}
+
+/* Advance the iterator to the previous non-debug gimple statement in the same
+   or dominating basic block.  */
+
+static inline void
+gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
+{
+  basic_block dom;
+
+  gsi_prev_nondebug (i);
+  while (gsi_end_p (*i))
+    {
+      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
+      if (dom == NULL || dom == ENTRY_BLOCK_PTR)
+       return;
+
+      *i = gsi_last_bb (dom);
+    }
+}
+
+/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
+   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
+
+   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
+   previous pass (such as DOM) duplicated it along multiple paths to a BB.  In
+   that case the function gives up without inserting the clobbers.  */
+
+static void
+insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
+{
+  gimple stmt;
+  tree saved_val;
+  htab_t visited = NULL;
+
+  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
+    {
+      stmt = gsi_stmt (i);
+
+      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
+       continue;
+
+      saved_val = gimple_call_lhs (stmt);
+      if (saved_val == NULL_TREE)
+       continue;
+
+      insert_clobber_before_stack_restore (saved_val, var, &visited);
+      break;
+    }
+
+  if (visited != NULL)
+    htab_delete (visited);
+}
+
+/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
+   a fixed-size array and returns its address if successful, otherwise returns
+   NULL_TREE.  */
+
+static tree
+fold_builtin_alloca_with_align (gimple stmt)
+{
+  unsigned HOST_WIDE_INT size, threshold, n_elem;
+  tree lhs, arg, block, var, elem_type, array_type;
+
+  /* Get lhs.  */
+  lhs = gimple_call_lhs (stmt);
+  if (lhs == NULL_TREE)
+    return NULL_TREE;
+
+  /* Detect constant argument.  */
+  arg = get_constant_value (gimple_call_arg (stmt, 0));
+  if (arg == NULL_TREE
+      || TREE_CODE (arg) != INTEGER_CST
+      || !host_integerp (arg, 1))
+    return NULL_TREE;
+
+  size = TREE_INT_CST_LOW (arg);
+
+  /* Heuristic: don't fold large allocas.  */
+  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
+  /* In case the alloca is located at function entry, it has the same lifetime
+     as a declared array, so we allow a larger size.  */
+  block = gimple_block (stmt);
+  if (!(cfun->after_inlining
+        && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
+    threshold /= 10;
+  if (size > threshold)
+    return NULL_TREE;
+
+  /* Declare array.  */
+  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
+  n_elem = size * 8 / BITS_PER_UNIT;
+  array_type = build_array_type_nelts (elem_type, n_elem);
+  var = create_tmp_var (array_type, NULL);
+  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
+  {
+    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
+    if (pi != NULL && !pi->pt.anything)
+      {
+       bool singleton_p;
+       unsigned uid;
+       singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
+       gcc_assert (singleton_p);
+       SET_DECL_PT_UID (var, uid);
+      }
+  }
+
+  /* Fold alloca to the address of the array.  */
+  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
+}
+
 /* Fold the stmt at *GSI with CCP specific information that propagating
    and regular folding does not catch.  */
 
@@ -2133,6 +1901,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
     case GIMPLE_CALL:
       {
        tree lhs = gimple_call_lhs (stmt);
+       int flags = gimple_call_flags (stmt);
        tree val;
        tree argt;
        bool changed = false;
@@ -2143,7 +1912,10 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
-           && (val = get_constant_value (lhs)))
+           && (val = get_constant_value (lhs))
+           /* Don't optimize away calls that have side-effects.  */
+           && (flags & (ECF_CONST|ECF_PURE)) != 0
+           && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
          {
            tree new_rhs = unshare_expr (val);
            bool res;
@@ -2155,11 +1927,32 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
            return true;
          }
 
+       /* Internal calls provide no argument types, so the extra laxity
+          for normal calls does not apply.  */
+       if (gimple_call_internal_p (stmt))
+         return false;
+
+        /* The heuristic of fold_builtin_alloca_with_align differs before and
+          after inlining, so we don't require the arg to be changed into a
+          constant for folding, but just to be constant.  */
+        if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
+          {
+            tree new_rhs = fold_builtin_alloca_with_align (stmt);
+            if (new_rhs)
+             {
+               bool res = update_call_from_tree (gsi, new_rhs);
+               tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
+               gcc_assert (res);
+               insert_clobbers_for_var (*gsi, var);
+               return true;
+             }
+          }
+
        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
-       argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
+       argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
@@ -2350,12 +2143,14 @@ ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
 static unsigned int
 do_ssa_ccp (void)
 {
+  unsigned int todo = 0;
+  calculate_dominance_info (CDI_DOMINATORS);
   ccp_initialize ();
   ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
   if (ccp_finalize ())
-    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
-  else
-    return 0;
+    todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
+  free_dominance_info (CDI_DOMINATORS);
+  return todo;
 }
 
 
@@ -2381,7 +2176,7 @@ struct gimple_opt_pass pass_ccp =
   0,                                   /* properties_provided */
   0,                                   /* properties_destroyed */
   0,                                   /* todo_flags_start */
-  TODO_dump_func | TODO_verify_ssa
+  TODO_verify_ssa
   | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
  }
 };
@@ -2421,7 +2216,8 @@ optimize_stack_restore (gimple_stmt_iterator i)
       if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
-         || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
+         || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
+         || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
        return NULL_TREE;
 
       if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
@@ -2500,7 +2296,7 @@ optimize_stdarg_builtin (gimple call)
     case BUILT_IN_VA_START:
       if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
-          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
+         || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
        return NULL_TREE;
 
       if (gimple_call_num_args (call) != 2)
@@ -2513,7 +2309,7 @@ optimize_stdarg_builtin (gimple call)
        return NULL_TREE;
 
       lhs = build_fold_indirect_ref_loc (loc, lhs);
-      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
+      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                              1, integer_zero_node);
       rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
       return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
@@ -2598,6 +2394,11 @@ execute_fold_all_builtins (void)
                 result = integer_zero_node;
                break;
 
+             case BUILT_IN_ASSUME_ALIGNED:
+               /* Remove __builtin_assume_aligned.  */
+               result = gimple_call_arg (stmt, 0);
+               break;
+
              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
@@ -2684,8 +2485,7 @@ struct gimple_opt_pass pass_fold_builtins =
   0,                                   /* properties_provided */
   0,                                   /* properties_destroyed */
   0,                                   /* todo_flags_start */
-  TODO_dump_func
-    | TODO_verify_ssa
+  TODO_verify_ssa
     | TODO_update_ssa                  /* todo_flags_finish */
  }
 };