[pf3gnuchains/gcc-fork.git] / gcc / tree-sra.c
index 1439c43..681f3b7 100644
@@ -701,7 +701,12 @@ type_internals_preclude_sra_p (tree type, const char **msg)
              {
                *msg = "structure field size not fixed";
                return true;
-             }       
+             }
+           if (!host_integerp (bit_position (fld), 0))
+             {
+               *msg = "structure field size too big";
+               return true;
+             }
            if (AGGREGATE_TYPE_P (ft)
                    && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
@@ -971,7 +976,8 @@ static void
 disqualify_base_of_expr (tree t, const char *reason)
 {
   t = get_base_address (t);
-  if (sra_mode == SRA_MODE_EARLY_IPA
+  if (t
+      && sra_mode == SRA_MODE_EARLY_IPA
       && TREE_CODE (t) == MEM_REF)
     t = get_ssa_base_param (TREE_OPERAND (t, 0));
 
@@ -1526,10 +1532,12 @@ build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
      we can extract more optimistic alignment information
      by looking at the access mode.  That would constrain the
      alignment of base + base_offset which we would need to
-     adjust according to offset.
-     ???  But it is not at all clear that prev_base is an access
-     that was in the IL that way, so be conservative for now.  */
+     adjust according to offset.  */
   align = get_pointer_alignment_1 (base, &misalign);
+  if (misalign == 0
+      && (TREE_CODE (prev_base) == MEM_REF
+         || TREE_CODE (prev_base) == TARGET_MEM_REF))
+    align = MAX (align, TYPE_ALIGN (TREE_TYPE (prev_base)));
   misalign += (double_int_sext (tree_to_double_int (off),
                                TYPE_PRECISION (TREE_TYPE (off))).low
               * BITS_PER_UNIT);
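
A rough, hypothetical sketch (not part of the patch) of the situation the new MAX (align, TYPE_ALIGN (...)) step covers; 'store_int' and 'p' are made-up names:

/* 'p' alone carries no alignment information, but the dereference is
   represented in GIMPLE as a MEM_REF whose type is 'int', so when the
   pointer is not known to be misaligned the access may be assumed to
   have at least TYPE_ALIGN of that type, typically 32 bits.  */
void
store_int (void *p)
{
  *(int *) p = 0;	/* roughly MEM[(int *)p] = 0 in GIMPLE */
}
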
@@ -2179,9 +2187,12 @@ analyze_access_subtree (struct access *root, struct access *parent,
          && (TREE_CODE (root->type) != INTEGER_TYPE
              || TYPE_PRECISION (root->type) != root->size)
          /* But leave bitfield accesses alone.  */
-         && (root->offset % BITS_PER_UNIT) == 0)
+         && (TREE_CODE (root->expr) != COMPONENT_REF
+             || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
        {
          tree rt = root->type;
+         gcc_assert ((root->offset % BITS_PER_UNIT) == 0
+                     && (root->size % BITS_PER_UNIT) == 0);
          root->type = build_nonstandard_integer_type (root->size,
                                                       TYPE_UNSIGNED (rt));
          root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
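
A minimal hypothetical example (not a testcase from the patch) of the kind of access the new DECL_BIT_FIELD check leaves alone; 'flags' and 'read_b' are made-up names:

/* Reading s.b is a COMPONENT_REF whose FIELD_DECL has DECL_BIT_FIELD
   set, so the scalarizer keeps the declared type instead of
   substituting a nonstandard integer type for it.  */
struct flags { unsigned a : 3; unsigned b : 5; };

unsigned
read_b (struct flags s)
{
  return s.b;
}
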
@@ -3033,15 +3044,13 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
             ???  This should move to fold_stmt which we simply should
             call after building a VIEW_CONVERT_EXPR here.  */
          if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
-             && !contains_bitfld_comp_ref_p (lhs)
-             && !access_has_children_p (lacc))
+             && !contains_bitfld_comp_ref_p (lhs))
            {
              lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
              gimple_assign_set_lhs (*stmt, lhs);
            }
          else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
-                  && !contains_vce_or_bfcref_p (rhs)
-                  && !access_has_children_p (racc))
+                  && !contains_vce_or_bfcref_p (rhs))
            rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
 
          if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
@@ -3118,7 +3127,13 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
     }
   else
     {
-      if (access_has_children_p (lacc) && access_has_children_p (racc))
+      if (access_has_children_p (lacc)
+         && access_has_children_p (racc)
+         /* When an access represents an unscalarizable region, it usually
+            represents accesses with variable offset and thus must not be used
+            to generate new memory accesses.  */
+         && !lacc->grp_unscalarizable_region
+         && !racc->grp_unscalarizable_region)
        {
          gimple_stmt_iterator orig_gsi = *gsi;
          enum unscalarized_data_handling refreshed;
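
A hypothetical illustration of how an unscalarizable region arises; 'vec16', 'element' and 'i' are made-up names:

/* The variable index 'i' gives the access an offset that is unknown at
   compile time, so the access covering s.a is marked
   grp_unscalarizable_region and is not used as a template for
   generating new loads or stores.  */
struct vec16 { int a[16]; int len; };

int
element (struct vec16 s, int i)
{
  return s.a[i];
}
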
@@ -4691,8 +4706,8 @@ convert_callers_for_node (struct cgraph_node *node,
       if (dump_file)
        fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
                 cs->caller->uid, cs->callee->uid,
-                cgraph_node_name (cs->caller),
-                cgraph_node_name (cs->callee));
+                xstrdup (cgraph_node_name (cs->caller)),
+                xstrdup (cgraph_node_name (cs->callee)));
 
       ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
 
@@ -4766,6 +4781,8 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
 
   new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
                                         false, NULL, NULL, "isra");
+  VEC_free (cgraph_edge_p, heap, redirect_callers);
+
   current_function_decl = new_node->decl;
   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));