OSDN Git Service

* var-tracking.c (vt_expand_loc_callback): Don't run
[pf3gnuchains/gcc-fork.git] / gcc / tree-inline.c
index e0928b9..bb2ee23 100644 (file)
@@ -41,7 +41,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-mudflap.h"
 #include "tree-flow.h"
 #include "function.h"
-#include "ggc.h"
 #include "tree-flow.h"
 #include "diagnostic.h"
 #include "except.h"
@@ -102,10 +101,6 @@ along with GCC; see the file COPYING3.  If not see
      calls?  */
 
 
-/* Weights that estimate_num_insns uses for heuristics in inlining.  */
-
-eni_weights eni_inlining_weights;
-
 /* Weights that estimate_num_insns uses to estimate the size of the
    produced code.  */
 
@@ -211,11 +206,21 @@ remap_ssa_name (tree name, copy_body_data *id)
       && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
          || !id->transform_return_to_modify))
     {
+      struct ptr_info_def *pi;
       new_tree = make_ssa_name (new_tree, NULL);
       insert_decl_map (id, name, new_tree);
       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
       TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
+      /* At least IPA points-to info can be directly transferred.  */
+      if (id->src_cfun->gimple_df
+         && id->src_cfun->gimple_df->ipa_pta
+         && (pi = SSA_NAME_PTR_INFO (name))
+         && !pi->pt.anything)
+       {
+         struct ptr_info_def *new_pi = get_ptr_info (new_tree);
+         new_pi->pt = pi->pt;
+       }
       if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining function having uninitialized variable, we might
@@ -660,16 +665,30 @@ copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
     }
 
   if (BIND_EXPR_VARS (*tp))
-    /* This will remap a lot of the same decls again, but this should be
-       harmless.  */
-    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
+    {
+      tree t;
+
+      /* This will remap a lot of the same decls again, but this should be
+        harmless.  */
+      BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
+      /* Also copy value-expressions.  */
+      for (t = BIND_EXPR_VARS (*tp); t; t = TREE_CHAIN (t))
+       if (TREE_CODE (t) == VAR_DECL
+           && DECL_HAS_VALUE_EXPR_P (t))
+         {
+           tree tem = DECL_VALUE_EXPR (t);
+           walk_tree (&tem, copy_tree_body_r, id, NULL);
+           SET_DECL_VALUE_EXPR (t, tem);
+         }
+    }
 }
 
 
 /* Create a new gimple_seq by remapping all the statements in BODY
    using the inlining information in ID.  */
 
-gimple_seq
+static gimple_seq
 remap_gimple_seq (gimple_seq body, copy_body_data *id)
 {
   gimple_stmt_iterator si;
@@ -1391,6 +1410,12 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
                  default:
                    break;
                  }
+
+             /* Reset alias info if we didn't apply measures to
+                keep it valid over inlining by setting DECL_PT_UID.  */
+             if (!id->src_cfun->gimple_df
+                 || !id->src_cfun->gimple_df->ipa_pta)
+               gimple_call_reset_alias_info (copy);
            }
            break;
 
@@ -1673,9 +1698,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
              /* Constant propagation on argument done during inlining
                 may create new direct call.  Produce an edge for it.  */
              if ((!edge
-                  || (edge->indirect_call
+                  || (edge->indirect_inlining_edge
                       && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
-                 && is_gimple_call (stmt)
                  && (fn = gimple_call_fndecl (stmt)) != NULL)
                {
                  struct cgraph_node *dest = cgraph_node (fn);
@@ -1687,6 +1711,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
                     other cases we hit a bug (incorrect node sharing is the
                     most common reason for missing edges).  */
                  gcc_assert (dest->needed || !dest->analyzed
+                             || dest->address_taken
                              || !id->src_node->analyzed);
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    cgraph_create_edge_including_clones
@@ -1999,7 +2024,6 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
   cfun->last_verified = src_cfun->last_verified;
   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
-  cfun->function_frequency = src_cfun->function_frequency;
   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
   cfun->stdarg = src_cfun->stdarg;
   cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
@@ -3254,22 +3278,93 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
+           /* Builtins that expand to constants.  */
            case BUILT_IN_CONSTANT_P:
-             return 0;
            case BUILT_IN_EXPECT:
-             return 0;
-
-           /* Prefetch instruction is not expensive.  */
-           case BUILT_IN_PREFETCH:
-             cost = weights->target_builtin_call_cost;
-             break;
-
+           case BUILT_IN_OBJECT_SIZE:
+           case BUILT_IN_UNREACHABLE:
+           /* Simple register moves or loads from stack.  */
+           case BUILT_IN_RETURN_ADDRESS:
+           case BUILT_IN_EXTRACT_RETURN_ADDR:
+           case BUILT_IN_FROB_RETURN_ADDR:
+           case BUILT_IN_RETURN:
+           case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
+           case BUILT_IN_FRAME_ADDRESS:
+           case BUILT_IN_VA_END:
+           case BUILT_IN_STACK_SAVE:
+           case BUILT_IN_STACK_RESTORE:
            /* Exception state returns or moves registers around.  */
            case BUILT_IN_EH_FILTER:
            case BUILT_IN_EH_POINTER:
            case BUILT_IN_EH_COPY_VALUES:
              return 0;
 
+           /* Builtins that are not expensive (that is, they are most probably
+              expanded inline into reasonably simple code).  */
+           case BUILT_IN_ABS:
+           case BUILT_IN_ALLOCA:
+           case BUILT_IN_BSWAP32:
+           case BUILT_IN_BSWAP64:
+           case BUILT_IN_CLZ:
+           case BUILT_IN_CLZIMAX:
+           case BUILT_IN_CLZL:
+           case BUILT_IN_CLZLL:
+           case BUILT_IN_CTZ:
+           case BUILT_IN_CTZIMAX:
+           case BUILT_IN_CTZL:
+           case BUILT_IN_CTZLL:
+           case BUILT_IN_FFS:
+           case BUILT_IN_FFSIMAX:
+           case BUILT_IN_FFSL:
+           case BUILT_IN_FFSLL:
+           case BUILT_IN_IMAXABS:
+           case BUILT_IN_FINITE:
+           case BUILT_IN_FINITEF:
+           case BUILT_IN_FINITEL:
+           case BUILT_IN_FINITED32:
+           case BUILT_IN_FINITED64:
+           case BUILT_IN_FINITED128:
+           case BUILT_IN_FPCLASSIFY:
+           case BUILT_IN_ISFINITE:
+           case BUILT_IN_ISINF_SIGN:
+           case BUILT_IN_ISINF:
+           case BUILT_IN_ISINFF:
+           case BUILT_IN_ISINFL:
+           case BUILT_IN_ISINFD32:
+           case BUILT_IN_ISINFD64:
+           case BUILT_IN_ISINFD128:
+           case BUILT_IN_ISNAN:
+           case BUILT_IN_ISNANF:
+           case BUILT_IN_ISNANL:
+           case BUILT_IN_ISNAND32:
+           case BUILT_IN_ISNAND64:
+           case BUILT_IN_ISNAND128:
+           case BUILT_IN_ISNORMAL:
+           case BUILT_IN_ISGREATER:
+           case BUILT_IN_ISGREATEREQUAL:
+           case BUILT_IN_ISLESS:
+           case BUILT_IN_ISLESSEQUAL:
+           case BUILT_IN_ISLESSGREATER:
+           case BUILT_IN_ISUNORDERED:
+           case BUILT_IN_VA_ARG_PACK:
+           case BUILT_IN_VA_ARG_PACK_LEN:
+           case BUILT_IN_VA_COPY:
+           case BUILT_IN_TRAP:
+           case BUILT_IN_SAVEREGS:
+           case BUILT_IN_POPCOUNTL:
+           case BUILT_IN_POPCOUNTLL:
+           case BUILT_IN_POPCOUNTIMAX:
+           case BUILT_IN_POPCOUNT:
+           case BUILT_IN_PARITYL:
+           case BUILT_IN_PARITYLL:
+           case BUILT_IN_PARITYIMAX:
+           case BUILT_IN_PARITY:
+           case BUILT_IN_LABS:
+           case BUILT_IN_LLABS:
+           case BUILT_IN_PREFETCH:
+             cost = weights->target_builtin_call_cost;
+             break;
+
            default:
              break;
            }
@@ -3458,7 +3553,7 @@ get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
   struct cgraph_edge *cs;
 
   cs = cgraph_edge (node, stmt);
-  if (cs)
+  if (cs && !cs->indirect_unknown_callee)
     return cs->callee->decl;
 
   return NULL_TREE;
@@ -3541,7 +3636,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
       /* If this call was originally indirect, we do not want to emit any
         inlining related warnings or sorry messages because there are no
         guarantees regarding those.  */
-      if (cg_edge->indirect_call)
+      if (cg_edge->indirect_inlining_edge)
        goto egress;
 
       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
@@ -3724,12 +3819,9 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
             cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
             bb, return_block);
 
-  /* Reset the escaped and callused solutions.  */
+  /* Reset the escaped solution.  */
   if (cfun->gimple_df)
-    {
-      pt_solution_reset (&cfun->gimple_df->escaped);
-      pt_solution_reset (&cfun->gimple_df->callused);
-    }
+    pt_solution_reset (&cfun->gimple_df->escaped);
 
   /* Clean up.  */
   if (id->debug_map)
@@ -4512,6 +4604,8 @@ copy_decl_to_var (tree decl, copy_body_data *id)
 
   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
+  if (DECL_PT_UID_SET_P (decl))
+    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
   TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
   TREE_READONLY (copy) = TREE_READONLY (decl);
   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
@@ -4537,6 +4631,8 @@ copy_result_decl_to_var (tree decl, copy_body_data *id)
 
   copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
                     VAR_DECL, DECL_NAME (decl), type);
+  if (DECL_PT_UID_SET_P (decl))
+    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
   TREE_READONLY (copy) = TREE_READONLY (decl);
   TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
   if (!DECL_BY_REFERENCE (decl))
@@ -4829,6 +4925,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
     (DECL_STRUCT_FUNCTION (old_decl));
   initialize_cfun (new_decl, old_decl,
                   old_entry_block->count);
+  DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
+    = id.src_cfun->gimple_df->ipa_pta;
   push_cfun (DECL_STRUCT_FUNCTION (new_decl));
 
   /* Copy the function's static chain.  */
@@ -4847,6 +4945,15 @@ tree_function_versioning (tree old_decl, tree new_decl,
        if (replace_info->replace_p)
          {
            tree op = replace_info->new_tree;
+           if (!replace_info->old_tree)
+             {
+               int i = replace_info->parm_num;
+               tree parm;
+               for (parm = DECL_ARGUMENTS (old_decl); i; parm = TREE_CHAIN (parm))
+                 i --;
+               replace_info->old_tree = parm;
+             }
+               
 
            STRIP_NOPS (op);
 
@@ -4940,6 +5047,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
   pointer_set_destroy (id.statements_to_fold);
   fold_cond_expr_cond ();
   delete_unreachable_blocks_update_callgraph (&id);
+  if (id.dst_node->analyzed)
+    cgraph_rebuild_references ();
   update_ssa (TODO_update_ssa);
   free_dominance_info (CDI_DOMINATORS);
   free_dominance_info (CDI_POST_DOMINATORS);
@@ -5066,7 +5175,7 @@ tree_can_inline_p (struct cgraph_edge *e)
        return false;
     }
 #endif
-  tree caller, callee;
+  tree caller, callee, lhs;
 
   caller = e->caller->decl;
   callee = e->callee->decl;
@@ -5092,8 +5201,16 @@ tree_can_inline_p (struct cgraph_edge *e)
       return false;
     }
 
+  /* Do not inline calls where we cannot trivially work around mismatches
+     in argument or return types.  */
   if (e->call_stmt
-      && !gimple_check_call_args (e->call_stmt))
+      && ((DECL_RESULT (callee)
+          && !DECL_BY_REFERENCE (DECL_RESULT (callee))
+          && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
+          && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
+                                         TREE_TYPE (lhs))
+          && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
+         || !gimple_check_call_args (e->call_stmt)))
     {
       e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
       gimple_call_set_cannot_inline (e->call_stmt, true);