/* Interprocedural constant propagation
- Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
Contributed by Razya Ladelsky <RAZYA@il.ibm.com>
This file is part of GCC.
else if (jfunc->type == IPA_JF_PASS_THROUGH)
{
struct ipcp_lattice *caller_lat;
+ tree cst;
- caller_lat = ipcp_get_lattice (info, jfunc->value.formal_id);
+ caller_lat = ipcp_get_lattice (info, jfunc->value.pass_through.formal_id);
lat->type = caller_lat->type;
- lat->constant = caller_lat->constant;
+ if (caller_lat->type != IPA_CONST_VALUE)
+ return;
+ cst = caller_lat->constant;
+
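+ /* A pass-through jump function may apply a binary operation to the
+ value it forwards; for comparison operations the result is a boolean. */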
+ if (jfunc->value.pass_through.operation != NOP_EXPR)
+ {
+ tree restype;
+ if (TREE_CODE_CLASS (jfunc->value.pass_through.operation)
+ == tcc_comparison)
+ restype = boolean_type_node;
+ else
+ restype = TREE_TYPE (cst);
+ cst = fold_binary (jfunc->value.pass_through.operation,
+ restype, cst, jfunc->value.pass_through.operand);
+ }
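+ /* fold_binary returns NULL_TREE when it cannot fold the expression;
+ a result that is not an interprocedural invariant cannot be
+ propagated either, so drop to bottom in both cases. */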
+ if (!cst || !is_gimple_ip_invariant (cst))
+ lat->type = IPA_BOTTOM;
+ lat->constant = cst;
+ }
+ else if (jfunc->type == IPA_JF_ANCESTOR)
+ {
+ struct ipcp_lattice *caller_lat;
+ tree t;
+ bool ok;
+
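+ /* An ancestor jump function describes an argument that is the address
+ of a sub-object (for example a base class) of the caller's parameter,
+ given by an offset and a type. */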
+ caller_lat = ipcp_get_lattice (info, jfunc->value.ancestor.formal_id);
+ lat->type = caller_lat->type;
+ if (caller_lat->type != IPA_CONST_VALUE)
+ return;
+ if (TREE_CODE (caller_lat->constant) != ADDR_EXPR)
+ {
+ /* This can happen when the constant is a NULL pointer. */
+ lat->type = IPA_BOTTOM;
+ return;
+ }
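+ /* Strip the ADDR_EXPR and try to build a reference to the sub-object
+ at the recorded offset and type. */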
+ t = TREE_OPERAND (caller_lat->constant, 0);
+ ok = build_ref_for_offset (&t, TREE_TYPE (t),
+ jfunc->value.ancestor.offset,
+ jfunc->value.ancestor.type, false);
+ if (!ok)
+ {
+ lat->type = IPA_BOTTOM;
+ lat->constant = NULL_TREE;
+ }
+ else
+ lat->constant = build_fold_addr_expr (t);
}
else
lat->type = IPA_BOTTOM;
}
}
+/* Return true if ipcp algorithms would allow cloning NODE. */
+
+static bool
+ipcp_versionable_function_p (struct cgraph_node *node)
+{
+ tree decl = node->decl;
+ basic_block bb;
+
+ /* There are a number of generic reasons functions cannot be versioned. */
+ if (!tree_versionable_function_p (decl))
+ return false;
+
+ /* Removing arguments doesn't work if the function takes varargs. */
+ if (DECL_STRUCT_FUNCTION (decl)->stdarg)
+ return false;
+
+ /* Removing arguments doesn't work if we use __builtin_apply_args. */
+ FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (decl))
+ {
+ gimple_stmt_iterator gsi;
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ const_gimple stmt = gsi_stmt (gsi);
+ tree t;
+
+ if (!is_gimple_call (stmt))
+ continue;
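+ /* gimple_call_fndecl returns NULL_TREE for indirect calls. */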
+ t = gimple_call_fndecl (stmt);
+ if (t == NULL_TREE)
+ continue;
+ if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (t) == BUILT_IN_APPLY_ARGS)
+ return false;
+ }
+ }
+
+ return true;
+}
+
/* Return true if this NODE is a viable candidate for cloning. */
static bool
ipcp_cloning_candidate_p (struct cgraph_node *node)
FIXME: in the future we should clone such functions when they are called with
different constants, but the current ipcp implementation is not good at this.
*/
- if (!node->needed || !node->analyzed)
+ if (cgraph_only_called_directly_p (node) || !node->analyzed)
return false;
if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
cgraph_node_name (node));
return false;
}
- if (!tree_versionable_function_p (node->decl))
+ if (!ipcp_versionable_function_p (node))
{
if (dump_file)
fprintf (dump_file, "Not considering %s for cloning; body is not versionable.\n",
cgraph_node_name (node));
return false;
}
- if (node->local.inline_summary.self_insns < n_calls)
+ if (node->local.inline_summary.self_size < n_calls)
{
if (dump_file)
fprintf (dump_file, "Considering %s for cloning; code would shrink.\n",
if (dump_file)
fprintf (dump_file, "Not considering %s for cloning; no hot calls.\n",
cgraph_node_name (node));
+ return false;
}
if (dump_file)
fprintf (dump_file, "Considering %s for cloning.\n",
if (ipa_is_called_with_var_arguments (info))
type = IPA_BOTTOM;
- else if (!node->needed)
+ else if (cgraph_only_called_directly_p (node))
type = IPA_TOP;
/* When cloning is allowed, we can assume that externally visible functions
are not called. We will compensate for this by cloning later. */
/* Building jump functions. */
for (cs = node->callees; cs; cs = cs->next_callee)
{
- if (!cs->callee->analyzed)
+ /* We do not need to bother analyzing calls to unknown
+ functions unless they may become known during LTO/WHOPR. */
+ if (!cs->callee->analyzed && !flag_lto && !flag_whopr)
continue;
ipa_count_arguments (cs);
if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
struct ipa_node_params *callee_info = IPA_NODE_REF (cs->callee);
struct ipa_edge_args *args = IPA_EDGE_REF (cs);
- if (ipa_is_called_with_var_arguments (callee_info))
+ if (ipa_is_called_with_var_arguments (callee_info)
+ || !cs->callee->analyzed)
continue;
count = ipa_get_cs_argument_count (args);
if (dump_file)
fprintf (dump_file, "\nIPA iterate stage:\n\n");
+
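+ /* Data read back from an LTO stream must be brought up to date first. */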
+ if (in_lto_p)
+ ipa_update_after_lto_read ();
+
for (node = cgraph_nodes; node; node = node->next)
{
ipcp_initialize_node_lattices (node);
{
/* Once we are able to do in-place replacement, we can be more
lax here. */
- return tree_versionable_function_p (node->decl);
+ return ipcp_versionable_function_p (node);
}
/* Print count scale data structures. */
{
next = cs->next_caller;
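+ /* Only the cgraph edge needs redirecting here; the call statement
+ itself is expected to be rewritten when clones are materialized. */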
if (!ipcp_node_is_clone (cs->caller) && ipcp_need_redirect_p (cs))
- {
- cgraph_redirect_edge_callee (cs, orig_node);
- gimple_call_set_fndecl (cs->call_stmt, orig_node->decl);
- }
+ cgraph_redirect_edge_callee (cs, orig_node);
}
}
}
struct cgraph_edge *cs;
int redirectable_node_callers = 0;
int removable_args = 0;
- bool need_original = node->needed;
+ bool need_original = !cgraph_only_called_directly_p (node);
struct ipa_node_params *info;
int i, count;
int growth;
call site. Precise cost is difficult to get, as our size metric counts
constants and moves as free. Generally we are looking for cases where a
small function is called very many times. */
- growth = node->local.inline_summary.self_insns
+ growth = node->local.inline_summary.self_size
- removable_args * redirectable_node_callers;
if (growth < 0)
return 0;
cost /= freq_sum * 1000 / REG_BR_PROB_BASE + 1;
if (dump_file)
fprintf (dump_file, "Cost of versioning %s is %i, (size: %i, freq: %i)\n",
- cgraph_node_name (node), cost, node->local.inline_summary.self_insns,
+ cgraph_node_name (node), cost, node->local.inline_summary.self_size,
freq_sum);
return cost + 1;
}
{
if (node->count > max_count)
max_count = node->count;
- overall_size += node->local.inline_summary.self_insns;
+ overall_size += node->local.inline_summary.self_size;
}
max_new_size = overall_size;
for (cs = node->callers; cs != NULL; cs = cs->next_caller)
if (cs->caller == node || ipcp_need_redirect_p (cs))
break;
- if (!cs && !node->needed)
+ if (!cs && cgraph_only_called_directly_p (node))
bitmap_set_bit (dead_nodes, node->uid);
info = IPA_NODE_REF (node);
ipcp_init_stage ();
}
+/* Write ipcp summary for nodes in SET. */
+static void
+ipcp_write_summary (cgraph_node_set set)
+{
+ ipa_prop_write_jump_functions (set);
+}
+
+/* Read ipcp summary. */
+static void
+ipcp_read_summary (void)
+{
+ ipa_prop_read_jump_functions ();
+}
+
/* Gate for IPCP optimization. */
static bool
cgraph_gate_cp (void)
return flag_ipa_cp;
}
-struct ipa_opt_pass pass_ipa_cp =
+struct ipa_opt_pass_d pass_ipa_cp =
{
{
IPA_PASS,
TODO_remove_functions /* todo_flags_finish */
},
ipcp_generate_summary, /* generate_summary */
- NULL, /* write_summary */
- NULL, /* read_summary */
+ ipcp_write_summary, /* write_summary */
+ ipcp_read_summary, /* read_summary */
NULL, /* function_read_summary */
0, /* TODOs */
NULL, /* function_transform */