static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
+static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Add cgraph NODE described by INFO to the worklist WL regardless of whether
it is in one or not. It should almost never be used directly, as opposed to
wl = NULL;
for (node = cgraph_nodes; node; node = node->next)
- if (node->analyzed)
+ if (node->analyzed && !node->alias)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
/* Unreachable nodes should have been eliminated before ipcp and
/* Return index of the formal whose tree is PTREE in function which corresponds
to INFO. */
-static int
+int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
int i, count;
}
}
+/* Structure to be passed in between detect_type_change and
+   check_stmt_for_type_change.  Used as the DATA argument of
+   walk_aliased_vdefs.  */
+
+struct type_change_info
+{
+  /* Set to true if a dynamic type change has been detected.  */
+  bool type_maybe_changed;
+};
+
+/* Return true if STMT can modify a virtual method table pointer.
+
+   This function makes special assumptions about both constructors and
+   destructors which are all the functions that are allowed to alter the VMT
+   pointers.  It assumes that destructors begin with assignment into all VMT
+   pointers and that constructors essentially look in the following way:
+
+   1) The very first thing they do is that they call constructors of ancestor
+   sub-objects that have them.
+
+   2) Then VMT pointers of this and all its ancestors is set to new values
+   corresponding to the type corresponding to the constructor.
+
+   3) Only afterwards, other stuff such as constructor of member sub-objects
+   and the code written by the user is run.  Only this may include calling
+   virtual functions, directly or indirectly.
+
+   There is no way to call a constructor of an ancestor sub-object in any
+   other way.
+
+   This means that we do not have to care whether constructors get the correct
+   type information because they will always change it (in fact, if we define
+   the type to be given by the VMT pointer, it is undefined).
+
+   The most important fact to derive from the above is that if, for some
+   statement in the section 3, we try to detect whether the dynamic type has
+   changed, we can safely ignore all calls as we examine the function body
+   backwards until we reach statements in section 2 because these calls cannot
+   be ancestor constructors or destructors (if the input is not bogus) and so
+   do not change the dynamic type (this holds true only for automatically
+   allocated objects but at the moment we devirtualize only these).  We then
+   must detect that statements in section 2 change the dynamic type and can try
+   to derive the new type.  That is enough and we can stop, we will never see
+   the calls into constructors of sub-objects in this code.  Therefore we can
+   safely ignore all call statements that we traverse.  */
+
+static bool
+stmt_may_be_vtbl_ptr_store (gimple stmt)
+{
+  /* Calls are deliberately ignored, see the big comment above.  */
+  if (is_gimple_call (stmt))
+    return false;
+  else if (is_gimple_assign (stmt))
+    {
+      tree lhs = gimple_assign_lhs (stmt);
+
+      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
+        {
+          /* With strict aliasing, a non-pointer scalar store cannot alias
+             the VMT pointer.  */
+          if (flag_strict_aliasing
+              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
+            return false;
+
+          /* A store to a non-virtual field cannot change the VMT pointer.  */
+          if (TREE_CODE (lhs) == COMPONENT_REF
+              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
+            return false;
+          /* In the future we might want to use get_base_ref_and_offset to find
+             if there is a field corresponding to the offset and if so, proceed
+             almost like if it was a component ref.  */
+        }
+    }
+  /* Conservatively assume anything else may store into a VMT pointer.  */
+  return true;
+}
+
+/* Callback of walk_aliased_vdefs and a helper function for
+   detect_type_change to check whether a particular statement may modify
+   the virtual table pointer, and if possible also determine the new type of
+   the (sub-)object.  It stores its result into DATA, which points to a
+   type_change_info structure.  */
+
+static bool
+check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
+{
+  gimple stmt = SSA_NAME_DEF_STMT (vdef);
+  struct type_change_info *tci = (struct type_change_info *) data;
+
+  if (stmt_may_be_vtbl_ptr_store (stmt))
+    {
+      tci->type_maybe_changed = true;
+      /* Returning true terminates the walk_aliased_vdefs traversal -- once a
+         possible VMT store is seen there is nothing more to learn.  */
+      return true;
+    }
+  else
+    return false;
+}
+
+/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
+   looking for assignments to its virtual table pointer.  If it is, return true
+   and fill in the jump function JFUNC with relevant type information or set it
+   to unknown.  ARG is the object itself (not a pointer to it, unless
+   dereferenced).  BASE is the base of the memory access as returned by
+   get_ref_base_and_extent, as is the OFFSET.  */
+
+static bool
+detect_type_change (tree arg, tree base, gimple call,
+                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
+{
+  struct type_change_info tci;
+  ao_ref ao;
+
+  gcc_checking_assert (DECL_P (arg)
+                       || TREE_CODE (arg) == MEM_REF
+                       || handled_component_p (arg));
+  /* Const calls cannot call virtual methods through VMT and so type changes do
+     not matter.  */
+  if (!flag_devirtualize || !gimple_vuse (call))
+    return false;
+
+  tci.type_maybe_changed = false;
+
+  /* Describe a pointer-sized access at OFFSET within BASE, i.e. the location
+     of the VMT pointer of the (sub-)object.  */
+  ao.ref = arg;
+  ao.base = base;
+  ao.offset = offset;
+  ao.size = POINTER_SIZE;
+  ao.max_size = ao.size;
+  ao.ref_alias_set = -1;
+  ao.base_alias_set = -1;
+
+  /* Walk the virtual definitions reaching the call, looking for a statement
+     that might store into the VMT pointer.  */
+  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
+                      &tci, NULL);
+  if (!tci.type_maybe_changed)
+    return false;
+
+  /* A possible change was found but we cannot derive the new type; mark the
+     jump function as unknown.  */
+  jfunc->type = IPA_JF_UNKNOWN;
+  return true;
+}
+
+/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
+   SSA name (its dereference will become the base and the offset is assumed to
+   be zero).  */
+
+static bool
+detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
+{
+  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
+  /* Only pointers to RECORD_TYPEs can carry a VMT pointer.  */
+  if (!flag_devirtualize
+      || !POINTER_TYPE_P (TREE_TYPE (arg))
+      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
+    return false;
+
+  /* Build *ARG (a MEM_REF at offset zero) so that detect_type_change can
+     examine the pointed-to object itself.  */
+  arg = build2 (MEM_REF, ptr_type_node, arg,
+                build_int_cst (ptr_type_node, 0));
+
+  return detect_type_change (arg, arg, call, jfunc, 0);
+}
+
+
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
of an assignment statement STMT, try to find out whether NAME can be
described by a (possibly polynomial) pass-through jump-function or an
static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
- gimple stmt, tree name)
+ gimple call, gimple stmt, tree name)
{
HOST_WIDE_INT offset, size, max_size;
- tree op1, op2, base, type;
+ tree op1, op2, base, ssa;
int index;
op1 = gimple_assign_rhs1 (stmt);
jfunc->value.pass_through.operation = gimple_assign_rhs_code (stmt);
jfunc->value.pass_through.operand = op2;
}
- else if (gimple_assign_unary_nop_p (stmt))
+ else if (gimple_assign_unary_nop_p (stmt)
+ && !detect_type_change_ssa (op1, call, jfunc))
{
jfunc->type = IPA_JF_PASS_THROUGH;
jfunc->value.pass_through.formal_id = index;
if (TREE_CODE (op1) != ADDR_EXPR)
return;
-
op1 = TREE_OPERAND (op1, 0);
- type = TREE_TYPE (op1);
- if (TREE_CODE (type) != RECORD_TYPE)
+ if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
return;
base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
if (TREE_CODE (base) != MEM_REF
|| max_size != size)
return;
offset += mem_ref_offset (base).low * BITS_PER_UNIT;
- base = TREE_OPERAND (base, 0);
- if (TREE_CODE (base) != SSA_NAME
- || !SSA_NAME_IS_DEFAULT_DEF (base)
+ ssa = TREE_OPERAND (base, 0);
+ if (TREE_CODE (ssa) != SSA_NAME
+ || !SSA_NAME_IS_DEFAULT_DEF (ssa)
|| offset < 0)
return;
/* Dynamic types are changed only in constructors and destructors and */
- index = ipa_get_param_decl_index (info, SSA_NAME_VAR (base));
- if (index >= 0)
+ index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
+ if (index >= 0
+ && !detect_type_change (op1, base, call, jfunc, offset))
{
jfunc->type = IPA_JF_ANCESTOR;
jfunc->value.ancestor.formal_id = index;
jfunc->value.ancestor.offset = offset;
- jfunc->value.ancestor.type = type;
+ jfunc->value.ancestor.type = TREE_TYPE (op1);
}
}
+/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
+   it looks like:
+
+   iftmp.1_3 = &obj_2(D)->D.1762;
+
+   The base of the MEM_REF must be a default definition SSA NAME of a
+   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
+   whole MEM_REF expression is returned and the offset calculated from any
+   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
+   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */
+
+static tree
+get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
+{
+  HOST_WIDE_INT size, max_size;
+  tree expr, parm, obj;
+
+  if (!gimple_assign_single_p (assign))
+    return NULL_TREE;
+  expr = gimple_assign_rhs1 (assign);
+
+  if (TREE_CODE (expr) != ADDR_EXPR)
+    return NULL_TREE;
+  expr = TREE_OPERAND (expr, 0);
+  obj = expr;
+  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
+
+  if (TREE_CODE (expr) != MEM_REF
+      /* If this is a varying address, punt.  */
+      || max_size == -1
+      || max_size != size
+      || *offset < 0)
+    return NULL_TREE;
+  parm = TREE_OPERAND (expr, 0);
+  /* The base must be a default-definition SSA name of a PARM_DECL.  */
+  if (TREE_CODE (parm) != SSA_NAME
+      || !SSA_NAME_IS_DEFAULT_DEF (parm)
+      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
+    return NULL_TREE;
+
+  /* Add the offset of the MEM_REF itself to that accumulated from the
+     handled components.  */
+  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
+  *obj_p = obj;
+  return expr;
+}
+
/* Given that an actual argument is an SSA_NAME that is a result of a phi
statement PHI, try to find out whether NAME is in fact a
static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
- gimple phi)
+ gimple call, gimple phi)
{
- HOST_WIDE_INT offset, size, max_size;
+ HOST_WIDE_INT offset;
gimple assign, cond;
basic_block phi_bb, assign_bb, cond_bb;
- tree tmp, parm, expr;
+ tree tmp, parm, expr, obj;
int index, i;
if (gimple_phi_num_args (phi) != 2)
assign = SSA_NAME_DEF_STMT (tmp);
assign_bb = gimple_bb (assign);
- if (!single_pred_p (assign_bb)
- || !gimple_assign_single_p (assign))
- return;
- expr = gimple_assign_rhs1 (assign);
-
- if (TREE_CODE (expr) != ADDR_EXPR)
+ if (!single_pred_p (assign_bb))
return;
- expr = TREE_OPERAND (expr, 0);
- expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
-
- if (TREE_CODE (expr) != MEM_REF
- /* If this is a varying address, punt. */
- || max_size == -1
- || max_size != size)
+ expr = get_ancestor_addr_info (assign, &obj, &offset);
+ if (!expr)
return;
- offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
parm = TREE_OPERAND (expr, 0);
- if (TREE_CODE (parm) != SSA_NAME
- || !SSA_NAME_IS_DEFAULT_DEF (parm)
- || offset < 0)
- return;
-
index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
- if (index < 0)
- return;
+ gcc_assert (index >= 0);
cond_bb = single_pred (assign_bb);
cond = last_stmt (cond_bb);
|| !integer_zerop (gimple_cond_rhs (cond)))
return;
-
phi_bb = gimple_bb (phi);
for (i = 0; i < 2; i++)
{
return;
}
- jfunc->type = IPA_JF_ANCESTOR;
- jfunc->value.ancestor.formal_id = index;
- jfunc->value.ancestor.offset = offset;
- jfunc->value.ancestor.type = TREE_TYPE (TREE_TYPE (tmp));
+ if (!detect_type_change (obj, expr, call, jfunc, offset))
+ {
+ jfunc->type = IPA_JF_ANCESTOR;
+ jfunc->value.ancestor.formal_id = index;
+ jfunc->value.ancestor.offset = offset;
+ jfunc->value.ancestor.type = TREE_TYPE (obj);
+ }
}
-/* Given OP whch is passed as an actual argument to a called function,
+/* Given OP which is passed as an actual argument to a called function,
determine if it is possible to construct a KNOWN_TYPE jump function for it
and if so, create one and store it to JFUNC. */
static void
-compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc)
+compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
+ gimple call)
{
HOST_WIDE_INT offset, size, max_size;
tree base, binfo;
- if (TREE_CODE (op) != ADDR_EXPR
+ if (!flag_devirtualize
+ || TREE_CODE (op) != ADDR_EXPR
|| TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
return;
|| is_global_var (base))
return;
+ if (detect_type_change (op, base, call, jfunc, offset))
+ return;
+
binfo = TYPE_BINFO (TREE_TYPE (base));
if (!binfo)
return;
{
int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
- if (index >= 0)
+ if (index >= 0
+ && !detect_type_change_ssa (arg, call, &functions[num]))
{
functions[num].type = IPA_JF_PASS_THROUGH;
functions[num].value.pass_through.formal_id = index;
gimple stmt = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (stmt))
compute_complex_assign_jump_func (info, &functions[num],
- stmt, arg);
+ call, stmt, arg);
else if (gimple_code (stmt) == GIMPLE_PHI)
compute_complex_ancestor_jump_func (info, &functions[num],
- stmt);
+ call, stmt);
}
}
else
- compute_known_type_jump_func (arg, &functions[num]);
+ compute_known_type_jump_func (arg, &functions[num], call);
}
}
jfunc->value.member_cst.delta = delta;
}
-/* If RHS is an SSA_NAMe and it is defined by a simple copy assign statement,
+/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
return the rhs of its defining statement. */
static inline tree
for (cs = node->callees; cs; cs = cs->next_callee)
{
+ struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee, NULL);
/* We do not need to bother analyzing calls to unknown
functions unless they may become known during lto/whopr. */
if (!cs->callee->analyzed && !flag_lto)
ipa_count_arguments (cs);
/* If the descriptor of the callee is not initialized yet, we have to do
it now. */
- if (cs->callee->analyzed)
- ipa_initialize_node_params (cs->callee);
+ if (callee->analyzed)
+ ipa_initialize_node_params (callee);
if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
- != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
- ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
+ != ipa_get_param_count (IPA_NODE_REF (callee)))
+ ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
ipa_compute_jump_functions_for_edge (parms_info, cs);
}
return false;
}
-/* Find the indirect call graph edge corresponding to STMT and add to it all
- information necessary to describe a call to a parameter number PARAM_INDEX.
- NODE is the caller. POLYMORPHIC should be set to true iff the call is a
- virtual one. */
+/* Find the indirect call graph edge corresponding to STMT and mark it as a
+ call to a parameter number PARAM_INDEX. NODE is the caller. Return the
+ indirect call graph edge. */
-static void
-ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt,
- bool polymorphic)
+static struct cgraph_edge *
+ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
struct cgraph_edge *cs;
cs = cgraph_edge (node, stmt);
cs->indirect_info->param_index = param_index;
cs->indirect_info->anc_offset = 0;
- cs->indirect_info->polymorphic = polymorphic;
- if (polymorphic)
- {
- tree otr = gimple_call_fn (stmt);
- tree type, token = OBJ_TYPE_REF_TOKEN (otr);
- cs->indirect_info->otr_token = tree_low_cst (token, 1);
- type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (otr)));
- cs->indirect_info->otr_type = type;
- }
+ cs->indirect_info->polymorphic = 0;
+ return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
tree var = SSA_NAME_VAR (target);
index = ipa_get_param_decl_index (info, var);
if (index >= 0)
- ipa_note_param_call (node, index, call, false);
+ ipa_note_param_call (node, index, call);
return;
}
if (!branch || gimple_code (branch) != GIMPLE_COND)
return;
- if (gimple_cond_code (branch) != NE_EXPR
+ if ((gimple_cond_code (branch) != NE_EXPR
+ && gimple_cond_code (branch) != EQ_EXPR)
|| !integer_zerop (gimple_cond_rhs (branch)))
return;
index = ipa_get_param_decl_index (info, rec);
if (index >= 0 && !is_parm_modified_before_call (&parms_info[index],
call, rec))
- ipa_note_param_call (node, index, call, false);
+ ipa_note_param_call (node, index, call);
return;
}
struct ipa_node_params *info, gimple call,
tree target)
{
+ struct cgraph_edge *cs;
+ struct cgraph_indirect_call_info *ii;
+ struct ipa_jump_func jfunc;
tree obj = OBJ_TYPE_REF_OBJECT (target);
- tree var;
int index;
+ HOST_WIDE_INT anc_offset;
+
+ if (!flag_devirtualize)
+ return;
- if (TREE_CODE (obj) == ADDR_EXPR)
+ if (TREE_CODE (obj) != SSA_NAME)
+ return;
+
+ if (SSA_NAME_IS_DEFAULT_DEF (obj))
{
- do
- {
- obj = TREE_OPERAND (obj, 0);
- }
- while (TREE_CODE (obj) == COMPONENT_REF);
- if (TREE_CODE (obj) != MEM_REF)
+ if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
return;
- obj = TREE_OPERAND (obj, 0);
- }
- if (TREE_CODE (obj) != SSA_NAME
- || !SSA_NAME_IS_DEFAULT_DEF (obj))
- return;
+ anc_offset = 0;
+ index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
+ gcc_assert (index >= 0);
+ if (detect_type_change_ssa (obj, call, &jfunc))
+ return;
+ }
+ else
+ {
+ gimple stmt = SSA_NAME_DEF_STMT (obj);
+ tree expr;
- var = SSA_NAME_VAR (obj);
- index = ipa_get_param_decl_index (info, var);
+ expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
+ if (!expr)
+ return;
+ index = ipa_get_param_decl_index (info,
+ SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
+ gcc_assert (index >= 0);
+ if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
+ return;
+ }
- if (index >= 0)
- ipa_note_param_call (node, index, call, true);
+ cs = ipa_note_param_call (node, index, call);
+ ii = cs->indirect_info;
+ ii->anc_offset = anc_offset;
+ ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
+ ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
+ ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
{
tree target = gimple_call_fn (call);
+ if (!target)
+ return;
if (TREE_CODE (target) == SSA_NAME)
ipa_analyze_indirect_call_uses (node, info, parms_info, call, target);
else if (TREE_CODE (target) == OBJ_TYPE_REF)
info->uses_analysis_done = 1;
}
-/* Initialize the array describing properties of of formal parameters of NODE,
- analyze their uses and and compute jump functions associated witu actual
- arguments of calls from within NODE. */
+/* Initialize the array describing properties of of formal parameters
+ of NODE, analyze their uses and compute jump functions associated
+ with actual arguments of calls from within NODE. */
void
ipa_analyze_node (struct cgraph_node *node)
{
- struct ipa_node_params *info = IPA_NODE_REF (node);
+ struct ipa_node_params *info;
struct param_analysis_info *parms_info;
int i, param_count;
+ ipa_check_create_node_params ();
+ ipa_check_create_edge_args ();
+ info = IPA_NODE_REF (node);
+ push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ current_function_decl = node->decl;
ipa_initialize_node_params (node);
param_count = ipa_get_param_count (info);
for (i = 0; i < param_count; i++)
if (parms_info[i].visited_statements)
BITMAP_FREE (parms_info[i].visited_statements);
+
+ current_function_decl = NULL;
+ pop_cfun ();
}
-/* Update the jump function DST when the call graph edge correspondng to SRC is
+/* Update the jump function DST when the call graph edge corresponding to SRC is
is being inlined, knowing that DST is of type ancestor and src of known
type. */
target = TREE_OPERAND (target, 0);
if (TREE_CODE (target) != FUNCTION_DECL)
return NULL;
- callee = cgraph_node (target);
+ callee = cgraph_get_node (target);
if (!callee)
return NULL;
ipa_check_create_node_params ();
fprintf (dump_file, "\n");
}
}
+ callee = cgraph_function_or_thunk_node (callee, NULL);
if (ipa_get_cs_argument_count (IPA_EDGE_REF (ie))
!= ipa_get_param_count (IPA_NODE_REF (callee)))
}
/* Try to find a destination for indirect edge IE that corresponds to a
- virtuall call based on a formal parameter which is described by jump
+ virtual call based on a formal parameter which is described by jump
function JFUNC and if it can be determined, make it direct and return the
direct edge. Otherwise, return NULL. */
type = ie->indirect_info->otr_type;
binfo = get_binfo_at_offset (binfo, ie->indirect_info->anc_offset, type);
if (binfo)
- target = gimple_get_virt_mehtod_for_binfo (token, binfo, &delta, true);
+ target = gimple_get_virt_method_for_binfo (token, binfo, &delta);
else
return NULL;
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
VEC (cgraph_edge_p, heap) **new_edges)
{
- /* FIXME lto: We do not stream out indirect call information. */
- if (flag_wpa)
- return false;
-
/* Do nothing if the preparation phase has not been carried out yet
(i.e. during early inlining). */
if (!ipa_node_params_vector)
void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
- if (info->params)
- free (info->params);
+ free (info->params);
memset (info, 0, sizeof (*info));
}
static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
- __attribute__((unused)) void *data)
+ ATTRIBUTE_UNUSED void *data)
{
struct ipa_node_params *old_info, *new_info;
int param_count, i;
new_info->node_enqueued = old_info->node_enqueued;
}
+
+/* Hook run when a new function NODE is inserted into the call graph;
+   analyze it so its IPA information is available.  DATA is unused.  */
+
+static void
+ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+{
+  ipa_analyze_node (node);
+}
+
/* Register our cgraph hooks if they are not already there. */
void
if (!node_duplication_hook_holder)
node_duplication_hook_holder =
cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
+ function_insertion_hook_holder =
+ cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks if they are not already there. */
edge_duplication_hook_holder = NULL;
cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
node_duplication_hook_holder = NULL;
+ cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
+ function_insertion_hook_holder = NULL;
}
-/* Allocate all necessary data strucutures necessary for indirect inlining. */
+/* Allocate all necessary data structures necessary for indirect inlining. */
void
ipa_create_all_structures_for_iinln (void)
base_offset
+ adj->offset / BITS_PER_UNIT);
off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
- off, 0);
+ off);
base = TREE_OPERAND (base, 0);
}
else
lto_output_uleb128_stream (ob->main_stream, node_ref);
bp = bitpack_create (ob->main_stream);
- bp_pack_value (&bp, info->called_with_var_arguments, 1);
gcc_assert (info->uses_analysis_done
|| ipa_get_param_count (info) == 0);
gcc_assert (!info->node_enqueued);
ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
}
for (e = node->indirect_calls; e; e = e->next_callee)
- ipa_write_indirect_edge_info (ob, e);
+ {
+ struct ipa_edge_args *args = IPA_EDGE_REF (e);
+
+ lto_output_uleb128_stream (ob->main_stream,
+ ipa_get_cs_argument_count (args));
+ for (j = 0; j < ipa_get_cs_argument_count (args); j++)
+ ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
+ ipa_write_indirect_edge_info (ob, e);
+ }
}
-/* Srtream in NODE info from IB. */
+/* Stream in NODE info from IB. */
static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
ipa_initialize_node_params (node);
bp = lto_input_bitpack (ib);
- info->called_with_var_arguments = bp_unpack_value (&bp, 1);
if (ipa_get_param_count (info) != 0)
info->uses_analysis_done = true;
info->node_enqueued = false;
ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
}
for (e = node->indirect_calls; e; e = e->next_callee)
- ipa_read_indirect_edge_info (ib, data_in, e);
+ {
+ struct ipa_edge_args *args = IPA_EDGE_REF (e);
+ int count = lto_input_uleb128 (ib);
+
+ ipa_set_cs_argument_count (args, count);
+ if (count)
+ {
+ args->jump_functions = ggc_alloc_cleared_vec_ipa_jump_func
+ (ipa_get_cs_argument_count (args));
+ for (k = 0; k < ipa_get_cs_argument_count (args); k++)
+ ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
+ }
+ ipa_read_indirect_edge_info (ib, data_in, e);
+ }
}
/* Write jump functions for nodes in SET. */
ipa_prop_write_jump_functions (cgraph_node_set set)
{
struct cgraph_node *node;
- struct output_block *ob = create_output_block (LTO_section_jump_functions);
+ struct output_block *ob;
unsigned int count = 0;
cgraph_node_set_iterator csi;
- ob->cgraph_node = NULL;
+ if (!ipa_node_params_vector)
+ return;
+ ob = create_output_block (LTO_section_jump_functions);
+ ob->cgraph_node = NULL;
for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
{
node = csi_node (csi);
- if (node->analyzed && IPA_NODE_REF (node) != NULL)
+ if (cgraph_function_with_gimple_body_p (node)
+ && IPA_NODE_REF (node) != NULL)
count++;
}
for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
{
node = csi_node (csi);
- if (node->analyzed && IPA_NODE_REF (node) != NULL)
+ if (cgraph_function_with_gimple_body_p (node)
+ && IPA_NODE_REF (node) != NULL)
ipa_write_node_info (ob, node);
}
lto_output_1_stream (ob->main_stream, 0);
}
/* After merging units, we can get mismatch in argument counts.
- Also decl merging might've rendered parameter lists obsolette.
+ Also decl merging might've rendered parameter lists obsolete.
Also compute called_with_variable_arg info. */
void
if (node->analyzed)
for (cs = node->callees; cs; cs = cs->next_callee)
{
+ struct cgraph_node *callee;
+
+ callee = cgraph_function_or_thunk_node (cs->callee, NULL);
if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
- != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
- ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
+ != ipa_get_param_count (IPA_NODE_REF (callee)))
+ ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
}
}
+
+/* Given the jump function JFUNC, compute the lattice LAT that describes the
+   value coming down the callsite.  INFO describes the caller node so that
+   pass-through jump functions can be evaluated.  */
+
+void
+ipa_lattice_from_jfunc (struct ipa_node_params *info, struct ipcp_lattice *lat,
+                        struct ipa_jump_func *jfunc)
+{
+  if (jfunc->type == IPA_JF_CONST)
+    {
+      /* The argument is a compile-time constant.  */
+      lat->type = IPA_CONST_VALUE;
+      lat->constant = jfunc->value.constant;
+    }
+  else if (jfunc->type == IPA_JF_PASS_THROUGH)
+    {
+      /* The argument is derived from a formal parameter of the caller,
+         possibly through an arithmetic operation.  */
+      struct ipcp_lattice *caller_lat;
+      tree cst;
+
+      caller_lat = ipa_get_lattice (info, jfunc->value.pass_through.formal_id);
+      lat->type = caller_lat->type;
+      if (caller_lat->type != IPA_CONST_VALUE)
+        return;
+      cst = caller_lat->constant;
+
+      if (jfunc->value.pass_through.operation != NOP_EXPR)
+        {
+          /* Comparisons yield a boolean; other operations keep the type of
+             the constant.  */
+          tree restype;
+          if (TREE_CODE_CLASS (jfunc->value.pass_through.operation)
+              == tcc_comparison)
+            restype = boolean_type_node;
+          else
+            restype = TREE_TYPE (cst);
+          cst = fold_binary (jfunc->value.pass_through.operation,
+                             restype, cst, jfunc->value.pass_through.operand);
+        }
+      /* fold_binary may fail and return NULL_TREE; fall to bottom then.  */
+      if (!cst || !is_gimple_ip_invariant (cst))
+        lat->type = IPA_BOTTOM;
+      lat->constant = cst;
+    }
+  else if (jfunc->type == IPA_JF_ANCESTOR)
+    {
+      /* The argument is the address of an ancestor sub-object of a formal
+         parameter of the caller.  */
+      struct ipcp_lattice *caller_lat;
+      tree t;
+
+      caller_lat = ipa_get_lattice (info, jfunc->value.ancestor.formal_id);
+      lat->type = caller_lat->type;
+      if (caller_lat->type != IPA_CONST_VALUE)
+        return;
+      if (TREE_CODE (caller_lat->constant) != ADDR_EXPR)
+        {
+          /* This can happen when the constant is a NULL pointer.  */
+          lat->type = IPA_BOTTOM;
+          return;
+        }
+      t = TREE_OPERAND (caller_lat->constant, 0);
+      /* Build a reference to the ancestor sub-object at the recorded offset
+         and take its address.  */
+      t = build_ref_for_offset (EXPR_LOCATION (t), t,
+                                jfunc->value.ancestor.offset,
+                                jfunc->value.ancestor.type, NULL, false);
+      lat->constant = build_fold_addr_expr (t);
+    }
+  else
+    /* Unknown jump functions carry no usable value.  */
+    lat->type = IPA_BOTTOM;
+}
+
+/* Determine whether JFUNC evaluates to a constant and if so, return it.
+   Otherwise return NULL_TREE.  INFO describes the caller node so that
+   pass-through jump functions can be evaluated.  */
+
+tree
+ipa_cst_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
+{
+  struct ipcp_lattice lat;
+
+  ipa_lattice_from_jfunc (info, &lat, jfunc);
+  if (lat.type == IPA_CONST_VALUE)
+    return lat.constant;
+  else
+    return NULL_TREE;
+}