+/* Inspect the given TYPE and return true iff it has the same structure (the
+ same number of fields of the same types) as a C++ member pointer. If
+ METHOD_PTR and DELTA are non-NULL, store the trees representing the
+ corresponding fields there. */
+
+static bool
+type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
+{
+ tree fld;
+
+ if (TREE_CODE (type) != RECORD_TYPE)
+ return false;
+
+ fld = TYPE_FIELDS (type);
+ if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
+ || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE)
+ return false;
+
+ if (method_ptr)
+ *method_ptr = fld;
+
+ fld = DECL_CHAIN (fld);
+ if (!fld || INTEGRAL_TYPE_P (fld))
+ return false;
+ if (delta)
+ *delta = fld;
+
+ if (DECL_CHAIN (fld))
+ return false;
+
+ return true;
+}
+
+/* Go through arguments of the CALL and for every one that looks like a member
+ pointer, check whether it can be safely declared pass-through and if so,
+ mark that to the corresponding item of jump FUNCTIONS. Return true iff
+ there are non-pass-through member pointers within the arguments. INFO
+ describes formal parameters of the caller. PARMS_INFO is a pointer to a
+ vector containing intermediate information about each formal parameter. */
+
+static bool
+compute_pass_through_member_ptrs (struct ipa_node_params *info,
+ struct param_analysis_info *parms_ainfo,
+ struct ipa_edge_args *args,
+ gimple call)
+{
+ bool undecided_members = false;
+ unsigned num;
+ tree arg;
+
+ for (num = 0; num < gimple_call_num_args (call); num++)
+ {
+ arg = gimple_call_arg (call, num);
+
+ if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
+ {
+ if (TREE_CODE (arg) == PARM_DECL)
+ {
+ int index = ipa_get_param_decl_index (info, arg);
+
+ gcc_assert (index >=0);
+ if (!is_parm_modified_before_stmt (&parms_ainfo[index], call,
+ arg))
+ {
+ struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args,
+ num);
+ jfunc->type = IPA_JF_PASS_THROUGH;
+ jfunc->value.pass_through.formal_id = index;
+ jfunc->value.pass_through.operation = NOP_EXPR;
+ }
+ else
+ undecided_members = true;
+ }
+ else
+ undecided_members = true;
+ }
+ }
+
+ return undecided_members;
+}
+
+/* Simple function filling in a member pointer constant jump function (with PFN
+ and DELTA as the constant value) into JFUNC. */
+
+static void
+fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
+ tree pfn, tree delta)
+{
+ jfunc->type = IPA_JF_CONST_MEMBER_PTR;
+ jfunc->value.member_cst.pfn = pfn;
+ jfunc->value.member_cst.delta = delta;
+}
+
+/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
+ return the rhs of its defining statement. */
+
+static inline tree
+get_ssa_def_if_simple_copy (tree rhs)
+{
+ while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
+
+ if (gimple_assign_single_p (def_stmt))
+ rhs = gimple_assign_rhs1 (def_stmt);
+ else
+ break;
+ }
+ return rhs;
+}
+
+/* Traverse statements from CALL backwards, scanning whether the argument ARG
+ which is a member pointer is filled in with constant values. If it is, fill
+ the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
+ fields of the record type of the member pointer. To give an example, we
+ look for a pattern looking like the following:
+
+ D.2515.__pfn ={v} printStuff;
+ D.2515.__delta ={v} 0;
+ i_1 = doprinting (D.2515); */
+
+static void
+determine_cst_member_ptr (gimple call, tree arg, tree method_field,
+ tree delta_field, struct ipa_jump_func *jfunc)
+{
+ gimple_stmt_iterator gsi;
+ tree method = NULL_TREE;
+ tree delta = NULL_TREE;
+
+ gsi = gsi_for_stmt (call);
+
+ gsi_prev (&gsi);
+ for (; !gsi_end_p (gsi); gsi_prev (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ tree lhs, rhs, fld;
+
+ if (!stmt_may_clobber_ref_p (stmt, arg))
+ continue;
+ if (!gimple_assign_single_p (stmt))
+ return;
+
+ lhs = gimple_assign_lhs (stmt);
+ rhs = gimple_assign_rhs1 (stmt);
+
+ if (TREE_CODE (lhs) != COMPONENT_REF
+ || TREE_OPERAND (lhs, 0) != arg)
+ return;
+
+ fld = TREE_OPERAND (lhs, 1);
+ if (!method && fld == method_field)
+ {
+ rhs = get_ssa_def_if_simple_copy (rhs);
+ if (TREE_CODE (rhs) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == FUNCTION_DECL
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) == METHOD_TYPE)
+ {
+ method = TREE_OPERAND (rhs, 0);
+ if (delta)
+ {
+ fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
+ return;
+ }
+ }
+ else
+ return;
+ }
+
+ if (!delta && fld == delta_field)
+ {
+ rhs = get_ssa_def_if_simple_copy (rhs);
+ if (TREE_CODE (rhs) == INTEGER_CST)
+ {
+ delta = rhs;
+ if (method)
+ {
+ fill_member_ptr_cst_jump_function (jfunc, rhs, delta);
+ return;
+ }
+ }
+ else
+ return;
+ }
+ }
+
+ return;
+}
+
+/* Go through the arguments of the CALL and for every member pointer within
+ tries determine whether it is a constant. If it is, create a corresponding
+ constant jump function in FUNCTIONS which is an array of jump functions
+ associated with the call. */
+
+static void
+compute_cst_member_ptr_arguments (struct ipa_edge_args *args,
+ gimple call)
+{
+ unsigned num;
+ tree arg, method_field, delta_field;
+
+ for (num = 0; num < gimple_call_num_args (call); num++)
+ {
+ struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, num);
+ arg = gimple_call_arg (call, num);
+
+ if (jfunc->type == IPA_JF_UNKNOWN
+ && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
+ &delta_field))
+ determine_cst_member_ptr (call, arg, method_field, delta_field, jfunc);
+ }
+}
+
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  PARMS_AINFO is a pointer to a vector with intermediate
   information about each formal parameter of the caller.  */

static void
ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int arg_num = gimple_call_num_args (call);

  /* Nothing to do for argument-less calls; a non-NULL jump_functions vector
     means the functions have already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);

  /* We will deal with constants and SSA scalars first:  */
  compute_scalar_jump_functions (info, parms_ainfo, args, call);

  /* Let's check whether there are any potential member pointers and if so,
     whether we can determine their functions as pass_through.  */
  if (!compute_pass_through_member_ptrs (info, parms_ainfo, args, call))
    return;

  /* Finally, let's check whether we actually pass a new constant member
     pointer here...  */
  compute_cst_member_ptr_arguments (args, call);
}
+
+/* Compute jump functions for all edges - both direct and indirect - outgoing
+ from NODE. Also count the actual arguments in the process. */
+
+static void
+ipa_compute_jump_functions (struct cgraph_node *node,
+ struct param_analysis_info *parms_ainfo)
+{
+ struct cgraph_edge *cs;
+
+ for (cs = node->callees; cs; cs = cs->next_callee)
+ {
+ struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
+ NULL);
+ /* We do not need to bother analyzing calls to unknown
+ functions unless they may become known during lto/whopr. */
+ if (!callee->analyzed && !flag_lto)
+ continue;
+ ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
+ }
+
+ for (cs = node->indirect_calls; cs; cs = cs->next_callee)
+ ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
+}
+
/* If RHS looks like a rhs of a statement loading pfn from a member
   pointer formal parameter, return the parameter, otherwise return
   NULL.  If USE_DELTA, then we look for a use of the delta field
   rather than the pfn.  */

static tree
ipa_get_member_ptr_load_param (tree rhs, bool use_delta)
{
  tree rec, ref_field, ref_offset, fld, fld_offset, ptr_field, delta_field;

  /* The load may appear either as a COMPONENT_REF of the parameter (field
     access) or as a plain MEM_REF with a constant byte offset.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The base must be a PARM_DECL whose type is structured like a C++ member
     pointer; fetch its pfn and delta fields for the checks below.  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;

  ref_offset = TREE_OPERAND (rhs, 1);

  if (ref_field)
    {
      /* A field access combined with a nonzero MEM_REF offset does not
	 match a direct load of either field.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;

      if (use_delta)
	fld = delta_field;
      else
	fld = ptr_field;

      return ref_field == fld ? rec : NULL_TREE;
    }

  /* No field access: compare the MEM_REF offset against the byte position
     of the requested field.  */
  if (use_delta)
    fld_offset = byte_position (delta_field);
  else
    fld_offset = byte_position (ptr_field);

  return tree_int_cst_equal (ref_offset, fld_offset) ? rec : NULL_TREE;
}
+
+/* If STMT looks like a statement loading a value from a member pointer formal
+ parameter, this function returns that parameter. */
+
+static tree
+ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta)
+{
+ tree rhs;
+
+ if (!gimple_assign_single_p (stmt))
+ return NULL_TREE;
+
+ rhs = gimple_assign_rhs1 (stmt);
+ return ipa_get_member_ptr_load_param (rhs, use_delta);
+}
+
+/* Returns true iff T is an SSA_NAME defined by a statement. */
+
+static bool
+ipa_is_ssa_with_stmt_def (tree t)
+{
+ if (TREE_CODE (t) == SSA_NAME
+ && !SSA_NAME_IS_DEFAULT_DEF (t))
+ return true;
+ else
+ return false;
+}
+
+/* Find the indirect call graph edge corresponding to STMT and mark it as a
+ call to a parameter number PARAM_INDEX. NODE is the caller. Return the
+ indirect call graph edge. */
+
+static struct cgraph_edge *
+ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
+{
+ struct cgraph_edge *cs;
+
+ cs = cgraph_edge (node, stmt);
+ cs->indirect_info->param_index = param_index;
+ cs->indirect_info->anc_offset = 0;
+ cs->indirect_info->polymorphic = 0;
+ return cs;
+}
+
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }
*/

static void
ipa_analyze_indirect_call_uses (struct cgraph_node *node,
				struct ipa_node_params *info,
				struct param_analysis_info *parms_ainfo,
				gimple call, tree target)
{
  gimple def;
  tree n1, n2;
  gimple d1, d2;
  tree rec, rec2, cond;
  gimple branch;
  int index;
  basic_block bb, virt_bb, join;

  /* The easy case: the call target is directly an (unmodified) formal
     parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (node, index, call);
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */

  if (!POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  def = SSA_NAME_DEF_STMT (target);
  if (gimple_code (def) != GIMPLE_PHI)
    return;

  if (gimple_phi_num_args (def) != 2)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  n1 = PHI_ARG_DEF (def, 0);
  n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  d1 = SSA_NAME_DEF_STMT (n1);
  d2 = SSA_NAME_DEF_STMT (n2);

  /* Exactly one PHI argument must be the pfn load; the other one comes from
     the virtual-lookup block.  */
  join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be a BIT_AND_EXPR with 1 (the vbit test).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional conversion preceding the bit test.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* Depending on the target ABI, the vbit lives either in the pfn or in the
     delta field; the branch must load from the corresponding field of the
     very same parameter as the pfn load found earlier.  */
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta));

  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0 && !is_parm_modified_before_stmt (&parms_ainfo[index],
						   call, rec))
    ipa_note_param_call (node, index, call);

  return;
}
+
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   (described by INFO), create a call note for the statement.  */

static void
ipa_analyze_virtual_call_uses (struct cgraph_node *node,
			       struct ipa_node_params *info, gimple call,
			       tree target)
{
  struct cgraph_edge *cs;
  struct cgraph_indirect_call_info *ii;
  struct ipa_jump_func jfunc;
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* The object is the formal parameter itself.  */
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type may have changed before the call.  */
      if (detect_type_change_ssa (obj, call, &jfunc))
	return;
    }
  else
    {
      /* The object may be an ancestor (a field at some offset) of a formal
	 parameter; try to recognize that access.  */
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
	return;
    }

  /* Record the virtual call: parameter index, offset within it, OBJ_TYPE_REF
     token and static type of the object.  */
  cs = ipa_note_param_call (node, index, call);
  ii = cs->indirect_info;
  ii->anc_offset = anc_offset;
  ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
  ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
  ii->polymorphic = 1;
}
+
+/* Analyze a call statement CALL whether and how it utilizes formal parameters
+ of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
+ containing intermediate information about each formal parameter. */
+
+static void
+ipa_analyze_call_uses (struct cgraph_node *node,
+ struct ipa_node_params *info,
+ struct param_analysis_info *parms_ainfo, gimple call)
+{
+ tree target = gimple_call_fn (call);
+
+ if (!target)
+ return;
+ if (TREE_CODE (target) == SSA_NAME)
+ ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
+ else if (TREE_CODE (target) == OBJ_TYPE_REF)
+ ipa_analyze_virtual_call_uses (node, info, call, target);
+}
+
+
+/* Analyze the call statement STMT with respect to formal parameters (described
+ in INFO) of caller given by NODE. Currently it only checks whether formal
+ parameters are called. PARMS_AINFO is a pointer to a vector containing
+ intermediate information about each formal parameter. */
+
+static void
+ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
+ struct param_analysis_info *parms_ainfo, gimple stmt)
+{
+ if (is_gimple_call (stmt))
+ ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
+}
+
+/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
+ If OP is a parameter declaration, mark it as used in the info structure
+ passed in DATA. */
+
+static bool
+visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
+ tree op, void *data)
+{
+ struct ipa_node_params *info = (struct ipa_node_params *) data;
+
+ op = get_base_address (op);
+ if (op
+ && TREE_CODE (op) == PARM_DECL)
+ {
+ int index = ipa_get_param_decl_index (info, op);
+ gcc_assert (index >= 0);
+ ipa_set_param_used (info, index, true);
+ }
+
+ return false;
+}
+
/* Scan the function body of NODE and inspect the uses of formal parameters.
   Store the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_params_uses (struct cgraph_node *node,
			 struct param_analysis_info *parms_ainfo)
{
  tree decl = node->decl;
  basic_block bb;
  struct function *func;
  gimple_stmt_iterator gsi;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i;

  /* Nothing to do without parameters; do not redo a finished analysis.  */
  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
    return;

  for (i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm)
	  && gimple_default_def (DECL_STRUCT_FUNCTION (node->decl), parm))
	ipa_set_param_used (info, i, true);
    }

  func = DECL_STRUCT_FUNCTION (decl);
  FOR_EACH_BB_FN (bb, func)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* Debug statements must not influence the analysis.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
	  /* Record every parameter referenced as a load, store or address
	     operand as used.  */
	  walk_stmt_load_store_addr_ops (stmt, info,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis,
					 visit_ref_for_mod_analysis);
	}
      /* PHI nodes can also reference parameters.  */
      for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
	walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis,
				       visit_ref_for_mod_analysis);
    }

  info->uses_analysis_done = 1;
}
+
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_node_params *info;
  struct param_analysis_info *parms_ainfo;
  int i, param_count;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);
  /* Make NODE's function the current one for the analyses below.  */
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  current_function_decl = node->decl;
  ipa_initialize_node_params (node);

  /* Stack-allocate and clear one intermediate info slot per parameter.  */
  param_count = ipa_get_param_count (info);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);

  ipa_analyze_params_uses (node, parms_ainfo);
  ipa_compute_jump_functions (node, parms_ainfo);

  /* Release the per-parameter bitmaps created during the analyses.  */
  for (i = 0; i < param_count; i++)
    if (parms_ainfo[i].visited_statements)
      BITMAP_FREE (parms_ainfo[i].visited_statements);

  current_function_decl = NULL;
  pop_cfun ();
}
+
+
+/* Update the jump function DST when the call graph edge corresponding to SRC is
+ is being inlined, knowing that DST is of type ancestor and src of known
+ type. */
+
+static void
+combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
+ struct ipa_jump_func *dst)
+{
+ HOST_WIDE_INT combined_offset;
+ tree combined_type;
+
+ combined_offset = src->value.known_type.offset + dst->value.ancestor.offset;
+ combined_type = dst->value.ancestor.type;
+
+ dst->type = IPA_JF_KNOWN_TYPE;
+ dst->value.known_type.base_type = src->value.known_type.base_type;
+ dst->value.known_type.offset = combined_offset;
+ dst->value.known_type.component_type = combined_type;
+}
+
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst->value.ancestor.formal_id >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  /* Compose the ancestor function with the one describing the
	     corresponding actual argument of the inlined edge.  */
	  src = ipa_get_ith_jump_func (top, dst->value.ancestor.formal_id);
	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      /* A simple pass-through just inherits the jump function of the
		 corresponding actual argument.  */
	      src = ipa_get_ith_jump_func (top,
					   dst->value.pass_through.formal_id);
	      *dst = *src;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
+
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;

  /* Accept both a bare FUNCTION_DECL and its address.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    return NULL;
  callee = cgraph_get_node (target);
  if (!callee)
    return NULL;
  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  cgraph_make_edge_direct (ie, callee);
  if (dump_file)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       cgraph_node_name (ie->caller), ie->caller->uid,
	       cgraph_node_name (ie->callee), ie->callee->uid);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  /* NOTE(review): the result of this alias/thunk resolution is unused and
     CALLEE goes out of scope immediately afterwards — confirm whether this
     statement is a leftover that can be removed.  */
  callee = cgraph_function_or_thunk_node (callee, NULL);

  return ie;
}
+
+/* Try to find a destination for indirect edge IE that corresponds to a simple
+ call or a call of a member function pointer and where the destination is a
+ pointer formal parameter described by jump function JFUNC. If it can be
+ determined, return the newly direct edge, otherwise return NULL. */
+
+static struct cgraph_edge *
+try_make_edge_direct_simple_call (struct cgraph_edge *ie,
+ struct ipa_jump_func *jfunc)
+{
+ tree target;
+
+ if (jfunc->type == IPA_JF_CONST)
+ target = jfunc->value.constant;
+ else if (jfunc->type == IPA_JF_CONST_MEMBER_PTR)
+ target = jfunc->value.member_cst.pfn;
+ else
+ return NULL;
+
+ return ipa_make_edge_direct_to_target (ie, target);
+}
+
+/* Try to find a destination for indirect edge IE that corresponds to a
+ virtual call based on a formal parameter which is described by jump
+ function JFUNC and if it can be determined, make it direct and return the
+ direct edge. Otherwise, return NULL. */
+
+static struct cgraph_edge *
+try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
+ struct ipa_jump_func *jfunc)
+{
+ tree binfo, target;
+
+ if (jfunc->type != IPA_JF_KNOWN_TYPE)
+ return NULL;
+
+ binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
+ gcc_checking_assert (binfo);
+ binfo = get_binfo_at_offset (binfo, jfunc->value.known_type.offset
+ + ie->indirect_info->anc_offset,
+ ie->indirect_info->otr_type);
+ if (binfo)
+ target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
+ binfo);
+ else
+ return NULL;
+
+ if (target)
+ return ipa_make_edge_direct_to_target (ie, target);
+ else
+ return NULL;
+}
+
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      VEC (cgraph_edge_p, heap) **new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);

  /* NEXT_IE is remembered up front because IE may be turned into a direct
     edge below.  */
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;

      next_ie = ie->next_callee;

      /* -1 means the target is not based on any formal parameter.  */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      /* Re-express the called parameter in terms of the formal parameters of
	 the function NODE has been inlined into, or give up.  */
      jfunc = ipa_get_ith_jump_func (top, ici->param_index);
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && jfunc->value.pass_through.operation == NOP_EXPR)
	ici->param_index = jfunc->value.pass_through.formal_id;
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  ici->param_index = jfunc->value.ancestor.formal_id;
	  ici->anc_offset += jfunc->value.ancestor.offset;
	}
      else
	/* Either we can find a destination for this edge now or never. */
	ici->param_index = -1;

      if (!flag_indirect_inlining)
	continue;

      if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc);

      if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
						   new_direct_edge->callee->decl);
	  if (new_edges)
	    {
	      VEC_safe_push (cgraph_edge_p, heap, *new_edges,
			     new_direct_edge);
	      /* Refresh TOP: creating the new edge may have caused the
		 edge-args vector to be reallocated.  */
	      top = IPA_EDGE_REF (cs);
	      res = true;
	    }
	}
    }

  return res;
}
+
+/* Recursively traverse subtree of NODE (including node) made of inlined
+ cgraph_edges when CS has been inlined and invoke
+ update_indirect_edges_after_inlining on all nodes and
+ update_jump_functions_after_inlining on all non-inlined edges that lead out
+ of this subtree. Newly discovered indirect edges will be added to
+ *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
+ created. */
+
+static bool
+propagate_info_to_inlined_callees (struct cgraph_edge *cs,
+ struct cgraph_node *node,
+ VEC (cgraph_edge_p, heap) **new_edges)
+{
+ struct cgraph_edge *e;
+ bool res;
+
+ res = update_indirect_edges_after_inlining (cs, node, new_edges);
+
+ for (e = node->callees; e; e = e->next_callee)
+ if (!e->inline_failed)
+ res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
+ else
+ update_jump_functions_after_inlining (cs, e);
+ for (e = node->indirect_calls; e; e = e->next_callee)
+ update_jump_functions_after_inlining (cs, e);
+
+ return res;
+}
+
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   VEC (cgraph_edge_p, heap) **new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector)
    return false;
  gcc_assert (ipa_edge_args_vector);

  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  /* We do not keep jump functions of inlined edges up to date.  Better to free
     them so we do not access them accidentally.  */
  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
  return changed;
}
+
+/* Frees all dynamically allocated structures that the argument info points
+ to. */
+
+void
+ipa_free_edge_args_substructures (struct ipa_edge_args *args)
+{
+ if (args->jump_functions)
+ ggc_free (args->jump_functions);
+
+ memset (args, 0, sizeof (*args));
+}
+
+/* Free all ipa_edge structures. */
+
+void
+ipa_free_all_edge_args (void)
+{
+ int i;
+ struct ipa_edge_args *args;
+
+ FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
+ ipa_free_edge_args_substructures (args);
+
+ VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
+ ipa_edge_args_vector = NULL;
+}
+
/* Frees all dynamically allocated structures that the param info points
   to.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  VEC_free (tree, heap, info->known_vals);
  memset (info, 0, sizeof (*info));
}
+
+/* Free all ipa_node_params structures. */
+
+void
+ipa_free_all_node_params (void)
+{
+ int i;
+ struct ipa_node_params *info;
+
+ FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
+ ipa_free_node_params_substructures (info);
+
+ VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
+ ipa_node_params_vector = NULL;
+}
+
+/* Hook that is called by cgraph.c when an edge is removed. */